pax_global_header00006660000000000000000000000064136116260300014510gustar00rootroot0000000000000052 comment=fed61c5c8e4460719f896d156d7c67a41eca015f buildbot-2.6.0/000077500000000000000000000000001361162603000133215ustar00rootroot00000000000000buildbot-2.6.0/.bbtravis.yml000066400000000000000000000170441361162603000157440ustar00rootroot00000000000000# BBTravis CI configuration file language: python # Available Python versions: python: - "3.8" label_mapping: TWISTED: tw SQLALCHEMY: sqla SQLALCHEMY_MIGRATE: sqlam latest: l python: py TESTS: t DB_TYPE: db WORKER_PYTHON: wp env: global: - BUILDBOT_TEST_DB_URL=sqlite:// - HYPER_SIZE=m1 - CHROME_BIN=/usr/bin/chromium-browser matrix: - TWISTED=latest SQLALCHEMY=latest TESTS=coverage # add js tests in separate job. Start it early because it is quite long - TWISTED=latest SQLALCHEMY=latest TESTS=js_build HYPER_SIZE=m1 - TWISTED=latest SQLALCHEMY=latest TESTS=js_unit HYPER_SIZE=m2 - TWISTED=latest SQLALCHEMY=latest TESTS=smokes HYPER_SIZE=m2 - TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.7 - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.6 - TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=3.5 # Configuration when SQLite database is persistent between running tests # (by default in other tests in-memory SQLite database is used which is # recreated for each test). # Helps to detect issues with incorrect database setup/cleanup in tests. - TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=sqlite:////tmp/test_db.sqlite DB_TYPE=sqlite # Configuration that runs tests with real MySQL database (TODO does not work yet with our docker image) - TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=mysql+mysqldb://travis@127.0.0.1/bbtest DB_TYPE=mysql # innodb tests takes 20min probably because of docker aufs. # travis images provides much faster innodb so we keep these test there until we implement ramfs based # mysql installation # - TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=mysql+mysqldb://travis@127.0.0.1/bbtest?storage_engine=InnoDB # Configuration that runs tests with real PostgreSQL database with pg8000 and psycopg2 drivers # psycopg2 uses Peer Authentication which is configured in the dockerfile, while pg8000 use md5 auth with dummy password #- TWISTED=latest SQLALCHEMY=latest TESTS=trial BUILDBOT_TEST_DB_URL=postgresql+psycopg2:///bbtest #- TWISTED=latest SQLALCHEMY=latest TESTS=trial 'BUILDBOT_TEST_DB_URL=postgresql+pg8000:///bbtest?user=buildbot&password=x' # Test different versions of SQLAlchemy - TWISTED=17.9.0 SQLALCHEMY=1.1.0 TESTS=trial - TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial # Tests for the worker on old versions of twisted. 
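    # (TESTS=trial_worker runs only the buildbot_worker test suite; see the
    # "worker tests" entry in the script section below.)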
- TWISTED=17.9.0 SQLALCHEMY=latest TESTS=trial_worker matrix: fast_finish: true include: # flake8, isort, pylint, docs first as they're more likely to find issues - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=flake8 - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=isort - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=pylint HYPER_SIZE=m3 - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=docs - python: "3.6" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.7" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial - python: "3.8" env: TWISTED=latest SQLALCHEMY=latest TESTS=interop WORKER_PYTHON=2.7 # keep worker supported on py2.7 - python: "2.7" env: TWISTED=latest SQLALCHEMY=latest TESTS=trial_worker # Dependencies installation commands install: - pip install -U pip - condition: TESTS not in ("smokes", "trial_worker") cmd: pip install -r requirements-ci.txt - condition: TESTS == "trial_worker" cmd: pip install -r requirements-ciworker.txt - condition: TESTS == "docs" cmd: pip install -r requirements-cidocs.txt - condition: '"sqlite" not in BUILDBOT_TEST_DB_URL' cmd: pip install -r requirements-cidb.txt - condition: TESTS == "interop" cmd: | virtualenv -p python$WORKER_PYTHON /tmp/workerenv /tmp/workerenv/bin/pip install -e worker - | # pip installs for backward compat set -e if [ $TWISTED = trunk ]; then pip install git+https://github.com/twisted/twisted fi if [ $TWISTED != latest -a $TWISTED != trunk ]; then pip install Twisted==$TWISTED ; fi if [ $SQLALCHEMY != latest ]; then pip install sqlalchemy==$SQLALCHEMY; fi - step: !ShellCommand command: "pip check" warnOnFailure: True flunkOnFailure: False haltOnFailure: False name: "pip check" title: "pip check" before_script: # create real database for tests - condition: '"mysql" in BUILDBOT_TEST_DB_URL' cmd: sudo /etc/init.d/mysql start - condition: '"postgresql" in BUILDBOT_TEST_DB_URL' cmd: | sudo /etc/init.d/postgresql start # for pg8000 driver we can't use peer authentication or empty password, so set a dummy password # This also serves as a way to wait that the database is ready while ! psql -d bbtest -c 'ALTER USER "buildbot" WITH PASSWORD '"'x'"';' ; do sleep 1 ; done # Tests running commands script: # make frontend_install_tests takes 17 min, so we only do it post submit - title: frontend build tests condition: TESTS == "js_build" and TRAVIS_PULL_REQUEST cmd: make frontend - title: full frontend tests condition: TESTS == "js_build" and not TRAVIS_PULL_REQUEST cmd: make frontend_install_tests - title: frontend unit tests condition: TESTS == "js_unit" cmd: make frontend_tests_headless - title: master and worker tests condition: TESTS == "trial" cmd: trial --reporter=text --rterrors buildbot.test buildbot_worker.test - title: interop tests condition: TESTS == "interop" cmd: SANDBOXED_WORKER_PATH=/tmp/workerenv/bin/buildbot-worker coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test.integration.interop - title: worker tests condition: TESTS == "trial_worker" cmd: trial --reporter=text --rterrors buildbot_worker.test # run tests under coverage for latest only (it's slower..) 
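    # (Coverage data from this job is uploaded to codecov in after_script.)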
- title: coverage tests condition: TESTS == "coverage" cmd: coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test buildbot_worker.test # Run additional tests in their separate job - title: pylint condition: TESTS == "pylint" cmd: make pylint - title: flake8 condition: TESTS == "flake8" cmd: make flake8 - title: isort condition: TESTS == "isort" cmd: isort --check -df `git ls-files |grep '.py$'` # Build documentation - title: docs condition: TESTS == "docs" cmd: make docs # Run spell checker on documentation - title: spelling condition: TESTS == "docs" cmd: make -C master/docs SPHINXOPTS=-W spelling # Runs Sphinx' external link checker only on post submit build (it is too unstable) - title: linkcheck condition: TESTS == "docs" and not TRAVIS_PULL_REQUEST cmd: make -C master/docs SPHINXOPTS=-q linkcheck - title: maketarballs condition: TESTS == "smokes" cmd: make tarballs - title: protractor tests condition: TESTS == "smokes" cmd: ./common/smokedist.sh whl - title: tarballs protractor tests condition: TESTS == "smokes" and not TRAVIS_PULL_REQUEST cmd: ./common/smokedist.sh tar.gz notifications: email: false after_script: - | # codecov if [ $TESTS = coverage ]; then CODECOV_TOKEN="b80c80d7-689d-46d7-b1aa-59168bb4c9a9" codecov; fi # List installed packages along with their versions. - "pip list" sudo: false branches: # Only build main-line branches. only: - master - eight git: depth: 300 buildbot-2.6.0/.circleci/000077500000000000000000000000001361162603000151545ustar00rootroot00000000000000buildbot-2.6.0/.circleci/config.yml000066400000000000000000000054111361162603000171450ustar00rootroot00000000000000# Python CircleCI 2.0 configuration file # # Check https://circleci.com/docs/2.0/language-python/ for more details # version: 2 jobs: build: docker: # use the same build image as we use for metabuildbot - image: buildbot/metabbotcfg working_directory: ~/repo steps: - checkout # Download and cache dependencies - restore_cache: keys: - 3-dependencies-{{ checksum "requirements-ci.txt" }}-{{ checksum "requirements-cidocs.txt" }} # fallback to using the latest cache if no exact match is found - 3-dependencies- - run: name: install dependencies command: | env python3 -m venv .venv . .venv/bin/activate pip install -U pip 'setuptools<45.0.0' pip install -r requirements-ci.txt pip install -r requirements-cidocs.txt pip install pyinstaller - save_cache: paths: - .venv key: 3-dependencies-{{ checksum "requirements-ci.txt" }}-{{ checksum "requirements-cidocs.txt" }} - run: name: run tests command: | . .venv/bin/activate make docs make tarballs # Note that installing www/base depends on frontend_deps target being built, which is # a dependency of the tarballs target. pip install -e www/base pyinstaller -F pyinstaller/buildbot-worker.spec # we test the new generated binary with the global virtualenv SANDBOXED_WORKER_PATH=`pwd`/dist/buildbot-worker trial --reporter=text --rterrors buildbot.test.integration.interop - persist_to_workspace: root: dist paths: . 
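      # The dist/ output persisted above is re-attached by the "publish" job
      # below and uploaded to the GitHub release when a v* tag is pushed.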
- store_artifacts: path: master/docs/_build/html/ destination: docs - store_artifacts: path: dist destination: dist # publish pipeline that is run on tags publish: docker: # image that can push to github - image: cibuilds/github:0.10 steps: - attach_workspace: at: dist - run: name: upload binaries to github release command: | env # rename the buildbot-worker pyinstaller binary mv dist/buildbot-worker dist/buildbot-worker-linux-amd64-$CIRCLE_TAG.bin # upload the github release binary ghr -t $GITHUB_TOKEN -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace $CIRCLE_TAG dist/ workflows: version: 2 build-deploy: jobs: - build: filters: tags: only: /v.*/ - publish: requires: [build] filters: tags: only: /v.*/ branches: ignore: /.*/ buildbot-2.6.0/.coveragerc000077700000000000000000000000001361162603000207702common/coveragercustar00rootroot00000000000000buildbot-2.6.0/.github/000077500000000000000000000000001361162603000146615ustar00rootroot00000000000000buildbot-2.6.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000012611361162603000204620ustar00rootroot00000000000000## Remove this paragraph If you don't remove this paragraph from the pull request description, this means you didn't read our contributor documentation, and your patch will need more back and forth before it can be accepted! Please have a look at our developer documentation before submitting your Pull Request. http://docs.buildbot.net/latest/developer/quickstart.html And especially: http://docs.buildbot.net/latest/developer/pull-request.html ## Contributor Checklist: * [ ] I have updated the unit tests * [ ] I have created a file in the `master/buildbot/newsfragments` directory (and read the `README.txt` in that directory) * [ ] I have updated the appropriate documentation buildbot-2.6.0/.github/stale.yml000066400000000000000000000017451361162603000165230ustar00rootroot00000000000000# Configuration for probot-stale - https://github.com/probot/stale # Number of days of inactivity before an Issue or Pull Request becomes stale daysUntilStale: 60 # Number of days of inactivity before a stale Issue or Pull Request is closed daysUntilClose: 67 # Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable exemptLabels: [] # Label to use when marking as stale staleLabel: stalled # Comment to post when marking as stale. Set to `false` to disable markComment: > This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. # Comment to post when removing the stale label. Set to `false` to disable unmarkComment: false # Comment to post when closing a stale Issue or Pull Request. 
Set to `false` to disable closeComment: "closing due to our stalled pull request policy" # Limit to only `issues` or `pulls` only: pulls buildbot-2.6.0/.isort.cfg000066400000000000000000000007021361162603000152170ustar00rootroot00000000000000[settings] line_length=110 known_future_library=future known_standard_library=pkg_resources,html known_twisted=twisted,zope,autobahn,klein,txaio known_mock=mock known_third_party=migrate,sqlalchemy,ldap3,txrequests,requests,MySQLdb,coverage,jinja2,dateutil,sphinx,setuptools,jwt,flask,docutils,aiohttp known_first_party=buildbot,buildbot_worker force_single_line=1 sections=FUTURE,FUTURE_LIBRARY,STDLIB,THIRDPARTY,MOCK,TWISTED,FIRSTPARTY,LOCALFOLDER buildbot-2.6.0/.mailmap000066400000000000000000000077071361162603000147550ustar00rootroot00000000000000Abdelrahman Hussein A. T. Hofkamp Amber Yust Andrew Melo Aurélien Bompard Ben Hearsum Ben Hearsum Ben Hearsum Benoît Allard Benoît Allard Benoît Allard Brian Warner Brian Warner Brian Warner Brian Warner Chad S Metcalf Charles Lepple Chris Soyars Dan Scott Daniel Dunbar Daniel Dunbar Douglas Hubler Dustin J. Mitchell Dustin J. Mitchell Dustin J. Mitchell Gary Poster Gary Poster Georges Racinet Geraud Boyer Greg Ward Ian Zimmerman Ian Zimmerman John Carr John Ford John O'Duinn Jon Olsson Jonathan S. Romero Joshua Kugler Justin Wood Justin Wood Justin Wood Justin Wood Kristian Nielsen Lital Natan Louis Opter Louis Opter Marc-Antoine Ruel Marc-Antoine Ruel Marcus Lindblom Mark Lakewood Mark Lakewood Matisse Enzer Michael MacDonald Michael MacDonald Nate Bragg Neil Hemingway Neil Hemingway Nicolas Sylvain Pierre Tardy Quentin Raynaud Randall Bosetti Randall Bosetti Rene Müller Rene Müller Scott Garman Stefan Seefeld Stefan Seefeld Stefan Zager Steve "Ashcrow" Milner William Deegan Zooko Wilcox-O'Hearn adam Harry Borkhuis Andy Howell buildbot-2.6.0/.mention-bot000066400000000000000000000001041361162603000155500ustar00rootroot00000000000000{ "userBlacklist" : [ "tomprince", "djmitche" ] } buildbot-2.6.0/.pyup.yml000066400000000000000000000003551361162603000151220ustar00rootroot00000000000000# update schedule, default is not set # the bot will visit the repo once and bundle all updates in a single PR for the given # day/week/month schedule: "every week" # allowed ["every day", "every week", "every two weeks", "every month"] buildbot-2.6.0/.travis.yml000066400000000000000000000042411361162603000154330ustar00rootroot00000000000000# Travis CI configuration file # http://about.travis-ci.org/docs/ dist: xenial addons: postgresql: "9.4" apt_packages: - enchant - aspell - aspell-en - ispell - iamerican services: - mysql - postgresql language: python # Available Python versions: # http://about.travis-ci.org/docs/user/ci-environment/#Python-VM-images python: - "3.8" env: matrix: # we now use travis only for real database testing # travis containers do have much more optimized db installations - TWISTED=latest SQLALCHEMY=latest TESTS=coverage BUILDBOT_TEST_DB_URL=mysql+mysqldb://travis@127.0.0.1/bbtest?storage_engine=InnoDB # Configuration that runs tests with real PostgreSQL database with pg8000 and psycopg2 drivers - TWISTED=latest SQLALCHEMY=latest TESTS=coverage BUILDBOT_TEST_DB_URL=postgresql+psycopg2:///bbtest?user=postgres - TWISTED=latest SQLALCHEMY=latest TESTS=coverage BUILDBOT_TEST_DB_URL=postgresql+pg8000:///bbtest?user=postgres # Dependencies installation commands install: - pip install -U pip - pip install -r requirements-ci.txt - pip install -r requirements-cidb.txt - "if [ $TWISTED = trunk ]; then pip install 
git+https://github.com/twisted/twisted ; fi" - "if [ $TWISTED != latest -a $TWISTED != trunk ]; then pip install Twisted==$TWISTED ; fi" - "if [ $SQLALCHEMY != latest ]; then pip install sqlalchemy==$SQLALCHEMY; fi" before_script: # create real MySQL database for tests - mysql -e 'create database bbtest;' # create real PostgreSQL database for tests - psql -c 'create database bbtest;' -U postgres # Tests running commands script: # run real db tests under coverage to have several merging coverage report # https://github.com/codecov/support/wiki/Merging-Reports - "if [ $TESTS = coverage ]; then coverage run --rcfile=.coveragerc $(which trial) --reporter=text --rterrors buildbot.test buildbot_worker.test ; fi" notifications: email: false after_success: - "if [ $TESTS = coverage ]; then codecov ; fi" after_script: # List installed packages along with their versions. - "pip list" sudo: false branches: # Only build main-line branches. only: - master - eight git: depth: 300 buildbot-2.6.0/CONTRIBUTING.rst000066400000000000000000000033401361162603000157620ustar00rootroot00000000000000Contributing to Buildbot ======================== .. contents:: :local: We value your contribution to Buildbot and thank you for it! If it happens that your contribution is not reviewed within two days, please do not hesitate to remind us about it by leaving a comment "Please review this PR". What appears below is just a quick summary. See http://trac.buildbot.net/wiki/Development for the full story. Issues, Bugs, Tickets --------------------- Please file tickets for any bugs you discover at https://github.com/buildbot/buildbot/issues. It is not necessary to file a bug if you are preparing a patch. Submitting Patches ------------------ See http://trac.buildbot.net/wiki/SubmittingPatches for the details. Your contribution must be licensed under the GPLv2, and copyright assignment is not expected. See http://trac.buildbot.net/wiki/LicensingYourContribution for details. You should run ``common/validate.sh`` before sending your patches. Also you can install our git hook for validating and fixing most common coding style issues :: cp common/hooks/post-commit .git/hooks Review ------ Buildbot's code-review process is described at http://trac.buildbot.net/wiki/PatchReview. The important point to know is that Buildbot requires a positive review (adding the "merge-me" label) before a change is eligible to be merged. While we try to perform reviews in a timely fashion, if your review has lagged for a week or more please do feel free to nag us in whatever way is easiest for you. Development Tips ---------------- The easiest way to hack on Buildbot is in a ``virtualenv``. See http://docs.buildbot.net/latest/developer/tests.html#quick-start for a description of how to set up such a thing, and how to run the test suite. buildbot-2.6.0/LICENSE000066400000000000000000000354221361162603000143340ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. 
This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. 
You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS buildbot-2.6.0/Makefile000066400000000000000000000123011361162603000147560ustar00rootroot00000000000000# developer utilities DOCKERBUILD := docker build --build-arg http_proxy=$$http_proxy --build-arg https_proxy=$$https_proxy .PHONY: docs pylint flake8 virtualenv VENV_NAME:=.venv$(VENV_PY_VERSION) PIP?=$(VENV_NAME)/bin/pip VENV_PY_VERSION?=python3 WWW_PKGS := www/base www/console_view www/grid_view www/waterfall_view www/wsgi_dashboards www/badges WWW_EX_PKGS := www/nestedexample www/codeparameter WWW_DEP_PKGS := www/guanlecoja-ui www/data_module ALL_PKGS := master worker pkg $(WWW_PKGS) WWW_PKGS_FOR_UNIT_TESTS := $(filter-out www/badges, $(WWW_DEP_PKGS) $(WWW_PKGS)) ALL_PKGS_TARGETS := $(addsuffix _pkg,$(ALL_PKGS)) .PHONY: $(ALL_PKGS_TARGETS) # build rst documentation docs: $(MAKE) -C master/docs # check rst documentation docschecks: $(MAKE) -C master/docs SPHINXOPTS=-W spelling $(MAKE) -C master/docs SPHINXOPTS=-q linkcheck # pylint the whole sourcecode (validate.sh will do that as well, but only process the modified files) pylint: $(MAKE) -C master pylint; master_res=$$?; \ $(MAKE) -C worker pylint; worker_res=$$?; \ if [ $$master_res != 0 ] || [ $$worker_res != 0 ]; then exit 1; fi # flake8 the whole sourcecode (validate.sh will do that as well, but only process the modified files) flake8: $(MAKE) -C master flake8 $(MAKE) -C worker flake8 flake8 --config=common/flake8rc www/*/buildbot_*/ flake8 --config=common/flake8rc www/*/setup.py flake8 --config=common/flake8rc common/*.py frontend_deps: $(VENV_NAME) $(PIP) install -e pkg $(PIP) install mock wheel buildbot cd www/build_common; yarn install --pure-lockfile for i in $(WWW_DEP_PKGS); \ do (cd $$i; yarn install --pure-lockfile; yarn run build); done frontend_tests: frontend_deps for i in $(WWW_PKGS); \ do (cd $$i; yarn install --pure-lockfile); done for i in $(WWW_PKGS_FOR_UNIT_TESTS); \ do (cd $$i; yarn run build-dev || exit 1; yarn run test || exit 1) || exit 1; done frontend_tests_headless: frontend_deps for i in $(WWW_PKGS); \ do (cd $$i; yarn install --pure-lockfile); done for i in $(WWW_PKGS_FOR_UNIT_TESTS); \ do (cd $$i; yarn run build-dev || exit 1; yarn run test --browsers BBChromeHeadless || exit 1) || exit 1; done # rebuild front-end from source frontend: frontend_deps for i in pkg $(WWW_PKGS); do $(PIP) install -e $$i || exit 1; done # do installation tests. 
Test front-end can build and install for all install methods frontend_install_tests: frontend_deps trial pkg/test_buildbot_pkg.py # upgrade FE dependencies frontend_yarn_upgrade: for i in $(WWW_PKGS) $(WWW_EX_PKGS) $(WWW_DEP_PKGS); \ do (cd $$i; echo $$i; rm -rf yarn.lock; yarn install || echo $$i failed); done # install git hooks for validating patches at commit time hooks: cp common/hooks/* `git rev-parse --git-dir`/hooks rmpyc: find master worker \( -name '*.pyc' -o -name '*.pyo' \) -exec rm -v {} \; isort: isort -rc worker master git diff --name-only --stat "HEAD" | grep '.py$$' | xargs autopep8 -i git commit -a -m "isort+autopep8 run" docker: docker-buildbot-worker docker-buildbot-master echo done docker-buildbot-worker: $(DOCKERBUILD) -t buildbot/buildbot-worker:master worker docker-buildbot-master: $(DOCKERBUILD) -t buildbot/buildbot-master:master master $(VENV_NAME): virtualenv -p $(VENV_PY_VERSION) --no-site-packages $(VENV_NAME) $(PIP) install -U pip setuptools # helper for virtualenv creation virtualenv: $(VENV_NAME) # usage: make virtualenv VENV_PY_VERSION=python3.4 $(PIP) install -e pkg \ -e 'master[tls,test,docs]' \ -e 'worker[test]' \ buildbot_www packaging towncrier @echo now you can type following command to activate your virtualenv @echo . $(VENV_NAME)/bin/activate TRIALOPTS?=buildbot .PHONY: trial trial: virtualenv . $(VENV_NAME)/bin/activate && trial $(TRIALOPTS) release_notes: $(VENV_NAME) test ! -z "$(VERSION)" # usage: make release_notes VERSION=0.9.2 yes | towncrier --version $(VERSION) --date `date -u +%F` git commit -m "relnotes for $(VERSION)" $(ALL_PKGS_TARGETS): cleanup_for_tarballs frontend_deps . $(VENV_NAME)/bin/activate && ./common/maketarball.sh $(patsubst %_pkg,%,$@) cleanup_for_tarballs: find master pkg worker www -name VERSION -exec rm {} \; rm -rf dist mkdir dist .PHONY: cleanup_for_tarballs tarballs: $(ALL_PKGS_TARGETS) .PHONY: tarballs clean: git clean -xdf # helper for release creation release: virtualenv test ! -z "$(VERSION)" # usage: make release VERSION=0.9.2 test -d "../bbdocs/.git" # make release should be done with bbdocs populated at the same level as buildbot dir GPG_TTY=`tty` git tag -a -sf v$(VERSION) -m "TAG $(VERSION)" git push buildbot "v$(VERSION)" # tarballs are made by circleci.yml, and create a github release export VERSION=$(VERSION) ; . .venv/bin/activate && make docs rm -rf ../bbdocs/docs/$(VERSION) # in case of re-run cp -r master/docs/_build/html ../bbdocs/docs/$(VERSION) cd ../bbdocs && git pull . .venv/bin/activate && cd ../bbdocs && make && git add . && git commit -m $(VERSION) && git push @echo When tarballs have been generated by circleci: @echo make finishrelease finishrelease: rm -rf dist python3 ./common/download_release.py rm -rf ./dist/v* ./common/smokedist.sh twine upload --sign dist/* pyinstaller: virtualenv $(PIP) install pyinstaller $(VENV_NAME)/bin/pyinstaller -F pyinstaller/buildbot-worker.spec buildbot-2.6.0/README.rst000066400000000000000000000027301361162603000150120ustar00rootroot00000000000000========== Buildbot ========== -------------------------------------- The Continuous Integration Framework -------------------------------------- Buildbot is based on original work from `Brian Warner `_, and currently maintained by `the Botherders `_. Visit us on http://buildbot.net ! 
|travis-badge|_ |codecov-badge|_ |readthedocs-badge|_ |fossa-badge|_ Buildbot consists of several components: * master * worker * www/base * www/console_view * www/waterfall_view and so on See the README in each subdirectory for more information Related repositories: * https://github.com/buildbot/buildbot-media - Buildbot-related media * https://github.com/buildbot/buildbot-website - Source for http://buildbot.net .. |travis-badge| image:: https://travis-ci.org/buildbot/buildbot.svg?branch=master .. _travis-badge: https://travis-ci.org/buildbot/buildbot .. |codecov-badge| image:: http://codecov.io/github/buildbot/buildbot/coverage.svg?branch=master .. _codecov-badge: http://codecov.io/github/buildbot/buildbot?branch=master .. |readthedocs-badge| image:: https://readthedocs.org/projects/buildbot/badge/?version=latest .. _readthedocs-badge: https://readthedocs.org/projects/buildbot/builds/ .. |fossa-badge| image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fbuildbot%2Fbuildbot.svg?type=shield .. _fossa-badge: https://app.fossa.io/projects/git%2Bgithub.com%2Fbuildbot%2Fbuildbot?ref=badge_shield buildbot-2.6.0/RELEASING.rst000066400000000000000000000060001361162603000153600ustar00rootroot00000000000000Creating a release ================== This document is documentation intended for Buildbot maintainers. It documents the release process of Buildbot. Step 1: Release notes PR ------------------------ Open a new branch (e.g. `release`) and run the following: . .venv/bin/activate && make release_notes VERSION=x.y.z This collects the release notes using the `towncrier` tool and then commits the result. This step is done as a PR so that CI can check for spelling errors and similar issues. Local checks are insufficient as spelling check in particular depends on what dictionaries are installed. It's best to run `make docs` afterwards and check `master/docs/_build/html/relnotes/index.html` file for obvious rendering errors. This will have much faster turnaround compared to if the error is noticed after the CI runs. If any errors are found, just amend the commit created by `make release_notes`. Step 2: Merge the release notes PR ---------------------------------- Step 3: Perform actual release ------------------------------ This step requires the Buildbot git repository to contain `buildbot` remote that points to https://github.com/buildbot/buildbot and can be pushed to. Additionally, the Buildbot docs repository (https://github.com/buildbot/bbdocs) must be checked out at `../bbdocs` path. Pull the merge commit created on the `master` branch during the step 2. Then run: make release VERSION=x.y.z This will create the required tags, make documentation, copy it to bbdocs repo and push everything. Step 4: Wait for CircleCi to create release tarballs ---------------------------------------------------- The push of tags created during step 3 will activate CircleCi configuration that generates tarballs and uploads them to GitHub. This is a good time to draft a release on the GitHub UI at https://github.com/buildbot/buildbot/releases. Step 5: Upload release to pypi ------------------------------ This step requires GitHub Hub tool to be installed and authorized to GitHub (https://github.com/github/hub). Additionally you have to have access to GPG key that is used to sign the releases. Finally, you have to be added as a maintainer to all Buildbot PyPi projects. To complete the release just run the following: make finishrelease The above will download the releases from GitHub and upload them using twine. 
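For reference, running `make finishrelease` boils down to the following commands, taken from the `finishrelease` target in the top-level Makefile:

    rm -rf dist
    python3 ./common/download_release.py
    rm -rf ./dist/v*
    ./common/smokedist.sh
    twine upload --sign dist/*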
If you get bytes-related error after entering Pypi password, you'll need to upgrade Twine. Step 6: Announce the release ---------------------------- This step involves announcing the release of the new Buildbot version on several channels. Write an email to the BuildBot mailing lists: announce@buildbot.net, devel@buildbot.net, users@buildbot.net. Write a blog post on the Buildbot Medium account: https://medium.com/buildbot. The blog post should include the highlights of the release in less monotonous style than the release notes. Any extra important remarks can be added there. Lastly, include the output of `git shortlog --no-merges -ns v...v` to recognize the contributors. buildbot-2.6.0/appveyor.yml000066400000000000000000000047321361162603000157170ustar00rootroot00000000000000# AppVeyor CI # https://www.appveyor.com/docs environment: matrix: # For Python versions available on AppVeyor, see # http://www.appveyor.com/docs/installed-software#python - PYTHON: "C:\\Python35" - PYTHON: "C:\\Python36" install: - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "python -c \"import sys; print(sys.prefix)\"" - "python -c \"import sys; print(sys.exec_prefix)\"" - "python -c \"import sys; print(sys.executable)\"" - "python -V -V" - "python -m pip install -U pip" - "python -m pip install -r requirements-ci.txt" # Twisted requires pywin32 on Windows in order to spawn processes. # for some reason it needs to be installed last or there will be dll loading issues - "python -m pip install \"pywin32\"" - "python -m pip list" - "python -c \"import win32api\"" build: false test_script: - "coverage run --rcfile=common/coveragerc -m twisted.trial --reporter=text --rterrors buildbot.test buildbot_worker.test" - ps: | echo $ENV:PYTHON if ($env:PYTHON -imatch 'C:\\Python27') { iex 'pyinstaller -F pyinstaller/buildbot-worker.spec' iex 'appveyor PushArtifact dist\\buildbot-worker.exe' } on_success: - "coverage xml --rcfile=common/coveragerc -o coverage.xml -i" - "codecov" on_failure: # Store _trial_temp directory as artifact on build failure. # See - ps: | $root = Resolve-Path _trial_temp; [IO.Directory]::GetFiles($root.Path, '*.*', 'AllDirectories') | % { Push-AppveyorArtifact $_ -FileName $_.Substring($root.Path.Length + 1) -DeploymentName trial-log } # Uncomment this part if you want to interactively debug tests on AppVeyor. # This will pause build at the end and setup RDP server. # Connection details will be printed in the build log. 
# For detail see: https://www.appveyor.com/docs/how-to/rdp-to-build-worker #on_finish: #- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) deploy: release: $(APPVEYOR_REPO_TAG_NAME) description: 'windows binary for buildbot-worker' provider: GitHub auth_token: secure: HQNlcAyaY9Jznbl77rfNatZG62Gg+qFY7emzj5n3Wu16fkr8dLlFNTKOJlfXO5uK artifact: "buildbot-worker.exe" draft: false prerelease: false on: appveyor_repo_tag: true # deploy on tag push only PYTHON: "C:\\Python35" buildbot-2.6.0/common/000077500000000000000000000000001361162603000146115ustar00rootroot00000000000000buildbot-2.6.0/common/code_spelling_ignore_words.txt000066400000000000000000000401571361162603000227510ustar00rootroot00000000000000abspath accesskey accumulateclasslist ack acknowledgement activation activations actuateat actuateattimer actuateok adaptor adbapi addbuild addbuildset addbuildsetcalls addbuildsetforchanges addbuildsetforsourcestamp addbuildsetforsourcestampswithdefaults addbuildsetforxxx addchange addedbuilder addedchanges addlog addservice addsourcestamp addsourcestampset addstep addsuppression admin admin's admins agenced aiohttp allard allen allfiles allowanonymousaccess allowforce allowshutdown allura alwaysrun alwaysuselatest amazonaws andrew andy antoine anysentinel apache api apimaster app appdata applicative apps araujo arg args argv armstrong ascii asdict ashcrow aslist assertable assertargspecmatches assertbuildset assertconsumingchanges assertequal assertisinstance assertproduceswarning assertraisesregex assertraisesregexp assertregex assertregexpmatches assertresults async asynclrucache atlassian atm atomicity attachscheduler attr attrs attributeerror aug auth authz autocommit autoconf autocreatetables autodoc autogenerated autorelease avatarmethods awk aws axx backend backends backlinks backoff backporting backslashing basedir basename basepath baserev basetgz baseurl basicauth baz bb bbot bc bdict bdictlist bear's behaviour ben benchmarking benjamin bennetts berlin bitbucket blahblah blamelist bldr blocksiag blocksize bobrik bool boolean boston botmaster botmaster's botname boto br brainerd branchfile branchtype brd brdict brdicts breq breqs brian brid brid's brids brs bruheim bsd bsid bsids buid buidlrequestcompletions buidlsets builbot buildable buildargs buildbot buildbotnetusagedata buildbotNetUsageData buildbotoptions buildbot's buildbots buildbotting buildboturl buildclass builddir builderadded builderchangedstate builderid buildermasterids buildername buildernames builderremoved builderstatus buildetaupdate buildfactory buildfinished buildid buildmaster buildmaster's buildmasters buildnum buildot buildreq buildrequest buildrequestcompletions buildrequestdistributor buildrequestresults buildrequests buildrequestsconnectorcomponent buildresult buildroot build's buildset buildsetcomplete buildset's buildsets buildsetsubmitted buildslave buildslaves buildstarted buildstatus buildstep buildstepmixin buildsteps buildstepstatus buildworker builtin builtins bulid bulider buliderid butbucket bwaitall bwverbose bytestring bytestrings bzip bzr caas calderone callbacked callconsumer calllater calllater's callremote callwhenrunning cancelcleanshutdown cancelled cancelling canonicalize canstartbuild canstartwithworkerforbuilder cb cbc cbd cd ce cfg cgi chainedprotocolfactory changeadded changedict changefilter changehookresource changehorizon changeid changeids changemaster change's 
changesconnectorcomponent changeset changesource changesourceid changesources charset charsets chdict chdir chdir'ed checkconfig checkfirst checkin checkip checkoutdelay checkworkerhascommand childs chmod choosenextbuild christopher chroot chunkify ci clientconnectionlost clientid clientids clientsecret clobberonbranchchange clobberonfailure closestdin cls cmake cmd cmdclass cmdline cmdref cmopared cmp codebase codebasegenerator codebases codec collapserequests comitters commandcomplete commandinterrupted commandname commitish committer committers committer's compability comparablemixin comparaison comparator compat compatiblity completers compresslog computesourcerevision comspec concat conchc config configfile configfunc configs configurator configurators configjson conn connectedness connectionmade connectionpool contrib copyablefailure cors cowbuilder cppcheck cpu cpython cray createabsolutesourcestamps createmaster createsummary createworker cred credc creds cribed croniter css ctime ctrl currentbuilds currentstep cvar cvs cvsroot daemonize dan darcs datafields datareceived datetime dateutil's dave david dbapi DBConnector dbpool dbs ddl de debian debounce debounced debouncing decoderc deepcopy defaultbranch defaultdeny defaultwhenfalse deferredlist deferred's deferreds defertothread delayedcall denormalized deprecatedmoduleattribute descriptionsuffix desynchronization dev DEVNULL dict dicts dicttype diff diffs dir directoryenterpattern directoryupload dirname dirs dirwatcher disownserviceparent distutils dn dnotify doclobber doconfig docopy docstring doctest doctests documentclass dom donovan dostepif downloadfile downloadstring dradez dss du dup durations durchmesser dustin ee eg egypt encoding encodings enforcechosenworker enginestrategy enosuch ensurehasssl entitytype env environ eof epydoc eqconnectionpool eric errback errback'd errbacked errbacks err'ed errno erroring errormessages errortoo et evaluatecommand exe execfile executables exe's expanduser expcalls expectcomplete expectedbuildset expectedfailures expectedmessages expectedreturn expectmessage expectsuccess extractall extrapackages eyal failedtogetperspective fakebotmaster fakeconnector fakedb fakemaster fakemethod fakeopen fakeserviceparent fakeworkerworker fallback fallbacks favicon fbi fd fdescfs fdopen fds featureful feb fifo fileloc filename FILENAME filenames filepath filesystem filewriter findbuilderid findchangesourceid findsomethingid finduserbyattr firewaall firstname fixme fixup fk fks flunkonfailure flunkonwarnings flushloggederrors flushwarnings fn fnmatch foo foobar foogit foreignkey formatargspec formatinterval fp fqdn freebsd freshcvs fri frontend fromchdict fromdir fs fullname func funcname gardiner gatherresults gayton gc gcc gc'd gerrit getaddress getargspec getattr getbuild getbuilderid getbuildrequest getbuildrequests getbuildsetproperties getchange getchangeclassifications getchangefromssid getchanges getchangesource getchangesources getcodebase getcommand getconfigfilefromtac getdescription getevent getexitcodeprocess getfromkwargs getlastfinishedbuild getlog getloginresource getloglines getname getnextbuildtime getnumber getpage getperspective getpid getppid getprocessoutput getprocessoutputandvalue getprocessoutputandvaluewithinput getpwnam getrenderingfor getresponsibleusersforbuild getresults getrlimit getrootobject getschedulers getservice getslaveinfo getsourcestamp getspec getstate getsteps getter gettestresults gettext gettimernameforchange geturlforbuild geturlforthing getworker 
getworkerinfo gf gib giger gitattributes github gitlab gitorious gitpoller globals gmail gmt google googlegroups googlesource gopts gotchange gotperspective gotrootobject gpo gracefulshutdown gravatar groupmemberpattern gtk guanlecoja gz gzip haltonfailure hammond handlejobfile hardcoded hartwell hasattr hashable hashcolumns hashedcolumns haskey hasproperty hermann herve hg hinputwritetemp hmac hoc hostname hostnames houtreadtemp howto hprocess hthread html http httpclientservice https ibuilderstatus ibuildrequestcontrol ibuildrequeststatus ibuildsetstatus ibuildstatus ichange ico icredentialschecker ics i'd ident idn idsdeferred ie iff i'm impl ina incrementing indices influxdb infos init initializers initialstdin inline inlinecallbacks innodb inrepo inrepos inserttestdata installdir instanceuri instantiation instantiations insubstantiate insubstantiated insubstantiating insubstantiation integrations internet interoperability interruptable interruptsignal intialization intranet io ip ippolito iproperties iproperty iprops iprotocol irc ircclient irealm irenderable isavailable isbusy isdir ish isinstance ismessageneeded isotoma istatus istatusreceiver isworkerdir itamar iter iterable ivar ivars jã jacobs james jason java javascript jellyable jessica jinja jmason jobdir jobfiles jobid joe jonathan js json jsonable jsonapi json'd jsonrc jsonrpc jwt keepalive keepaliveinterval keepalives keepalivetimeout keepstamp keepstderr keepstdinopen keepstdout kevin keyerror keyfile keypair keypairs keyring klass knielsen kube kubectl kubernetes kv kvm kwarg kwargs lange largebinary lastactuation lastlog lastname lastresort lasttrigger latentworkerforbuilder latin lazylogfiles ldap ldapuserinfo lefkowitz legator len libvirt lifecycle lintian linux listdir listentcp liveness loadconfig localhost localtime lockaccess lockclass lockfiles lockid logchunk logchunks logentries logentry logenviron logfile logfiles logfinished logid login logmsg logname logout logrotate logstarted longaccess lookahead lookup lookups loopback loseconnection lostremote lotem lp lru mailchangesource maildir maildirs maintainance makedirs makefile makerbase makeremoteshellcommand maketelnetprotocol manba mangold manifestbranch manifestfile manifestoverrideurl manifesturl maraujop marius markh marshalled maruel mary massachusetts masterdest masterfqdn masterid masterlock mastersideworker masterstatus matcher matchers matthew maxcount maxcountforworker maxdelay maxint maxlength maxsize maxthreads maxtime maybeaddwarning maybebuildsetcomplete maybedeferred maybestartbuild maybestartbuilds maybestartbuildsforbuilder maybestartbuildsforworker maybestartbuildson maybestartbuildsonbuilder mb md meijer melo merchantability mesos messagereceived metadata meth methodtemplate methodundertest microsoft middleware miguel milner mingw minidom minidom's minikube minimalistic mintime mispelling misr mit mixedcase mixin mkdir mkstemp mobarak moduleauthor mon monkeypatch monkeypatches moshe moto mq mr msc msdn msg msgbody msys mtime mtn mtr mtrlogobserver multi multiline multimaster multiservice multiservice's multithreading mumbo munge mysql mysqlclient mysqld mysql's namedservice namedtuple namespace namespaces nat nestedlist netstring netstrings newcred newmsg nextbuild nextworker nfs ngrok niklaus nobody's nodaemon nonexisting noninfringement noop noqa noreconfig noreply nosuchmethod notabranch notifyondisconnect nov novaclient noworkererror nstderr nt nullable num oauth oauthconf objectid offline oid ok oldcred online onlinepubs onlyifchanged 
onlyimportant openfile opengroup oposite optfile optflags optparameters ordermatters ored org orgs orm os osaf osx otherstatusreceivers overridable overridebuildsetmethods pam paramiko params parms parseable parsegotrevision parsejob passwd patchable patchid patchlevel patchset patchup pathnames pathpattern pathpatterns pathsep paul pauseproducing pavel pb pbamanger pbconnectionlost pbmanager pbmanager's pbuilder pendingbuilds pergamenshchik periodicbuildtimer peticolas pfactory pid pidfile pidfiles pids pitrou pluggableauthenticationmoduleschecker plugin plugins pn pollatlaunch pollatreconfigure poller pollers pollingchangesource pollinterval popnextbuild portstr portstrs posix postdata postgres postoptions postpath ppl pragma pre precompute preferlastchangedrev prefilter preflight prepend prepended prepends preston printstatus prioritizebuilders privmsgs proc processdone processended processterminated procgroupprocess produceevent programmatically progressmetrics proj projectname proto prs pserver pty ptys pullrequests pushjet pwd py pycrypto pyd pyflakes pygments pyjade pylint pylint's pyparsing pythonpath qa qmail quickmode quickstart quiesce radez raiseexpectationfailure ralph raml rc reactorname readlines readsourcedata readthedocs realdatabasemixin realmasterlock realworkerlock reasonstring recoded recompress reconf reconfig reconfig'd reconfigresource reconfigs reconfigservice reconfigservicebuilders reconfigservicewithconstructorargs reconfigservicewithsibling reconfigurability reconfigurable reconfiguring reconnection recurse redhat reentrant refactor refcount refetch regex regexes regexp regexps regexs registeradapter reid rejectedworkers releaselocks releasers remoteaddresses remotecommand remotecomplete remotegetworkerinfo remote's remoteshellcommand remotestep remotetransfer removeservice renderable renderables renderer renderers repl repo repodownloads repoll reponame reponds repourl repr req requestavatar requestavatarid requestcancelled requestjson requestsubmitted requeue requeued requiredargs reschedulenextbuild resetmocks resourceneedsreconfigs resourcetype restrmatcher restructuredtext resultdir resultsdeferred resultsmixin resultspec resumeproducing retreive retryable retryfetch reviewcb revisionfor revlink revno revnum revset revsets rewrap rewrapped rewraps rf rgen rhead rieder riley rmdir rmdirrecursive rmfile rmtree robocopy routingkey rp rpc rpmdir rpmlint rpms rpmspec rsa rsh rst rstrip rtype rtypes runcommand runinteraction runprocess runquery runstep's runtest runtime rv sa saas safecat saturday sb sched schedulerid schedulernames scheduler's schwarzian scm sean secpol secretkey sectionauthor seealso sendbuilderlist sendchange sendcompletionmessage sendmail sendstatus sendstderr sendstdout sendupdate seqdiag servicemanager setbuilderlist setchangesourcemaster setcommand setdefault setid setpgrp setproperty setserviceparent setstate settext setupbasedir setupbuildresults setupmailnotifier setupsite setupsourcestep setupstep sha shellcommand shlex showblamelist shtull shutil sig sighup sigkill signalled signalling signame sigterm sigtermtime sigtermtimeout simms simplifiable singlebranchscheduler slavedest slavesrc slowdb smallinteger smilner somecommand somedays sourcedata sourcedir sourceforge sourcestamp sourcestampid sourcestamps sourcestampset sourcetamp spam spamassassin spawnprocess specdir specfile specfiles split splitter spulec sql sqlalchemy sqlite sqlites src srcdir srcrpmdir srpm ssdict sse sshd sshpublickeydatabase ssid ssids ssl stackexchange 
stacklevel stackoverflow stacktrace startbuild startcommand startgettingperspective startlogin startmissingtimer startservice startup startvc startworker stat'd statm statusdict stderr stdin stdlib stdout stepetaupdate stepfinished stepid stepstarted steve stickysidebar stopbuild stop's stopservice stopworker str streamlog strerror strftime stringreceived stringsmatcher striplevel strports strptime sts subarg subclassability subclassed subclassers subclasses subclassing subcommand subcommands subdir subdirectories subdirectory subfields sublicense submittedat submodule submodules suboption suboptions subprocess subprocesses subquery substring subunit summarycb suppressionfile suppressions suppresstags svc svciddeferred svn svnpoller svn's svnuricanonicalize svnversion sw swartz symlink syncallbranches syncmail syncQuietly syntaxerror sys tac taichino tarball tbl tcp tcpdump teardown teardownbasedir tempfile terminateprocess testcase testcases testchanges testnamelimit testpath testslave testsuite texinfo textbox textlimit tf th that'll thijs thingie thingy thomas threadpool ths thurs thursday tid timedelta timestamp timezone timezones tld tls tmp tmpbb tmpl tochanges toctree todir todo tokenization tomprince topdir topfile toplevel tport trac traceback trauring travis treestabletimer treestabletimers treq trialargs trialmode triemstra triggerable tue tues tuesday tuple Tuple tuples twistd twistedmatrix twisted's txgithub txrequest txrequests txt tz ubuntu ufffd ui uid umask un unabbreviated unambiguity unclaim unclaimedbrdicts unclosed unconfigure unconfigured underpowered unencrypted unexpectedsuccesses ungrouped unhandled unicast unicode unicoded unicodified unicodify unique'd unittest unix unixpassworddatabase unlink unlinked unparsable unparseable unpatch unregister unregisters unserialized unsubscribe unsubscribed unsubscriptable unsubscription unsubsribed unsubstantiated untarring untracked upcall updatable updatebuilderlist updatebuildsummarypolicy updatefromkwargs updateinterval updatenum updateof updatesession updatesummary updatesummary's updatetarballage updateuser uploaddirectory uploadfile uppercased uri url urldecoded urllib urlopen urlparse urls urltext usa usageerror usechange uselog useprocgroup usepty usererrors username usetestcasenames usetls usr utc utf util utils uuid validator valuefromstring vc vcs vcsrevision vcx vda verifymessage verifymessages versioned versioning viewspec viff virtualenv visualstudio vm vms waitforbuilderlist waitforfinish waituntilavailable wal wamp wannabuild wantdata wantdb wantmq warner warningextractor warningpattern warnonfailure warnonwarnings wb weakref webclient webdriver webhook webhooks webserver websocket wehn werzeug wfb wget whatever's whereclauses whitespace wiki wikipedia wildcard winerror wireshark wkdir workdir workdirs workerbuilddir workerdest workerdir workerenvironment workerforbuilder workerforbuilders workerlock workermanager workername workernames workersrc workersrcs workerversion workerworker worststatus writesourcedata writestdin ws www xbsd xda xhtml xml xxab xxx yieldmetricsvalue za zadka zope buildbot-2.6.0/common/coveragerc000066400000000000000000000014161361162603000166560ustar00rootroot00000000000000[report] # Regexes for lines to exclude from consideration exclude_lines = # Have to re-enable the standard pragma pragma: no cover # Don't complain about missing debug-only code: def __repr__ if self\.debug # Don't complain if tests don't hit defensive assertion code: raise AssertionError raise NotImplementedError # 
Don't complain if non-runnable code isn't run: if 0: if __name__ == .__main__.: # 'pass' generally means 'this won't be called' ^ *pass *$ # conditionals on twisted versions aren't coverable if twisted.version include = master/* worker/* omit = # omit all of our tests */test/* # templates cause coverage errors */templates/* master/buildbot/scripts/buildbot_tac.tmpl buildbot-2.6.0/common/download_release.py000066400000000000000000000041031361162603000204700ustar00rootroot00000000000000#!/usr/bin/env python3 import os import requests import yaml def download(url, fn): print(url, fn) if os.path.exists(fn): return with open(fn, 'wb') as f: r = s.get(url, stream=True) for c in r.iter_content(1024): f.write(c) def main(): global s with open(os.path.expanduser("~/.config/hub")) as f: conf = yaml.safe_load(f) token = conf['github.com'][0]['oauth_token'] s = requests.Session() s.headers.update({'Authorization': 'token ' + token}) r = s.get("https://api.github.com/repos/buildbot/buildbot/releases/latest") r.raise_for_status() r = r.json() tag = r['name'] upload_url = r['upload_url'].split('{')[0] assets = s.get("https://api.github.com/repos/buildbot/buildbot/releases/{id}/assets".format(id=r['id'])) assets.raise_for_status() assets = assets.json() os.system("mkdir -p dist") for url in (a['browser_download_url'] for a in assets): if url.endswith(".whl") or url.endswith(".tar.gz"): fn = os.path.join('dist', url.split('/')[-1]) download(url, fn) # download tag archive url = "https://github.com/buildbot/buildbot/archive/{tag}.tar.gz".format(tag=tag) fn = os.path.join('dist', "buildbot-{tag}.gitarchive.tar.gz".format(tag=tag)) download(url, fn) sigfn = fn + ".sig" if os.path.exists(sigfn): os.unlink(sigfn) # sign the tag archive for debian os.system("gpg --output {} -b {}".format(sigfn, fn)) sigfnbase = os.path.basename(sigfn) r = s.post(upload_url, headers={'Content-Type': "application/pgp-signature"}, params={"name": sigfnbase}, data=open(sigfn, 'rb')) print(r.content) fnbase = os.path.basename(fn) r = s.post(upload_url, headers={'Content-Type': "application/gzip"}, params={"name": fnbase}, data=open(fn, 'rb')) print(r.content) # remove files so that twine upload do not upload them os.unlink(sigfn) os.unlink(fn) if __name__ == '__main__': main() buildbot-2.6.0/common/flake8rc000066400000000000000000000021271361162603000162350ustar00rootroot00000000000000[flake8] show-source = yes statistics = yes count = yes # gradually reduce to 100 (previously this warning was disabled) max-line-length = 140 # List of currently ignored PEP8 issues. Some of them definetely should be # enabled in future. 
# # E122 continuation line missing indentation or outdented # E123 closing bracket does not match indentation of opening bracket's line # (pep8 seems to misdiagnose this) # E126 continuation line over-indented for hanging indent # E128 continuation line under-indented for visual indent # E211 whitespace before '(' # E711 comparison to None should be 'if cond is None:' # E712 comparison to False should be 'if cond is False:' or 'if not cond:' # E721 do not compare types, use 'isinstance()' # W503 line break before binary operator # W504 line break after binary operator # E731 do not assign a lambda expression, use a def # H302 only import modules (we also import classes) # H301 one import per line (we rely on isort for that) # H306 sorted import (we rely on isort for that) ignore = E122,E123,E126,E128,E211,E711,E712,E721,E731,W503,W504,H302,H306,H301, buildbot-2.6.0/common/hooks/000077500000000000000000000000001361162603000157345ustar00rootroot00000000000000buildbot-2.6.0/common/hooks/post-commit000077500000000000000000000000411361162603000201300ustar00rootroot00000000000000common/validate.sh HEAD~ --quick buildbot-2.6.0/common/maketarball.sh000077500000000000000000000006561361162603000174360ustar00rootroot00000000000000#!/bin/bash set -e pkg=$1 ( cd ${pkg} rm -rf MANIFEST dist if [ ${pkg} == "master" ]; then python setup.py sdist # wheels must be build separately in order to properly omit tests python setup.py bdist_wheel else # retry once to workaround instabilities python setup.py sdist bdist_wheel || (git clean -xdf; python setup.py sdist bdist_wheel) fi ) cp ${pkg}/dist/* dist/ buildbot-2.6.0/common/merge_and_pep8.sh000066400000000000000000000050371361162603000200270ustar00rootroot00000000000000#!/bin/bash function status() { _ESC=$'\e' LTCYAN="$_ESC[1;36m" NORM="$_ESC[0;0m" echo "" echo "${LTCYAN}-- ${*} --${NORM}" } function newshell() { echo "I will launch a new shell. When you are done, just exit the shell" echo "and I will continue the process" bash echo "ok lets continue" } function unittests() { status run the whole test suite as a double check find . -name \*.pyc -exec rm {} \; trial --reporter=text buildbot_worker buildbot if [[ $? != 0 ]] then echo "Oups.. the tests are failing, better resolve them now before the big autopep8 work" newshell fi } if [ $# -eq 0 ]; then echo "USAGE: common/merge_and_pep8.sh " echo " This script will merge your branch to master" echo " and apply pep8" echo "Run this if you want to contribute a branch based on pre-autopep8 rework" exit 1 fi MASTER=$1 PREPEP8=`git log $MASTER --grep "PRE_PEP8_COMMIT" --pretty="format:%H"` POSTPEP8=`git log $MASTER --grep "POST_PEP8_COMMIT" --pretty="format:%H"` status "merging against last commit before autopep8" git merge $PREPEP8 if [[ $? != 0 ]] then echo "Please fix the merge conflicts between your branch, and last commit before autopep8!" newshell fi status "merging against first commit after autopep8 and take our version when there are conflicts" git merge $POSTPEP8 # autopep8 takes 1h30 to run on the whole codebase, so let git resolve the obvious merge conflicts. # using -s recursive -x ours works at chunk level, which proved not to work for nine -> master merge if [[ $? 
!= 0 ]] then status "resolve conflicts by checking out ours file" git status --porcelain |egrep "^DU" | awk '{print $2}' | xargs git rm git status --porcelain |egrep "^UU" | awk '{print $2}' | xargs git checkout --ours git status --porcelain |egrep "^UU" | awk '{print $2}' | xargs git add git commit --no-edit fi unittests status "re-apply autopep8 on the files modified by our branch" git diff --name-only $POSTPEP8 | ( # there is no real use of displaying output of autopep8 # so we just display a simple progress status FILES=() while read filename; do FILES+=($filename) done n=0 for filename in ${FILES[@]}; do n=$(($n + 1)) echo -n $(($n * 100 / ${#FILES[@]}))% echo " processing $filename" echo "$filename" | bash common/style_check_and_fix.sh >&/dev/null done ) git commit -s -a -m "re-auto-pep8" unittests status "finally merge to latest version of master" git merge $MASTER buildbot-2.6.0/common/porttostable.py000066400000000000000000000025421361162603000177100ustar00rootroot00000000000000from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from subprocess import CalledProcessError from subprocess import check_output import requests import yaml s = requests.Session() with open(os.path.expanduser('~/.config/hub')) as f: config = yaml.load(f)['github.com'][0] s.auth = config['user'], config['oauth_token'] os.system("git fetch --all") r = s.get("https://api.github.com/search/issues?q=label:\"port%20to%20stable\"+repo:buildbot/buildbot") to_port = r.json() summary = "" for pr in to_port['items']: r = s.get("https://api.github.com/repos/buildbot/buildbot/pulls/{number}/commits".format(**pr)) commits = r.json() for c in commits: title = c['commit']['message'].split("\n")[0] try: check_output("git cherry-pick {sha} 2>&1".format(**c), shell=True) except CalledProcessError as e: os.system("git diff") os.system("git reset --hard HEAD 2>&1 >/dev/null") if '--allow-empty' in e.output: continue if 'fatal: bad object' in e.output: continue print("cannot automatically cherry-pick", pr['number'], c['sha'], title, e.output) else: summary += "\n#{number}: {title}".format(number=pr['number'], title=title, **c) print(summary) buildbot-2.6.0/common/pylintrc000066400000000000000000000253131361162603000164040ustar00rootroot00000000000000[MASTER] # Specify a configuration file. #rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore= # Pickle collected data for later comparisons. persistent=no # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= [MESSAGES CONTROL] # For now disable bunch of checks that does not pass. Some of them should be # re-enabled and reported issues fixed, while most are bugs in pylint and could # be re-enabled when those are fixed. 
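# (As an aside, a hedged note: besides disabling checks globally in this file, pylint also
# supports inline suppression in the source where needed; a hypothetical line such as
#   from foo import *  # pylint: disable=wildcard-import
# silences only that check on that line.)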
# Following are the checks we don't care about, and thus should remain disabled # # blacklisted-name # missing-docstring # too-many-lines # no-self-use # duplicate-code # too-many-ancestors # too-many-instance-attributes # too-few-public-methods # too-many-public-methods # too-many-return-statements # too-many-branches # too-many-arguments # too-many-locals # too-many-statements # abstract-class-not-used # abstract-class-little-used # exec-used # star-args # deprecated-module # fixme # global-variable-undefined # unused-argument # unpacking-non-sequence # maybe-no-member # "bad-continuation" disabled due to conflict with flake8 (and PEP8) # See # flake8 wants: # func("..." # ) # pylint wants: # func("..." # ) disable= blacklisted-name, invalid-name, missing-docstring, too-many-lines, no-self-use, duplicate-code, too-many-ancestors, too-many-instance-attributes, too-few-public-methods, too-many-public-methods, too-many-return-statements, too-many-branches, too-many-arguments, too-many-locals, too-many-statements, abstract-class-not-used, abstract-class-little-used, deprecated-module, fixme, global-variable-undefined, unused-argument, maybe-no-member, locally-disabled, bad-classmethod-argument, line-too-long, method-hidden, no-name-in-module, no-member, not-callable, too-many-function-args, unexpected-keyword-arg, redundant-keyword-arg, import-error, import-outside-toplevel, exec-used, star-args, unreachable, dangerous-default-value, pointless-statement, pointless-string-statement, expression-not-assigned, useless-else-on-loop, bad-builtin, attribute-defined-outside-init, protected-access, arguments-differ, signature-differs, abstract-method, super-init-not-called, no-init, non-parent-init-called, bad-indentation, global-statement, unused-variable, redefined-outer-name, redefined-builtin, unidiomatic-typecheck, undefined-loop-variable, unbalanced-tuple-unpacking, broad-except, bad-open-mode, superfluous-parens, no-self-argument, no-value-for-parameter, interface-not-implemented, bad-continuation, keyword-arg-before-vararg, unsubscriptable-object, inconsistent-return-statements, useless-object-inheritance, deprecated-method, useless-return, no-else-return, assignment-from-none, comparison-with-callable, comparison-with-itself, assignment-from-no-return, stop-iteration-return, old-style-class, redefined-variable-type, deprecated-lambda, bad-string-format-type, # https://github.com/PyCQA/pylint/issues/2631 cyclic-import, # only happens when pylint is ran in non-parallel mode [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Change the default error message template. msg-template={path}:{line} {msg} [{symbol}] # Include message's id in output include-ids=yes # Include symbolic ids of messages in output symbols=no # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". files-output=no # Tells whether to display a full report or only the messages reports=no # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). 
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). comment=no [SPELLING] # Spelling dictionary name. # If this value will be non-empty (e.g. 'en_US') and pyenchant will not be # installed, pylint will fail. # If this will be left empty pylint will ignore all spelling errors. spelling-dict=en_US # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. # Path relative to current working directory. spelling-private-dict-file=../common/code_spelling_ignore_words.txt # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [FORMAT] # Maximum number of characters on a single line. max-line-length=80 # Maximum number of lines in a module max-module-lines=1000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). ignored-classes=SQLObject # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. 
generated-members=REQUEST,acl_users,aq_parent [BASIC] # Will be removed in PyLint 2.0 # Required attributes for module, separated by a comma #required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,apply,input # Regular expression which should only match correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression which should only match correct module level names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression which should only match correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Regular expression which should only match correct function names function-rgx=[a-z_][a-zA-Z0-9]{2,30}$ # Regular expression which should only match correct method names method-rgx=[_]{0,2}[a-z][a-zA-Z0-9]{2,30}[_]{0,2}$ # Regular expression which should only match correct instance attribute names attr-rgx=[a-z_][a-zA-Z0-9]{2,30}$ # Regular expression which should only match correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match correct list comprehension / # generator expression variable names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Regular expression which should only match functions or classes name which do # not require a docstring no-docstring-rgx=__.*__ [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the beginning of the name of dummy variables # (i.e. not used). dummy-variables-rgx=_|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= [CLASSES] # This option will be removed in PyLint 2.0. # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. # ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,string,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. 
Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branchs=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception buildbot-2.6.0/common/smokedist-download-compatible-chromedriver.py000077500000000000000000000060161361162603000256040ustar00rootroot00000000000000#!/usr/bin/env python3 import argparse import re from subprocess import DEVNULL from subprocess import check_call from subprocess import check_output def parse_chrome_major_version(output): for line in output.splitlines(): # e.g.: # Chromium 69.0.3497.81 Built on Ubuntu , running on Ubuntu 18.04 # Google Chrome 70.0.3538.77 m = re.match(r'.*[cC]hrom.*\s(\d+)\.(\d+)\.(\d+)(?:\.\d+|).*', line) if m is not None: return int(m.group(1)), int(m.group(2)), int(m.group(3)) return None def get_chrome_version(browsers): for browser in browsers: try: print([browser, ' --version']) output = check_output([browser, ' --version'], stderr=DEVNULL) output = output.decode('utf-8', errors='ignore') version = parse_chrome_major_version(output) if version is not None: return (browser, version) except Exception: pass return (None, None) def main(): parser = argparse.ArgumentParser( prog='smokedist-download-compatible-chromedriver') parser.add_argument('manager', type=str, help="Path to the webdriver-manager") parser.add_argument('browsers', type=str, nargs='+', help="The browsers to get version info from. 
The first " "existing browser from the list will be used") args = parser.parse_args() try: browser, version = get_chrome_version(args.browsers) if browser is None: raise Exception('Could no get browser version') print('Using {0} release {1}'.format(browser, version)) chrome_major, chrome_minor, chrome_patch = version if chrome_major >= 73: # webdriver manager requires us to provide the 4th version component, however does not # use it when picking the version to download chromedriver_version = '{}.{}.{}.0'.format(chrome_major, chrome_minor, chrome_patch) else: chrome_major_to_chromedriver = { 73: '2.46', 72: '2.46', 71: '2.46', 70: '2.45', 69: '2.44', } if chrome_major not in chrome_major_to_chromedriver: raise Exception('Unknown Chrome version {}.{}.{}'.format( chrome_major, chrome_minor, chrome_patch)) chromedriver_version = chrome_major_to_chromedriver[chrome_major] print('Using chromedriver release {0}'.format(chromedriver_version)) cmd = [args.manager, 'update', '--versions.chrome', chromedriver_version, '--versions.standalone', '3.141.59'] print('Calling: ' + ' '.join(cmd)) check_call(cmd) return except Exception as e: print(str(e)) print('Failed to get compatible chromedriver version, using latest') check_call([args.manager + ' update'], shell=True) if __name__ == '__main__': main() buildbot-2.6.0/common/smokedist-www-backwards-compat.py000077500000000000000000000064701361162603000232410ustar00rootroot00000000000000#!/usr/bin/env python3 import argparse import json import os import shutil import subprocess def checkout_buildbot_at_revision(curr_buildbot_root, test_buildbot_root, revision): if os.path.isdir(test_buildbot_root): print('Removing {}'.format(test_buildbot_root)) shutil.rmtree(test_buildbot_root) os.makedirs(test_buildbot_root) subprocess.check_call(['git', 'clone', curr_buildbot_root, test_buildbot_root]) subprocess.check_call(['git', 'reset', '--hard', revision], cwd=test_buildbot_root) def install_local_dependencies(curr_buildbot_root, test_buildbot_root): packages = [ # data_module must be first, then guanlecoja-ui, as other packages depend on them 'www/data_module', 'www/guanlecoja-ui', 'www/base', 'www/codeparameter', 'www/console_view', 'www/grid_view', 'www/waterfall_view', 'www/wsgi_dashboards', ] for package in packages: package_root = os.path.join(test_buildbot_root, package) package_json_path = os.path.join(package_root, 'package.json') with open(package_json_path) as in_f: contents = json.load(in_f) replacements = [ ('guanlecoja-ui', 'link:' + os.path.join(curr_buildbot_root, 'www/data_module')), ('buildbot-data-js', 'link:' + os.path.join(curr_buildbot_root, 'www/guanlecoja-ui')), ('buildbot-build-common', 'link:' + os.path.join(curr_buildbot_root, 'www/build_common')), ] for dep_key in ['dependencies', 'devDependencies']: if dep_key not in contents: continue deps = contents[dep_key] for package, target in replacements: if package in deps: deps[package] = target with open(package_json_path, 'w') as out_f: json.dump(contents, out_f, indent=4, sort_keys=True) def run_test(test_buildbot_root): subprocess.check_call(['make', 'tarballs'], cwd=test_buildbot_root) subprocess.check_call(['common/smokedist.sh', 'whl'], cwd=test_buildbot_root) def main(): parser = argparse.ArgumentParser(prog='smokedist-www-backwards-compat') parser.add_argument('revision', type=str, help="A commit or tag that is accepted by git to test against") parser.add_argument('--tmp-path', type=str, default=None, help="The path to checkout old Buildbot version to") 
parser.add_argument('--dont-clean', action='store_true', default=False, help="If set, the temporary buildbot checkout will not be deleted") args = parser.parse_args() curr_buildbot_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) if args.tmp_path is not None: test_buildbot_root = args.tmp_path else: test_buildbot_root = os.path.join(curr_buildbot_root, 'tmp-buildbot-smokedist') print('Using {} as temporary path for buildbot checkout'.format(test_buildbot_root)) checkout_buildbot_at_revision(curr_buildbot_root, test_buildbot_root, args.revision) install_local_dependencies(curr_buildbot_root, test_buildbot_root) run_test(test_buildbot_root) if not args.dont_clean: shutil.rmtree(test_buildbot_root) if __name__ == '__main__': main() buildbot-2.6.0/common/smokedist.sh000077500000000000000000000007731361162603000171610ustar00rootroot00000000000000#!/bin/bash if [ -z $1 ]; then suffixes="whl tar.gz" else suffixes=$1 fi set -e for suffix in $suffixes do VE=sandbox.$suffix rm -rf $VE if [ -z "$python" ]; then virtualenv --python python3 $VE else virtualenv --python python$python $VE fi . $VE/bin/activate pip install -U pip pip install mock requests flask pip install dist/buildbot-2*.$suffix pip install dist/buildbot?pkg*.$suffix pip install dist/*.$suffix smokes/run.sh done buildbot-2.6.0/common/validate.sh000077500000000000000000000200511361162603000167370ustar00rootroot00000000000000#! /bin/bash TEST='buildbot.test buildbot_worker.test' # if stdout is a terminal define some colors # validate.sh can be run as hook from GUI git clients, such as git-gui if test -t 1; then # plain _ESC=$'\e' GREEN="$_ESC[0;32m" MAGENTA="$_ESC[0;35m" RED="$_ESC[0;31m" LTCYAN="$_ESC[1;36m" YELLOW="$_ESC[1;33m" NORM="$_ESC[0;0m" fi ## parse options quick=false no_js=false help=false while [ $# -gt 0 ]; do case $1 in --quick) quick=true ;; --no-js) no_js=true ;; --help) help=true ;; -*) echo "$0: error - unrecognized option $1" 1>&2; help=true ;; *) REVRANGE="$1..HEAD" ;; esac shift done if $help; then echo "USAGE: common/validate.sh [oldrev] [--quick] [--no-js] [--help]" echo " This script will test a set of patches (oldrev..HEAD) for basic acceptability as a patch" echo " Run it in an activated virtualenv with the current Buildbot installed, as well as" echo " sphinx, flake8, mock, and so on" echo "To use a different directory for tests, pass TRIALTMP=/path as an env variable" echo "if --quick is passed validate will skip unit tests and concentrate on coding style" echo "if --no-js is passed validate will skip tests that require Node and NPM" echo "if --help is passed validate will output this message and exit" echo "if no oldrev is passed validate will assume master...HEAD" exit 1 fi [ -z "$REVRANGE" ] && REVRANGE="master..HEAD" status() { echo "${LTCYAN}-- ${*} --${NORM}" } ok=true problem_summary="" not_ok() { ok=false echo "${RED}** ${*} **${NORM}" problem_summary="$problem_summary"$'\n'"${RED}**${NORM} ${*}" } warning() { echo "${YELLOW}** ${*} **${NORM}" problem_summary="$problem_summary"$'\n'"${YELLOW}**${NORM} ${*} (warning)" } check_tabs() { git diff "$REVRANGE" | grep -q $'+.*\t' } check_long_lines() { # only check python files local long_lines=false for f in $(git diff --name-only --stat "$REVRANGE" | grep '.py$'); do # don't try to check removed files [ ! 
-f "$f" ] && continue if [ $(git diff "$REVRANGE" $f | grep -E -c '^\+.{80}') != 0 ]; then echo " $f" long_lines=true fi done $long_lines } check_yield_defer_returnValue() { local yields=false if git diff "$REVRANGE" | grep '+.*yield defer.returnValue'; then yields=true fi $yields } check_relnotes() { if git diff --exit-code "$REVRANGE" master/docs/relnotes/index.rst >/dev/null 2>&1; then return 1 else return 0 fi } check_sa_Table() { local bad_files=$(git grep -l 'sa\.Table(' | grep '\.py$' | grep -v '^master/buildbot/util/sautils\.py$') if [ -n "${bad_files}" ]; then echo "${YELLOW}Source files found containing 'sa.Table':${NORM}" for f in $bad_files; do echo " ${YELLOW}*${NORM} $f" done echo "${YELLOW}import buildbot.util.sautils and use sautils.Table instead.${NORM}" return 1 fi return 0 } run_tests() { if [ -n "${TRIALTMP}" ]; then TEMP_DIRECTORY_OPT="--temp-directory ${TRIALTMP}" else warning "please provide a TRIALTMP env variable pointing to a ramfs for 30x speed up of the integration tests" fi find . -name \*.pyc -exec rm {} \; trial --reporter text ${TEMP_DIRECTORY_OPT} ${TEST} } if ! git diff --no-ext-diff --quiet --exit-code; then not_ok "changed files in working copy" if ! $quick; then exit 1 fi fi # get a list of changed files, used below; this uses a tempfile to work around # shell behavior when piping to 'while' tempfile=$(mktemp -t tmp.XXXXXX) trap "rm -f ${tempfile}; exit 1" 1 2 3 15 git diff --name-only $REVRANGE | grep '\.py$' | grep -v '\(^master/docs\|/setup\.py\)' > ${tempfile} py_files=() while read line; do if test -f "${line}"; then py_files+=($line) fi done < ${tempfile} echo "${MAGENTA}Validating the following commits:${NORM}" git log "$REVRANGE" --pretty=oneline || exit 1 if ! $quick && ! $no_js; then for module in www/base www/console_view www/grid_view www/waterfall_view www/codeparameter www/wsgi_dashboards; do status "running 'setup.py develop' for $module" if ! (cd $module; python setup.py develop >/dev/null ); then warning "$module/setup.py failed; retrying with cleared libs/" rm -rf "$module/libs" (cd $module; python setup.py develop >/dev/null ) || not_ok "$module/setup.py failed" fi done else warning "Skipping JavaScript Tests" fi if ! $quick; then status "running Python tests" run_tests || not_ok "Python tests failed" elif [ -z `command -v cctrial` ]; then warning "Skipping Python Tests ('pip install cctrial' for quick tests)" else cctrial -H buildbot buildbot_worker || not_ok "Python tests failed" fi status "checking formatting" check_tabs && not_ok "$REVRANGE adds tabs" check_long_lines && warning "$REVRANGE adds long lines" check_yield_defer_returnValue && not_ok "$REVRANGE yields defer.returnValue" status "checking for use of sa.Table" check_sa_Table || warning "use (buildbot.util.)sautils.Table instead of sa.Table" status "checking for release notes" check_relnotes || warning "$REVRANGE does not add release notes" if [ ${#py_files[@]} -ne 0 ]; then status "checking import module convention in modified files" if [[ -z `command -v isort` ]]; then warning "isort is not installed" else if ! isort ${py_files[@]}; then warning "unable to run isort on modified files" else if ! git diff --quiet --exit-code ${py_files[@]}; then not_ok "isort made changes" fi fi fi fi status "running autopep8" if [[ -z `command -v autopep8` ]]; then warning "autopep8 is not installed" elif [[ ! 
-f common/flake8rc ]]; then warning "common/flake8rc not found" else changes_made=false for filename in ${py_files[@]}; do LINEWIDTH=$(grep -E "max-line-length" common/flake8rc | sed 's/ //g' | cut -d'=' -f 2) # even if we don't enforce errors, if they can be fixed automatically, that's better.. IGNORES=E123,E501,W6 # ignore is not None for SQLAlchemy code.. if [[ "$filename" =~ "/db/" ]]; then IGNORES=$IGNORES,E711,E712 fi autopep8 --in-place --max-line-length=$LINEWIDTH --ignore=$IGNORES "$filename" if ! git diff --quiet --exit-code "$filename"; then changes_made=true fi done if ${changes_made}; then not_ok "autopep8 made changes" fi fi status "running flake8" if [[ -z `command -v flake8` ]]; then warning "flake8 is not installed" else flake8_ok=true for filename in ${py_files[@]}; do if ! flake8 --config=common/flake8rc "$filename"; then flake8_ok=false fi done $flake8_ok || not_ok "flake8 failed" fi status "running pylint" if [[ -z `command -v pylint` ]]; then warning "pylint is not installed" elif [[ ! -f common/pylintrc ]]; then warning "common/pylintrc not found" else pylint_ok=true for filename in ${py_files[@]}; do if ! pylint --rcfile=common/pylintrc --disable=R,line-too-long \ --enable=W0611 --output-format=text --reports=no \ --spelling-private-dict-file=common/code_spelling_ignore_words.txt \ "$filename"; then pylint_ok=false fi done $pylint_ok || not_ok "pylint failed" fi if git diff --name-only $REVRANGE | grep ^master/docs/ ; then status "building docs" # Don't clean builddir if built in quick mode if ! $quick ; then make -C master/docs clean || not_ok "docs cleanup failed" fi make -C master/docs VERSION=latest html || not_ok "docs failed" else status "not building docs, because it was not changed" fi echo "" if $ok; then if [ -z "${problem_summary}" ]; then echo "${GREEN}GOOD!${NORM}" else echo "${YELLOW}WARNINGS${NORM}${problem_summary}" fi exit 0 else echo "${RED}NO GOOD!${NORM}${problem_summary}" exit 1 fi buildbot-2.6.0/master/000077500000000000000000000000001361162603000146145ustar00rootroot00000000000000buildbot-2.6.0/master/.dockerignore000066400000000000000000000001321361162603000172640ustar00rootroot00000000000000*.pyc ./build _build node_modules docs/manual/mydashboard.html docs/manual/mydashboard.py buildbot-2.6.0/master/COPYING000066400000000000000000000354221361162603000156550ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. 
You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. 
For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. 
Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS buildbot-2.6.0/master/CREDITS000066400000000000000000000075261361162603000156460ustar00rootroot00000000000000This is a list of everybody who has contributed to Buildbot in some way, in no particular order. 
Thanks everybody! A. T. Hofkamp Aaron Hsieh Abdelrahman Hussein Adam Collard Adam MacBeth Adam Sjøgren Adam Slater Adam Vandenberg Alexander Lorenz Alexander Staubo Aloisio Almeida Jr Amar Takhar Amber Yust Andi Albrecht Andreas Lawitzky Andrew Bennetts Andrew Bortz Andrew Melo Andrew Straw Andriy Senkovych Andy Howell Anthony Baxter Arkadiusz Miskiewicz Augie Fackler Aurélien Bompard Aviv Ben-Yosef Axel Hecht Baptiste Lepilleur Ben Bangert Ben Hearsum Benjamin Smedberg Benoit Sigoure Benoît Allard Bobby Impollonia Brad Hards Brandon Ehle Brandon Philips Brandon R. Stoner Brett Neely Brian Campbell Brian Warner Chad S Metcalf Charles Davis Charles Hardin Charles Lepple Chase Phillips Chris AtLee Chris Peyer Chris Rivera Chris Soyars Chris Templin Christian Lins Christian Unger Claude Vittoria Clement Stenac ClusterHQ Inc. Dan Kegel Dan Locks Dan Savilonis Dan Scott Daniel Dunbar Daniel Svensson Darragh Bailey Dave Abrahams Dave Liebreich Dave Peticolas David Adam (zanchey) Derek Hurley Dmitry Gladkov Dmitry Nezhevenko Dobes Vandermeer Doug Goldstein Doug Latornell Douglas Hubler Douglas Leeder Duncan Ferguson Dustin J. Mitchell Dustin Sallings Elliot Murphy Fabrice Crestois Federico G. Schwindt Filip Hautekeete François Poirotte Gabriele Giacone Gareth Armstrong Gary Granger Gary Poster Gavin McDonald Georges Racinet Georgi Valkov Gerald Combs Gerard Escalante Geraud Boyer Greg McNew Greg Ward Grig Gheorghiu Haavard Skinnemoen Harry Borkhuis Ian Zimmerman Igor Slepchin Iustin Pop Jakub Gustak James Knight James Porter James Tomson Jared Grubb Jared Morrow Jason Hoos Jay Soffian Jean-Paul Calderone Jeff Bailey Jeff Olson Jeremy Gill Jerome Davann Jochen Eisinger Johan Bergström John Backstrand John Carr John F Leach John Ford John O'Duinn John Pye John Saxton Johnnie Pittman Jon Olsson Jonathan Romero Jonathan S. Romero Jorge Gonzalez Jose Dapena Paz Joshua Kugler Joshua Olson Joshua Root Julien Boeuf Justin Wood KATO Kazuyoshi Karl Norby Kevin Turner Kirill Lapshin Kovarththanan Rajaratnam Kristian Nielsen Lital Natan Louis Opter Love Hörnquist Åstrand Loïc Minier Lukas Blakk Łukasz Jernaś Marc Abramowitz Marc Mengel Marc-Antoine Ruel Marcus Lindblom Marius Gedminas Mark A. Grondona Mark Dillavou Mark Hammond Mark Lakewood Mark Pauley Mark Rowe Mark Wielaard Martin Nordholts Mateusz Loskot Matisse Enzer Matt Heitzenroder Matt Whiteley Matthew Scott Matthew Jacobi Mattias Brändström Michael Haggerty Michael Lyle Michael MacDonald Michael Stapelberg Michał Šrajer Mihai Parparita Mikael Lind Mike "Bear" Taylor Mikhail Gusarov Mirko Boehm Monty Taylor Nathaniel Smith Nate Bragg Neal Norwitz Neil Hemingway Nick Mathewson Nick Mills Nick Trout Nicolas Sylvain Nicolás Alvarez Niklaus Giger Olivier Bonnet Olly Betts P. 
Christeas Pam Selle Patrick Gansterer Paul Warren Paul Winkler Phil Thompson Philipp Frauenfelder Philippe McLean Pierre Tardy Piotr Sikora Pradeepkumar Gayam Quentin Raynaud Rafaël Carré Randall Bosetti Renato Alves Rene Müller Rene Rivera Riccardo Magliocchetti Richard Holden Richard Levitte Rob Helmer Robert Collins Robert Iannucci Robin Eckert Saurabh Kumar Satya Graha Scott Garman Scott Lamb Scott Lawrence Seo Sanghyeon Sergey Lipnevich Shawn Chin Shimizukawa Sidnei da Silva Simon Kennedy Stanislav Kupryakhin Stefan Marr Stefan Seefeld Stefan Zager Stephen Davis Steve "Ashcrow" Milner Steven Walter Stuart Auchterlonie Ted Mielczarek Terence Haddock Thijs Triemstra Thomas Moschny Thomas Vander Stichele Tim Hatch Timothy Fitz Tobi Vollebregt Tobias Oberstein Tom Fogal Tom Prince Tom Wardill Tomaz Muraus Umesh Patel Unknown tagger Wade Brainerd Wanner Markus William Deegan William Siegrist Yoz Grahame Zandr Milewski Zellyn Hunter Zooko Wilcox-O'Hearn Konstantinos Koukopoulos Name Unknown: adam chops code gollum gv lurker99 strank buildbot-2.6.0/master/Dockerfile000066400000000000000000000057551361162603000166220ustar00rootroot00000000000000# buildbot/buildbot-master # please follow docker best practices # https://docs.docker.com/engine/userguide/eng-image/dockerfile_best-practices/ # # Use a multi-stage build: # https://docs.docker.com/develop/develop-images/multistage-build/ # # Provide an intermediate Docker image named "buildbot-build". # This intermediate image builds binary wheels # which get installed in the final image. # This allows us to avoid installing build tools like gcc in the final image. FROM alpine:3.7 AS buildbot-build MAINTAINER Buildbot maintainers # Last build date - this can be updated whenever there are security updates so # that everything is rebuilt ENV security_updates_as_of 2018-04-10 COPY . /usr/src/buildbot RUN \ echo @edgecommunity http://nl.alpinelinux.org/alpine/edge/community >> /etc/apk/repositories && \ echo @testing http://nl.alpinelinux.org/alpine/edge/testing >> /etc/apk/repositories && \ apk add --no-cache \ git \ python3-dev \ libffi-dev \ libressl-dev \ postgresql-dev \ py3-pip \ alpine-sdk \ tar \ tzdata \ curl && \ # install pip dependencies pip3 install --upgrade pip setuptools wheel && \ pip3 install "buildbot[bundle,tls]" && \ pip3 install -r /usr/src/buildbot/requirements-docker-extras.txt && \ pip3 install "/usr/src/buildbot" && \ mkdir -p wheels && \ pip3 list --format freeze | grep -v '^buildbot=' > wheels/wheels.txt && \ cd wheels && \ pip3 wheel -r wheels.txt #============================================================================================== # Build the final image here. Use build artifacts from the buildbot-build # container. # Note that the UI and worker packages are the latest version published on pypi # This is to avoid pulling node inside this container FROM alpine:3.7 MAINTAINER Buildbot maintainers # Last build date - this can be updated whenever there are security updates so # that everything is rebuilt ENV security_updates_as_of 2018-04-11 COPY . /usr/src/buildbot # Build wheels in other container using the Dockerfile.build # and copy them into this container. # We do this to avoid having to pull gcc for building native extensions. 
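# For reference, a minimal local build/run sketch (the image tag and config URL below are
# hypothetical examples; see the master README for the supported BUILDBOT_CONFIG_* variables):
#   docker build -t buildbot/buildbot-master:local .
#   docker run -d -p 8010:8010 \
#     -e BUILDBOT_CONFIG_URL=https://example.com/master-cfg.tar.gz \
#     buildbot/buildbot-master:local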
COPY --from=buildbot-build /wheels /wheels RUN \ echo @edgecommunity http://nl.alpinelinux.org/alpine/edge/community >> /etc/apk/repositories && \ echo @testing http://nl.alpinelinux.org/alpine/edge/testing >> /etc/apk/repositories && \ apk add --no-cache \ git \ python3 \ postgresql-libs \ py3-pip \ gosu@testing \ dumb-init \ tar \ tzdata \ curl \ openssh && \ # install pip dependencies pip3 install --upgrade pip setuptools && \ pip3 install /wheels/*.whl && \ pip3 install "buildbot[bundle,tls]" && \ pip3 install "/usr/src/buildbot" && \ rm -r /root/.cache /wheels WORKDIR /var/lib/buildbot CMD ["dumb-init", "/usr/src/buildbot/docker/start_buildbot.sh"] buildbot-2.6.0/master/MANIFEST.in000066400000000000000000000022711361162603000163540ustar00rootroot00000000000000include MANIFEST.in README.rst CREDITS COPYING UPGRADING include docs/examples/*.cfg include docs/conf.py include docs/Makefile include docs/buildbot.1 include docs/*.rst include docs/_images/* include docs/_static/* include docs/_templates/* include docs/_themes/qtile/* include docs/_themes/qtile/static/* include docs/tutorial/*.rst include docs/tutorial/_images/*.png include docs/manual/*.rst include docs/manual/_images/*.svg include docs/manual/_images/*.png include docs/manual/_images/*.txt include docs/manual/_images/icon.blend include docs/manual/_images/Makefile include docs/manual/installation/*.rst include docs/bbdocs/*.py include docs/developer/* include docs/developer/_images/* include docs/relnotes/* include buildbot/scripts/sample.cfg include buildbot/scripts/buildbot_tac.tmpl include buildbot/reporters/templates/*.txt include buildbot/spec/api.raml include buildbot/spec/types/*.raml include buildbot/db/migrate/README buildbot/db/migrate/migrate.cfg include contrib/* contrib/windows/* contrib/os-x/* contrib/css/* contrib/libvirt/* include contrib/trac/* contrib/trac/bbwatcher/* contrib/trac/bbwatcher/templates/* include contrib/init-scripts/* contrib/bash/* contrib/zsh/* buildbot-2.6.0/master/Makefile000066400000000000000000000004121361162603000162510ustar00rootroot00000000000000# developer utilities pylint: pylint -j4 --rcfile=../common/pylintrc buildbot @test ! -f fail tutorial: cd docs/tutorial; $(MAKE) html flake8: flake8 --config=../common/flake8rc buildbot flake8 --config=../common/flake8rc docs/conf.py rmpyc: make -C .. rmpyc buildbot-2.6.0/master/README.rst000066400000000000000000000071671361162603000163160ustar00rootroot00000000000000Buildbot: The Continuous Integration Framework ============================================== :Site: https://buildbot.net :Original author: Brian Warner :Current maintainer: `The Botherders `_. .. contents:: :local: Buildbot is an open-source continuous integration framework for automating software build, test, and release processes. * Buildbot is easy to set up, but very extensible and customizable. It supports arbitrary build processes, and is not limited to common build processes for particular languages (e.g., autotools or ant) * Buildbot supports building and testing on a variety of platforms. Developers, who do not have the facilities to test their changes everywhere before committing, will know shortly afterwards whether they have broken the build or not. * Buildbot allows to track various metrics (warning counts, lint checks, image size, compile time, etc) over time. * Buildbot has minimal requirements for workers: using virtualenv, only a Python installation is required. * Workers can be run behind a NAT firewall and communicate with the master. 
* Buildbot has a variety of status-reporting tools to get information about builds in front of developers in a timely manner. Documentation ------------- See https://docs.buildbot.net/current/ for documentation of the current version of Buildbot. Docker container ---------------- Buildbot comes with a ready to use docker container available at buildbot/buildbot-master Following environment variables are supported for configuration: * ``BUILDBOT_CONFIG_URL``: http url to a config tarball. The tarball must be in the .tar.gz format. The tarball must contain a directory, which will contain a master.cfg file in it. The tarball may contain a twisted.tac file in it, which can be used to configure the twisted logging system (e.g to log in logstash instead of the default stdout). The tarball will be extracted in a directory named ``$BUILDBOT_CONFIG_DIR`` in the master directory, and can contain additional python module that the master.cfg can load. If ``BUILDBOT_CONFIG_URL`` does not end with .tar.gz, it is considered to be an URL to the direct ``master.cfg`` * ``BUILDBOT_CONFIG_DIR`` directory where to extract the config tarball within the master directory. It is important so that you can do relative imports in your master.cfg like it is done in the metabbotcfg (https://github.com/buildbot/metabbotcfg) Requirements ------------ See https://docs.buildbot.net/current/manual/installation/index.html Briefly: python, Twisted, Jinja2, simplejson, and SQLite. Simplejson and SQLite are included with recent versions of Python. Contributing ------------- Please send your patches to https://github.com/buildbot/buildbot/ Support ------- Please send questions, file bugs, etc, on the Buildbot Github project https://github.com/buildbot/buildbot/issues. Alternatively, write to the buildbot-devel mailing list reachable through https://buildbot.net/. Copying ------- Buildbot is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. For full details, please see the file named COPYING in the top directory of the source tree. You should have received a copy of the GNU General Public License along with this program. If not, see . buildbot-2.6.0/master/UPGRADING000066400000000000000000000002661361162603000160630ustar00rootroot00000000000000For information on ugprading Buildbot, see the section "Upgrading an Existing Buildmaster" in the buildbot documentation. This may be found locally in docs/manual/installation.rst. buildbot-2.6.0/master/buildbot/000077500000000000000000000000001361162603000164205ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/__init__.py000066400000000000000000000106731361162603000205400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
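The buildbot/__init__.py module that follows computes __version__ from, in order, the BUILDBOT_VERSION environment variable, a VERSION file, a `git archive` substitution, `git describe` output, and finally file modification times. A minimal, self-contained sketch of the `git describe` parsing its comments describe (the tag string and the simplified regular expression here are illustrative, not the module's own VERSION_MATCH):

import re

# "git describe" output for a commit 20 commits past tag v0.9.8
# (illustrative input; the module's regex is more general):
describe = "v0.9.8-20-gf0f45ca"
m = re.match(r"v(\d+)\.(\d+)\.(\d+)(?:-(\d+)-g([0-9a-f]+))?$", describe)
major, minor, patch, ahead, short_hash = m.groups()
if ahead:
    # commits on top of the last tag: bump the patch level and mark a dev
    # release, as described in the gitDescribeToPep440() comment below
    version = "{}.{}.{}.dev{}".format(major, minor, int(patch) + 1, ahead)
else:
    version = "{}.{}.{}".format(major, minor, patch)
print(version)  # 0.9.9.dev20
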
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # We can't put this method in utility modules, because they import dependency packages import datetime import os import re from subprocess import PIPE from subprocess import STDOUT from subprocess import Popen def gitDescribeToPep440(version): # git describe produce version in the form: v0.9.8-20-gf0f45ca # where 20 is the number of commit since last release, and gf0f45ca is the short commit id preceded by 'g' # we parse this a transform into a pep440 release version 0.9.9.dev20 (increment last digit and add dev before 20) VERSION_MATCH = re.compile(r'(?P\d+)\.(?P\d+)\.(?P\d+)(\.post(?P\d+))?(-(?P\d+))?(-g(?P.+))?') v = VERSION_MATCH.search(version) if v: major = int(v.group('major')) minor = int(v.group('minor')) patch = int(v.group('patch')) if v.group('dev'): patch += 1 dev = int(v.group('dev')) return "{}.{}.{}-dev{}".format(major, minor, patch, dev) if v.group('post'): return "{}.{}.{}.post{}".format(major, minor, patch, v.group('post')) return "{}.{}.{}".format(major, minor, patch) return v def mTimeVersion(init_file): cwd = os.path.dirname(os.path.abspath(init_file)) m = 0 for root, dirs, files in os.walk(cwd): for f in files: m = max(os.path.getmtime(os.path.join(root, f)), m) d = datetime.datetime.utcfromtimestamp(m) return d.strftime("%Y.%m.%d") def getVersionFromArchiveId(git_archive_id='1579625496 (tag: v2.6.0)'): """ Extract the tag if a source is from git archive. When source is exported via `git archive`, the git_archive_id init value is modified and placeholders are expanded to the "archived" revision: %ct: committer date, UNIX timestamp %d: ref names, like the --decorate option of git-log See man gitattributes(5) and git-log(1) (PRETTY FORMATS) for more details. """ # mangle the magic string to make sure it is not replaced by git archive if not git_archive_id.startswith('$For''mat:'): # source was modified by git archive, try to parse the version from # the value of git_archive_id match = re.search(r'tag:\s*v([^,)]+)', git_archive_id) if match: # archived revision is tagged, use the tag return gitDescribeToPep440(match.group(1)) # archived revision is not tagged, use the commit date tstamp = git_archive_id.strip().split()[0] d = datetime.datetime.utcfromtimestamp(int(tstamp)) return d.strftime('%Y.%m.%d') return None def getVersion(init_file): """ Return BUILDBOT_VERSION environment variable, content of VERSION file, git tag or 'latest' """ try: return os.environ['BUILDBOT_VERSION'] except KeyError: pass try: cwd = os.path.dirname(os.path.abspath(init_file)) fn = os.path.join(cwd, 'VERSION') with open(fn) as f: return f.read().strip() except IOError: pass version = getVersionFromArchiveId() if version is not None: return version try: p = Popen(['git', 'describe', '--tags', '--always'], stdout=PIPE, stderr=STDOUT, cwd=cwd) out = p.communicate()[0] if (not p.returncode) and out: v = gitDescribeToPep440(str(out)) if v: return v except OSError: pass try: # if we really can't find the version, we use the date of modification of the most recent file # docker hub builds cannot use git describe return mTimeVersion(init_file) except Exception: # bummer. 
lets report something return "latest" version = getVersion(__file__) __version__ = version buildbot-2.6.0/master/buildbot/buildbot_net_usage_data.py000066400000000000000000000170021361162603000236210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ This files implement buildbotNetUsageData options It uses urllib instead of requests in order to avoid requiring another dependency for statistics feature. urllib supports http_proxy already. urllib is blocking and thus everything is done from a thread. """ import hashlib import inspect import json import os import platform import socket from urllib import error as urllib_error from urllib import request as urllib_request from twisted.internet import threads from twisted.python import log from buildbot.process.buildstep import _BuildStepFactory from buildbot.util import unicode2bytes from buildbot.www.config import IndexResource # This can't change! or we will need to make sure we are compatible with all # released version of buildbot >=0.9.0 PHONE_HOME_URL = "https://events.buildbot.net/events/phone_home" def linux_distribution(): os_release = "/etc/os-release" meta_data = {'ID': "unknown_linux", 'VERSION_ID': "unknown_version"} if os.path.exists(os_release): with open("/etc/os-release") as f: for line in f: try: k, v = line.strip().split("=") meta_data[k] = v.strip('""') except Exception: pass return meta_data['ID'], meta_data['VERSION_ID'] def get_distro(): system = platform.system() if system == "Linux": dist = linux_distribution() return "{}:{}".format(dist[0], dist[1]) elif system == "Windows": dist = platform.win32_ver() return "{}:{}".format(dist[0], dist[1]) elif system == "Java": dist = platform.java_ver() return "{}:{}".format(dist[0], dist[1]) elif system == "Darwin": dist = platform.mac_ver() return "{}".format(dist[0]) # else: return ":".join(platform.uname()[0:1]) def getName(obj): """This method finds the first parent class which is within the buildbot namespace it prepends the name with as many ">" as the class is subclassed """ # elastic search does not like '.' in dict keys, so we replace by / def sanitize(name): return name.replace(".", "/") if isinstance(obj, _BuildStepFactory): klass = obj.factory else: klass = type(obj) name = "" klasses = (klass, ) + inspect.getmro(klass) for klass in klasses: if hasattr(klass, "__module__") and klass.__module__.startswith("buildbot."): return sanitize(name + klass.__module__ + "." 
+ klass.__name__) else: name += ">" return sanitize(type(obj).__name__) def countPlugins(plugins_uses, lst): if isinstance(lst, dict): lst = lst.values() for i in lst: name = getName(i) plugins_uses.setdefault(name, 0) plugins_uses[name] += 1 def basicData(master): plugins_uses = {} countPlugins(plugins_uses, master.config.workers) countPlugins(plugins_uses, master.config.builders) countPlugins(plugins_uses, master.config.schedulers) countPlugins(plugins_uses, master.config.services) countPlugins(plugins_uses, master.config.change_sources) for b in master.config.builders: countPlugins(plugins_uses, b.factory.steps) # we hash the master's name + various other master dependent variables # to get as much as possible an unique id # we hash it to not leak private information about the installation such as hostnames and domain names hashInput = ( master.name + # master name contains hostname + master basepath socket.getfqdn() # we add the fqdn to account for people # call their buildbot host 'buildbot' # and install it in /var/lib/buildbot ) hashInput = unicode2bytes(hashInput) installid = hashlib.sha1(hashInput).hexdigest() return { 'installid': installid, 'versions': dict(IndexResource.getEnvironmentVersions()), 'platform': { 'platform': platform.platform(), 'system': platform.system(), 'machine': platform.machine(), 'processor': platform.processor(), 'python_implementation': platform.python_implementation(), # xBSD including osx will disclose too much information after [4] like where it was built 'version': " ".join(platform.version().split(' ')[:4]), 'distro': get_distro() }, 'plugins': plugins_uses, 'db': master.config.db['db_url'].split("://")[0], 'mq': master.config.mq['type'], 'www_plugins': list(master.config.www['plugins'].keys()) } def fullData(master): """ Send the actual configuration of the builders, how the steps are agenced. Note that full data will never send actual detail of what command is run, name of servers, etc. 
""" builders = [] for b in master.config.builders: steps = [] for step in b.factory.steps: steps.append(getName(step)) builders.append(steps) return {'builders': builders} def computeUsageData(master): if master.config.buildbotNetUsageData is None: return data = basicData(master) if master.config.buildbotNetUsageData != "basic": data.update(fullData(master)) if callable(master.config.buildbotNetUsageData): data = master.config.buildbotNetUsageData(data) return data def _sendWithUrlib(url, data): data = json.dumps(data).encode() clen = len(data) req = urllib_request.Request(url, data, { 'Content-Type': 'application/json', 'Content-Length': clen }) try: f = urllib_request.urlopen(req) except urllib_error.URLError: return None res = f.read() f.close() return res def _sendWithRequests(url, data): try: import requests # pylint: disable=import-outside-toplevel except ImportError: return None r = requests.post(url, json=data) return r.text def _sendBuildbotNetUsageData(data): log.msg("buildbotNetUsageData: sending {}".format(data)) # first try with requests, as this is the most stable http library res = _sendWithRequests(PHONE_HOME_URL, data) # then we try with stdlib, which not always work with https if res is None: res = _sendWithUrlib(PHONE_HOME_URL, data) # at last stage if res is None: log.msg("buildbotNetUsageData: Could not send using https, " "please `pip install 'requests[security]'` for proper SSL implementation`") data['buggySSL'] = True res = _sendWithUrlib(PHONE_HOME_URL.replace("https://", "http://"), data) log.msg("buildbotNetUsageData: buildbot.net said:", res) def sendBuildbotNetUsageData(master): if master.config.buildbotNetUsageData is None: return data = computeUsageData(master) if data is None: return threads.deferToThread(_sendBuildbotNetUsageData, data) buildbot-2.6.0/master/buildbot/buildrequest.py000066400000000000000000000014321361162603000215020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.buildrequest import BuildRequest _hush_pyflakes = [BuildRequest] buildbot-2.6.0/master/buildbot/changes/000077500000000000000000000000001361162603000200305ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/changes/__init__.py000066400000000000000000000000001361162603000221270ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/changes/base.py000066400000000000000000000076171361162603000213270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot.interfaces import IChangeSource from buildbot.util import service from buildbot.util.poll import method as poll_method @implementer(IChangeSource) class ChangeSource(service.ClusteredBuildbotService): def describe(self): pass # activity handling def activate(self): return defer.succeed(None) def deactivate(self): return defer.succeed(None) # service handling def _getServiceId(self): return self.master.data.updates.findChangeSourceId(self.name) def _claimService(self): return self.master.data.updates.trySetChangeSourceMaster(self.serviceid, self.master.masterid) def _unclaimService(self): return self.master.data.updates.trySetChangeSourceMaster(self.serviceid, None) class ReconfigurablePollingChangeSource(ChangeSource): pollInterval = None pollAtLaunch = None def checkConfig(self, name=None, pollInterval=60 * 10, pollAtLaunch=False): super().checkConfig(name=name) if pollInterval < 0: config.error("interval must be >= 0: {}".format(pollInterval)) @defer.inlineCallbacks def reconfigService(self, name=None, pollInterval=60 * 10, pollAtLaunch=False): self.pollInterval, prevPollInterval = pollInterval, self.pollInterval self.pollAtLaunch = pollAtLaunch yield super().reconfigService(name=name) # pollInterval change is the only value which makes sense to reconfigure check. if prevPollInterval != pollInterval and self.doPoll.started: yield self.doPoll.stop() # As a implementation detail, poller will 'pollAtReconfigure' if poll interval changes # and pollAtLaunch=True yield self.doPoll.start(interval=self.pollInterval, now=self.pollAtLaunch) def poll(self): pass @poll_method def doPoll(self): d = defer.maybeDeferred(self.poll) d.addErrback(log.err, 'while polling for changes') return d def force(self): self.doPoll() def activate(self): self.doPoll.start(interval=self.pollInterval, now=self.pollAtLaunch) def deactivate(self): return self.doPoll.stop() class PollingChangeSource(ReconfigurablePollingChangeSource): # Legacy code will be very painful to port to BuildbotService life cycle # because the unit tests keep doing shortcuts for the Service life cycle (i.e by no calling startService) # instead of porting everything at once, we make a class to support legacy def checkConfig(self, name=None, pollInterval=60 * 10, pollAtLaunch=False, **kwargs): super().checkConfig(name=name, pollInterval=60 * 10, pollAtLaunch=False) self.pollInterval = pollInterval self.pollAtLaunch = pollAtLaunch def reconfigService(self, *args, **kwargs): # BuildbotServiceManager will detect such exception and swap old service with new service, # instead of just reconfiguring raise NotImplementedError() buildbot-2.6.0/master/buildbot/changes/bitbucket.py000066400000000000000000000174101361162603000223610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import time from datetime import datetime from twisted.internet import defer from twisted.python import log from twisted.web import client from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import deferredLocked from buildbot.util import epoch2datetime _UNSPECIFIED = object() class BitbucketPullrequestPoller(base.PollingChangeSource): compare_attrs = ("owner", "slug", "branch", "pollInterval", "useTimestamps", "category", "project", "pollAtLaunch") db_class_name = 'BitbucketPullrequestPoller' def __init__(self, owner, slug, branch=None, pollInterval=10 * 60, useTimestamps=True, category=None, project='', pullrequest_filter=True, encoding=_UNSPECIFIED, pollAtLaunch=False ): self.owner = owner self.slug = slug self.branch = branch super().__init__(name='/'.join([owner, slug]), pollInterval=pollInterval, pollAtLaunch=pollAtLaunch) if encoding != _UNSPECIFIED: config.warnDeprecated('2.6.0', 'encoding of BitbucketPullrequestPoller is deprecated.') if hasattr(pullrequest_filter, '__call__'): self.pullrequest_filter = pullrequest_filter else: self.pullrequest_filter = (lambda _: pullrequest_filter) self.lastChange = time.time() self.lastPoll = time.time() self.useTimestamps = useTimestamps self.category = category if callable( category) else bytes2unicode(category) self.project = bytes2unicode(project) self.initLock = defer.DeferredLock() def describe(self): return "BitbucketPullrequestPoller watching the "\ "Bitbucket repository %s/%s, branch: %s" % ( self.owner, self.slug, self.branch) @deferredLocked('initLock') def poll(self): d = self._getChanges() d.addCallback(self._processChanges) d.addErrback(self._processChangesFailure) return d def _getChanges(self): self.lastPoll = time.time() log.msg("BitbucketPullrequestPoller: polling " "Bitbucket repository %s/%s, branch: %s" % (self.owner, self.slug, self.branch)) url = "https://bitbucket.org/api/2.0/repositories/%s/%s/pullrequests" % ( self.owner, self.slug) return client.getPage(url, timeout=self.pollInterval) @defer.inlineCallbacks def _processChanges(self, page): result = json.loads(page) for pr in result['values']: branch = pr['source']['branch']['name'] nr = int(pr['id']) # Note that this is a short hash. The full length hash can be accessed via the # commit api resource but we want to avoid requesting multiple pages as long as # we are not sure that the pull request is new or updated. revision = pr['source']['commit']['hash'] # check branch if not self.branch or branch in self.branch: current = yield self._getCurrentRev(nr) # compare _short_ hashes to check if the PR has been updated if not current or current[0:12] != revision[0:12]: # parse pull request api page (required for the filter) page = yield client.getPage(str(pr['links']['self']['href'])) pr_json = json.loads(page) # filter pull requests by user function if not self.pullrequest_filter(pr_json): log.msg('pull request does not match filter') continue # access additional information author = pr['author']['display_name'] prlink = pr['links']['html']['href'] # Get time updated time. Note that the timezone offset is # ignored. 
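# Bitbucket's "updated_on" field is an ISO-8601 timestamp carrying
# fractional seconds and a UTC offset, e.g. "2020-01-18T12:34:56.000000+00:00"
# (illustrative value). Splitting on "." keeps only "2020-01-18T12:34:56",
# which matches the strptime() format used below and, as noted above,
# silently drops the offset.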
if self.useTimestamps: updated = datetime.strptime( pr['updated_on'].split('.')[0], '%Y-%m-%dT%H:%M:%S') else: updated = epoch2datetime(self.master.reactor.seconds()) title = pr['title'] # parse commit api page page = yield client.getPage(str(pr['source']['commit']['links']['self']['href'])) commit_json = json.loads(page) # use the full-length hash from now on revision = commit_json['hash'] revlink = commit_json['links']['html']['href'] # parse repo api page page = yield client.getPage(str(pr['source']['repository']['links']['self']['href'])) repo_json = json.loads(page) repo = repo_json['links']['html']['href'] # update database yield self._setCurrentRev(nr, revision) # emit the change yield self.master.data.updates.addChange( author=bytes2unicode(author), committer=None, revision=bytes2unicode(revision), revlink=bytes2unicode(revlink), comments='pull-request #%d: %s\n%s' % ( nr, title, prlink), when_timestamp=datetime2epoch(updated), branch=bytes2unicode(branch), category=self.category, project=self.project, repository=bytes2unicode(repo), src='bitbucket', ) def _processChangesFailure(self, f): log.msg('BitbucketPullrequestPoller: json api poll failed') log.err(f) # eat the failure to continue along the deferred chain - we still want # to catch up return None def _getCurrentRev(self, pr_id): # Return a deferred datetime object for the given pull request number # or None. d = self._getStateObjectId() @d.addCallback def oid_callback(oid): current = self.master.db.state.getState( oid, 'pull_request%d' % pr_id, None) @current.addCallback def result_callback(result): return result return current return d def _setCurrentRev(self, pr_id, rev): # Set the datetime entry for a specified pull request. d = self._getStateObjectId() @d.addCallback def oid_callback(oid): return self.master.db.state.setState(oid, 'pull_request%d' % pr_id, rev) return d def _getStateObjectId(self): # Return a deferred for object id in state db. return self.master.db.state.getObjectId( '%s/%s#%s' % (self.owner, self.slug, self.branch), self.db_class_name) buildbot-2.6.0/master/buildbot/changes/changes.py000066400000000000000000000165041361162603000220200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import html # py2: via future import time from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot import util from buildbot.process.properties import Properties from buildbot.util import datetime2epoch @implementer(interfaces.IStatusEvent) class Change: """I represent a single change to the source tree. 
This may involve several files, but they are all changed by the same person, and there is a change comment for the group as a whole.""" number = None branch = None category = None revision = None # used to create a source-stamp links = [] # links are gone, but upgrade code expects this attribute @classmethod def fromChdict(cls, master, chdict): """ Class method to create a L{Change} from a dictionary as returned by L{ChangesConnectorComponent.getChange}. @param master: build master instance @param ssdict: change dictionary @returns: L{Change} via Deferred """ cache = master.caches.get_cache("Changes", cls._make_ch) return cache.get(chdict['changeid'], chdict=chdict, master=master) @classmethod def _make_ch(cls, changeid, master, chdict): change = cls(None, None, None, _fromChdict=True) change.who = chdict['author'] change.committer = chdict['committer'] change.comments = chdict['comments'] change.revision = chdict['revision'] change.branch = chdict['branch'] change.category = chdict['category'] change.revlink = chdict['revlink'] change.repository = chdict['repository'] change.codebase = chdict['codebase'] change.project = chdict['project'] change.number = chdict['changeid'] when = chdict['when_timestamp'] if when: when = datetime2epoch(when) change.when = when change.files = sorted(chdict['files']) change.properties = Properties() for n, (v, s) in chdict['properties'].items(): change.properties.setProperty(n, v, s) return defer.succeed(change) def __init__(self, who, files, comments, committer=None, revision=None, when=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', _fromChdict=False): if properties is None: properties = {} # skip all this madness if we're being built from the database if _fromChdict: return self.who = who self.committer = committer self.comments = comments def none_or_unicode(x): if x is None: return x return str(x) self.revision = none_or_unicode(revision) now = util.now() if when is None: self.when = now elif when > now: # this happens when the committing system has an incorrect clock, for example. 
# handle it gracefully log.msg( "received a Change with when > now; assuming the change happened now") self.when = now else: self.when = when self.branch = none_or_unicode(branch) self.category = none_or_unicode(category) self.revlink = revlink self.properties = Properties() self.properties.update(properties, "Change") self.repository = repository self.codebase = codebase self.project = project # keep a sorted list of the files, for easier display self.files = sorted(files or []) def __setstate__(self, dict): self.__dict__ = dict # Older Changes won't have a 'properties' attribute in them if not hasattr(self, 'properties'): self.properties = Properties() if not hasattr(self, 'revlink'): self.revlink = "" def __str__(self): return ("Change(revision=%r, who=%r, committer=%r, branch=%r, comments=%r, " + "when=%r, category=%r, project=%r, repository=%r, " + "codebase=%r)") % ( self.revision, self.who, self.committer, self.branch, self.comments, self.when, self.category, self.project, self.repository, self.codebase) def __eq__(self, other): return self.number == other.number def __ne__(self, other): return self.number != other.number def __lt__(self, other): return self.number < other.number def __le__(self, other): return self.number <= other.number def __gt__(self, other): return self.number > other.number def __ge__(self, other): return self.number >= other.number def asText(self): data = "" data += "Files:\n" for f in self.files: data += " %s\n" % f if self.repository: data += "On: %s\n" % self.repository if self.project: data += "For: %s\n" % self.project data += "At: %s\n" % self.getTime() data += "Changed By: %s\n" % self.who data += "Committed By: %s\n" % self.committer data += "Comments: %s" % self.comments data += "Properties: \n" for prop in self.properties.asList(): data += " %s: %s" % (prop[0], prop[1]) data += '\n\n' return data def asDict(self): '''returns a dictionary with suitable info for html/mail rendering''' files = [dict(name=f) for f in self.files] files.sort(key=lambda a: a['name']) result = { # Constant 'number': self.number, 'branch': self.branch, 'category': self.category, 'who': self.getShortAuthor(), 'committer': self.committer, 'comments': self.comments, 'revision': self.revision, 'rev': self.revision, 'when': self.when, 'at': self.getTime(), 'files': files, 'revlink': getattr(self, 'revlink', None), 'properties': self.properties.asList(), 'repository': getattr(self, 'repository', None), 'codebase': getattr(self, 'codebase', ''), 'project': getattr(self, 'project', None) } return result def getShortAuthor(self): return self.who def getTime(self): if not self.when: return "?" return time.strftime("%a %d %b %Y %H:%M:%S", time.localtime(self.when)) def getTimes(self): return (self.when, None) def getText(self): return [html.escape(self.who)] def getLogs(self): return {} buildbot-2.6.0/master/buildbot/changes/filter.py000066400000000000000000000134731361162603000216770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
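filter.py, reproduced next, implements ChangeFilter, which schedulers use to pick the changes they react to; exact values, lists, start-anchored regular expressions and callables can be combined per attribute. A minimal usage sketch (the project/branch values and the scheduler wiring are illustrative):

from buildbot.plugins import schedulers, util

# Accept changes to the "buildbot" project on master or any release/* branch.
cf = util.ChangeFilter(
    project='buildbot',
    branch_fn=lambda b: b == 'master' or (b or '').startswith('release/'),
)

s = schedulers.SingleBranchScheduler(
    name='on-commit',
    change_filter=cf,
    builderNames=['runtests'],   # builder name is illustrative
)
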
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from buildbot.util import ComparableMixin from buildbot.util import NotABranch class ChangeFilter(ComparableMixin): # NOTE: If users use a filter_fn, we have no way to determine whether it has # changed at reconfig, so the scheduler will always be restarted. That's as # good as Python can do. compare_attrs = ('filter_fn', 'checks') def __init__(self, # gets a Change object, returns boolean filter_fn=None, # change attribute comparisons: exact match to PROJECT, member of # list PROJECTS, regular expression match to PROJECT_RE, or # PROJECT_FN returns True when called with the project; repository, # branch, and so on are similar. Note that the regular expressions # are anchored to the first character of the string. For convenience, # a list can also be specified to the singular option (e.g, PROJECTS). project=None, project_re=None, project_fn=None, repository=None, repository_re=None, repository_fn=None, branch=NotABranch, branch_re=None, branch_fn=None, category=None, category_re=None, category_fn=None, codebase=None, codebase_re=None, codebase_fn=None): self.filter_fn = filter_fn self.checks = self.createChecks( (project, project_re, project_fn, "project"), (repository, repository_re, repository_fn, "repository"), (branch, branch_re, branch_fn, "branch"), (category, category_re, category_fn, "category"), (codebase, codebase_re, codebase_fn, "codebase"), ) def createChecks(self, *checks): def mklist(x): if x is not None and not isinstance(x, list): return [x] return x def mklist_br(x): # branch needs to be handled specially if x is NotABranch: return None if not isinstance(x, list): return [x] return x def mkre(r): if r is not None and not hasattr(r, 'match'): r = re.compile(r) return r ret = {} for filt_list, filt_re, filt_fn, chg_attr in checks: if "branch" in chg_attr: ret[chg_attr] = (mklist_br(filt_list), mkre(filt_re), filt_fn) else: ret[chg_attr] = (mklist(filt_list), mkre(filt_re), filt_fn) return ret def filter_change(self, change): if self.filter_fn is not None and not self.filter_fn(change): return False for chg_attr, (filt_list, filt_re, filt_fn) in self.checks.items(): if chg_attr.startswith("prop:"): chg_val = change.properties.getProperty( chg_attr.split(":", 1)[1], '') else: chg_val = getattr(change, chg_attr, '') if filt_list is not None and chg_val not in filt_list: return False if filt_re is not None and (chg_val is None or not filt_re.match(chg_val)): return False if filt_fn is not None and not filt_fn(chg_val): return False return True def __repr__(self): checks = [] for chg_attr, (filt_list, filt_re, filt_fn) in sorted(self.checks.items()): if filt_list is not None and len(filt_list) == 1: checks.append('%s == %s' % (chg_attr, filt_list[0])) elif filt_list is not None: checks.append('%s in %r' % (chg_attr, filt_list)) if filt_re is not None: checks.append('%s ~/%s/' % (chg_attr, filt_re)) if filt_fn is not None: checks.append('%s(%s)' % (filt_fn.__name__, chg_attr)) return "<%s on %s>" % (self.__class__.__name__, ' and '.join(checks)) @staticmethod def fromSchedulerConstructorArgs(change_filter=None, branch=NotABranch, categories=None): """ Static method to create a filter based on constructor args change_filter, branch, and categories; use default values @code{None}, @code{NotABranch}, and @code{None}, 
respectively. These arguments are interpreted as documented for the L{buildbot.schedulers.basic.Scheduler} class. @returns: L{ChangeFilter} instance or None for not filtering """ # use a change_filter, if given one if change_filter: if (branch is not NotABranch or categories is not None): raise RuntimeError("cannot specify both change_filter and " "branch or categories") return change_filter elif branch is not NotABranch or categories: # build a change filter from the deprecated category and branch # args cfargs = {} if branch is not NotABranch: cfargs['branch'] = branch if categories: cfargs['category'] = categories return ChangeFilter(**cfargs) else: return None buildbot-2.6.0/master/buildbot/changes/gerritchangesource.py000066400000000000000000000463361361162603000243010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import datetime import json from twisted.internet import defer from twisted.internet import reactor from twisted.internet import utils from twisted.internet.protocol import ProcessProtocol from twisted.python import log from buildbot import config from buildbot import util from buildbot.changes import base from buildbot.changes.filter import ChangeFilter from buildbot.util import bytes2unicode from buildbot.util import httpclientservice def _canonicalize_event(event): """ Return an event dictionary which is consistent between the gerrit event stream and the gerrit event log formats. 
""" # For "patchset-created" the events-log JSON looks like: # "project": {"name": "buildbot"} # while the stream-events JSON looks like: # "project": "buildbot" # so we canonicalize them to the latter if "change" not in event: return event change = event["change"] if "project" not in change: return event project = change["project"] if not isinstance(project, dict): return event if "name" not in project: return event event = copy.deepcopy(event) event["change"]["project"] = project["name"] return event class GerritChangeFilter(ChangeFilter): """This gerrit specific change filter helps creating pre-commit and post-commit builders""" def __init__(self, eventtype=None, eventtype_re=None, eventtype_fn=None, **kw): super().__init__(**kw) self.checks.update( self.createChecks( (eventtype, eventtype_re, eventtype_fn, "prop:event.type"), )) # for branch change filter, we take the real gerrit branch # instead of the change's branch, which is also used as a grouping key if "branch" in self.checks: self.checks["prop:event.change.branch"] = self.checks["branch"] del self.checks["branch"] def _gerrit_user_to_author(props, username="unknown"): """ Convert Gerrit account properties to Buildbot format Take into account missing values """ username = props.get("username", username) username = props.get("name", username) if "email" in props: username += " <%(email)s>" % props return username class GerritChangeSourceBase(base.ChangeSource): """This source will maintain a connection to gerrit ssh server that will provide us gerrit events in json format.""" compare_attrs = ("gerritserver", "gerritport") name = None # list of properties that are no of no use to be put in the event dict EVENT_PROPERTY_BLACKLIST = ["event.eventCreatedOn"] def checkConfig(self, gitBaseURL=None, handled_events=("patchset-created", "ref-updated"), debug=False, get_files=False): if gitBaseURL is None: config.error("gitBaseURL must be specified") def reconfigService(self, gitBaseURL=None, handled_events=("patchset-created", "ref-updated"), debug=False, get_files=False): self.gitBaseURL = gitBaseURL self.handled_events = list(handled_events) self._get_files = get_files self.debug = debug def lineReceived(self, line): try: event = json.loads(bytes2unicode(line)) except ValueError: msg = "bad json line: %s" log.msg(msg % line) return defer.succeed(None) if not(isinstance(event, dict) and "type" in event): if self.debug: msg = "no type in event %s" log.msg(msg % line) return defer.succeed(None) return self.eventReceived(event) def eventReceived(self, event): if not (event['type'] in self.handled_events): if self.debug: msg = "the event type '%s' is not setup to handle" log.msg(msg % event['type']) return defer.succeed(None) # flatten the event dictionary, for easy access with WithProperties def flatten(properties, base, event): for k, v in event.items(): name = "%s.%s" % (base, k) if name in self.EVENT_PROPERTY_BLACKLIST: continue if isinstance(v, dict): flatten(properties, name, v) else: # already there properties[name] = v properties = {} flatten(properties, "event", event) properties["event.source"] = self.__class__.__name__ func_name = "eventReceived_%s" % event["type"].replace("-", "_") func = getattr(self, func_name, None) if func is None: return self.addChangeFromEvent(properties, event) return func(properties, event) @defer.inlineCallbacks def addChange(self, chdict): stampdict = { "branch": chdict["branch"], "revision": chdict["revision"], "patch_author": chdict["author"], "patch_comment": chdict["comments"], "repository": 
chdict["repository"], "project": chdict["project"], } stampid, found_existing = yield( self.master.db.sourcestamps.findOrCreateId(**stampdict)) if found_existing: if self.debug or True: eventstr = "{}/{} -- {}:{}".format( self.gitBaseURL, chdict["project"], chdict["branch"], chdict["revision"]) message = ( "gerrit: duplicate change event {} by {}" .format(eventstr, self.__class__.__name__)) log.msg(message.encode("utf-8")) defer.returnValue(None) if self.debug: eventstr = "{} -- {}:{}".format( chdict["repository"], chdict["branch"], chdict["revision"]) message = ( "gerrit: adding change from {} in {}" .format(eventstr, self.__class__.__name__)) log.msg(message.encode("utf-8")) try: yield self.master.data.updates.addChange(**chdict) except Exception: # eat failures.. log.err('error adding change from GerritChangeSource') def getGroupingPolicyFromEvent(self, event): # At the moment, buildbot's change grouping strategy is hardcoded at various place # to be the 'branch' of an event. # With gerrit, you usually want to group by branch on post commit, and by changeid # on pre-commit. # we keep this customization point here, waiting to have a better grouping strategy support # in the core event_change = event["change"] if event['type'] in ('patchset-created',): return "%s/%s" % (event_change["branch"], event_change['number']) return event_change["branch"] @defer.inlineCallbacks def addChangeFromEvent(self, properties, event): if "change" not in event: if self.debug: log.msg("unsupported event %s" % (event["type"],)) return defer.returnValue(None) if "patchSet" not in event: if self.debug: log.msg("unsupported event %s" % (event["type"],)) return defer.returnValue(None) event = _canonicalize_event(event) event_change = event["change"] files = ["unknown"] if self._get_files: files = yield self.getFiles( change=event_change["number"], patchset=event["patchSet"]["number"] ) yield self.addChange({ 'author': _gerrit_user_to_author(event_change["owner"]), 'project': util.bytes2unicode(event_change["project"]), 'repository': "{}/{}".format( self.gitBaseURL, event_change["project"]), 'branch': self.getGroupingPolicyFromEvent(event), 'revision': event["patchSet"]["revision"], 'revlink': event_change["url"], 'comments': event_change["subject"], 'files': files, 'category': event["type"], 'properties': properties}) def eventReceived_ref_updated(self, properties, event): ref = event["refUpdate"] author = "gerrit" if "submitter" in event: author = _gerrit_user_to_author(event["submitter"], author) return self.addChange(dict( author=author, project=ref["project"], repository="%s/%s" % ( self.gitBaseURL, ref["project"]), branch=ref["refName"], revision=ref["newRev"], comments="Gerrit: patchset(s) merged.", files=["unknown"], category=event["type"], properties=properties)) class GerritChangeSource(GerritChangeSourceBase): """This source will maintain a connection to gerrit ssh server that will provide us gerrit events in json format.""" compare_attrs = ("gerritserver", "gerritport") STREAM_GOOD_CONNECTION_TIME = 120 "(seconds) connections longer than this are considered good, and reset the backoff timer" STREAM_BACKOFF_MIN = 0.5 "(seconds) minimum, but nonzero, time to wait before retrying a failed connection" STREAM_BACKOFF_EXPONENT = 1.5 "multiplier used to increase the backoff from MIN to MAX on repeated failures" STREAM_BACKOFF_MAX = 60 "(seconds) maximum time to wait before retrying a failed connection" name = None def checkConfig(self, gerritserver, username, gerritport=29418, identity_file=None, **kwargs): 
if self.name is None: self.name = "GerritChangeSource:%s@%s:%d" % ( username, gerritserver, gerritport) if 'gitBaseURL' not in kwargs: kwargs['gitBaseURL'] = "automatic at reconfigure" super().checkConfig(**kwargs) def reconfigService(self, gerritserver, username, gerritport=29418, identity_file=None, name=None, **kwargs): if 'gitBaseURL' not in kwargs: kwargs['gitBaseURL'] = "ssh://%s@%s:%s" % (username, gerritserver, gerritport) self.gerritserver = gerritserver self.gerritport = gerritport self.username = username self.identity_file = identity_file self.process = None self.wantProcess = False self.streamProcessTimeout = self.STREAM_BACKOFF_MIN return super().reconfigService(**kwargs) class LocalPP(ProcessProtocol): def __init__(self, change_source): self.change_source = change_source self.data = b"" @defer.inlineCallbacks def outReceived(self, data): """Do line buffering.""" self.data += data lines = self.data.split(b"\n") # last line is either empty or incomplete self.data = lines.pop(-1) for line in lines: if self.change_source.debug: log.msg(b"gerrit: " + line) yield self.change_source.lineReceived(line) def errReceived(self, data): if self.change_source.debug: log.msg(b"gerrit stderr: " + data) def processEnded(self, status_object): self.change_source.streamProcessStopped() def streamProcessStopped(self): self.process = None # if the service is stopped, don't try to restart the process if not self.wantProcess or not self.running: return now = util.now() if now - self.lastStreamProcessStart < \ self.STREAM_GOOD_CONNECTION_TIME: # bad startup; start the stream process again after a timeout, # and then increase the timeout log.msg( "'gerrit stream-events' failed; restarting after %ds" % round(self.streamProcessTimeout)) self.master.reactor.callLater( self.streamProcessTimeout, self.startStreamProcess) self.streamProcessTimeout *= self.STREAM_BACKOFF_EXPONENT if self.streamProcessTimeout > self.STREAM_BACKOFF_MAX: self.streamProcessTimeout = self.STREAM_BACKOFF_MAX else: # good startup, but lost connection; restart immediately, # and set the timeout to its minimum # make sure we log the reconnection, so that it might be detected # and network connectivity fixed log.msg("gerrit stream-events lost connection. 
Reconnecting...") self.startStreamProcess() self.streamProcessTimeout = self.STREAM_BACKOFF_MIN def _buildGerritCommand(self, *gerrit_args): '''Get an ssh command list which invokes gerrit with the given args on the remote host''' cmd = [ "ssh", "%s@%s" % (self.username, self.gerritserver), "-p", str(self.gerritport) ] if self.identity_file is not None: cmd.extend(["-i", self.identity_file]) cmd.append("gerrit") cmd.extend(gerrit_args) return cmd def startStreamProcess(self): if self.debug: log.msg("starting 'gerrit stream-events'") cmd = self._buildGerritCommand("stream-events") self.lastStreamProcessStart = util.now() self.process = reactor.spawnProcess(self.LocalPP(self), "ssh", cmd, env=None) @defer.inlineCallbacks def getFiles(self, change, patchset): cmd = self._buildGerritCommand("query", str(change), "--format", "JSON", "--files", "--patch-sets") if self.debug: log.msg("querying gerrit for changed files in change %s/%s: %s" % (change, patchset, cmd)) out = yield utils.getProcessOutput(cmd[0], cmd[1:], env=None) out = out.splitlines()[0] res = json.loads(bytes2unicode(out)) if res.get("rowCount") == 0: return ["unknown"] patchsets = {i["number"]: i["files"] for i in res["patchSets"]} return [i["file"] for i in patchsets[int(patchset)]] def activate(self): self.wantProcess = True self.startStreamProcess() def deactivate(self): self.wantProcess = False if self.process: self.process.signalProcess("KILL") # TODO: if this occurs while the process is restarting, some exceptions # may be logged, although things will settle down normally def describe(self): status = "" if not self.process: status = "[NOT CONNECTED - check log]" msg = ("GerritChangeSource watching the remote " "Gerrit repository %s@%s %s") return msg % (self.username, self.gerritserver, status) class GerritEventLogPoller(GerritChangeSourceBase): POLL_INTERVAL_SEC = 30 FIRST_FETCH_LOOKBACK_DAYS = 30 def checkConfig(self, baseURL, auth, pollInterval=POLL_INTERVAL_SEC, pollAtLaunch=True, firstFetchLookback=FIRST_FETCH_LOOKBACK_DAYS, **kwargs): if self.name is None: self.name = "GerritEventLogPoller:{}".format(baseURL) super().checkConfig(**kwargs) @defer.inlineCallbacks def reconfigService(self, baseURL, auth, pollInterval=POLL_INTERVAL_SEC, pollAtLaunch=True, firstFetchLookback=FIRST_FETCH_LOOKBACK_DAYS, **kwargs): yield super().reconfigService(**kwargs) if baseURL.endswith('/'): baseURL = baseURL[:-1] self._pollInterval = pollInterval self._pollAtLaunch = pollAtLaunch self._oid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__) self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, auth=auth) self._first_fetch_lookback = firstFetchLookback self._last_event_time = None @staticmethod def now(): """patchable now (datetime is not patchable as builtin)""" return datetime.datetime.utcnow() @defer.inlineCallbacks def poll(self): last_event_ts = yield self.master.db.state.getState(self._oid, 'last_event_ts', None) if last_event_ts is None: # If there is not last event time stored in the database, then set # the last event time to some historical look-back last_event = self.now() - datetime.timedelta(days=self._first_fetch_lookback) else: last_event = datetime.datetime.utcfromtimestamp(last_event_ts) last_event_formatted = last_event.strftime("%Y-%m-%d %H:%M:%S") if self.debug: log.msg("Polling gerrit: {}".format(last_event_formatted).encode("utf-8")) res = yield self._http.get("/plugins/events-log/events/", params=dict(t1=last_event_formatted)) lines = yield res.content() 
for line in lines.splitlines(): yield self.lineReceived(line) @defer.inlineCallbacks def eventReceived(self, event): res = yield super().eventReceived(event) if 'eventCreatedOn' in event: yield self.master.db.state.setState(self._oid, 'last_event_ts', event['eventCreatedOn']) return res @defer.inlineCallbacks def getFiles(self, change, patchset): res = yield self._http.get("/changes/%s/revisions/%s/files/" % (change, patchset)) res = yield res.content() res = res.splitlines()[1].decode('utf8') # the first line of every response is `)]}'` return list(json.loads(res)) # FIXME this copy the code from PollingChangeSource # but as PollingChangeSource and its subclasses need to be ported to reconfigurability # we can't use it right now @base.poll_method def doPoll(self): d = defer.maybeDeferred(self.poll) d.addErrback(log.err, 'while polling for changes') return d def force(self): self.doPoll() def activate(self): self.doPoll.start(interval=self._pollInterval, now=self._pollAtLaunch) def deactivate(self): return self.doPoll.stop() def describe(self): msg = ("GerritEventLogPoller watching the remote " "Gerrit repository {}") return msg.format(self.name) buildbot-2.6.0/master/buildbot/changes/github.py000066400000000000000000000253241361162603000216720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
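gerritchangesource.py above provides both the ssh `gerrit stream-events` based GerritChangeSource and the HTTP-polling GerritEventLogPoller. A hedged master.cfg-style sketch of wiring the ssh variant (host name, account and key path are illustrative):

from buildbot.plugins import changes

# master.cfg fragment (illustrative)
c = BuildmasterConfig = {}
c['change_source'] = [
    changes.GerritChangeSource(
        gerritserver='gerrit.example.com',   # illustrative host
        username='buildbot',                 # account used for 'gerrit stream-events' over ssh
        gerritport=29418,
        identity_file='/home/buildbot/.ssh/id_gerrit',   # illustrative key path
        handled_events=['patchset-created', 'ref-updated'],
        get_files=True,   # also query the changed files for each patchset
    ),
]
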
# # Copyright Buildbot Team Members from datetime import datetime from fnmatch import fnmatch from twisted.internet import defer from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import httpclientservice from buildbot.util.logger import Logger from buildbot.util.state import StateMixin log = Logger() HOSTED_BASE_URL = "https://api.github.com" link_urls = { "https": "clone_url", "svn": "svn_url", "git": "git_url", "ssh": "ssh_url" } class PullRequestMixin: def extractProperties(self, payload): def flatten(properties, base, info_dict): for k, v in info_dict.items(): name = ".".join([base, k]) if isinstance(v, dict): flatten(properties, name, v) elif any([fnmatch(name, expr) for expr in self.github_property_whitelist]): properties[name] = v properties = {} flatten(properties, "github", payload) return properties class GitHubPullrequestPoller(base.ReconfigurablePollingChangeSource, StateMixin, PullRequestMixin): compare_attrs = ("owner", "repo", "token", "branches", "pollInterval", "category", "pollAtLaunch", "name") db_class_name = 'GitHubPullrequestPoller' def __init__(self, owner, repo, **kwargs): name = kwargs.get("name") if not name: kwargs["name"] = "GitHubPullrequestPoller:" + owner + "/" + repo super(GitHubPullrequestPoller, self).__init__(owner, repo, **kwargs) def checkConfig(self, owner, repo, branches=None, category='pull', baseURL=None, pullrequest_filter=True, token=None, magic_link=False, repository_type="https", github_property_whitelist=None, **kwargs): if repository_type not in ["https", "svn", "git", "ssh"]: config.error( "repository_type must be one of {https, svn, git, ssh}") super().checkConfig(name=self.name, **kwargs) @defer.inlineCallbacks def reconfigService(self, owner, repo, branches=None, pollInterval=10 * 60, category=None, baseURL=None, pullrequest_filter=True, token=None, pollAtLaunch=False, magic_link=False, repository_type="https", github_property_whitelist=None, **kwargs): yield super().reconfigService(name=self.name, **kwargs) if baseURL is None: baseURL = HOSTED_BASE_URL if baseURL.endswith('/'): baseURL = baseURL[:-1] http_headers = {'User-Agent': 'Buildbot'} if token is not None: http_headers.update({'Authorization': 'token ' + token}) self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, headers=http_headers) self.token = token self.owner = owner self.repo = repo self.branches = branches self.github_property_whitelist = github_property_whitelist self.pollInterval = pollInterval self.pollAtLaunch = pollAtLaunch self.repository_type = link_urls[repository_type] self.magic_link = magic_link if github_property_whitelist is None: self.github_property_whitelist = [] if callable(pullrequest_filter): self.pullrequest_filter = pullrequest_filter else: self.pullrequest_filter = (lambda _: pullrequest_filter) self.category = category if callable(category) else bytes2unicode( category) def describe(self): return "GitHubPullrequestPoller watching the "\ "GitHub repository %s/%s" % ( self.owner, self.repo) @defer.inlineCallbacks def _getPullInformation(self, pull_number): result = yield self._http.get('/'.join( ['/repos', self.owner, self.repo, 'pulls', str(pull_number)])) my_json = yield result.json() return my_json @defer.inlineCallbacks def _getPulls(self): log.debug("GitHubPullrequestPoller: polling " "GitHub repository %s/%s, branches: %s" % (self.owner, self.repo, self.branches)) result = yield 
self._http.get('/'.join( ['/repos', self.owner, self.repo, 'pulls'])) my_json = yield result.json() return my_json @defer.inlineCallbacks def _getFiles(self, prnumber): result = yield self._http.get("/".join([ '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'files' ])) my_json = yield result.json() return [f["filename"] for f in my_json] @defer.inlineCallbacks def _getCommitters(self, prnumber): result = yield self._http.get("/".join([ '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'commits' ])) my_json = yield result.json() return [[c["commit"]["committer"]["name"], c["commit"]["committer"]["email"]] for c in my_json] @defer.inlineCallbacks def _getAuthors(self, prnumber): result = yield self._http.get("/".join([ '/repos', self.owner, self.repo, 'pulls', str(prnumber), 'commits' ])) my_json = yield result.json() return [[a["commit"]["author"]["name"], a["commit"]["author"]["email"]] for a in my_json] @defer.inlineCallbacks def _getCurrentRev(self, prnumber): # Get currently assigned revision of PR number result = yield self._getStateObjectId() rev = yield self.master.db.state.getState(result, 'pull_request%d' % prnumber, None) return rev @defer.inlineCallbacks def _setCurrentRev(self, prnumber, rev): # Set the updated revision for PR number. result = yield self._getStateObjectId() yield self.master.db.state.setState(result, 'pull_request%d' % prnumber, rev) @defer.inlineCallbacks def _getStateObjectId(self): # Return a deferred for object id in state db. result = yield self.master.db.state.getObjectId( '%s/%s' % (self.owner, self.repo), self.db_class_name) return result @defer.inlineCallbacks def _processChanges(self, github_result): for pr in github_result: # Track PRs for specified branches base_branch = pr['base']['ref'] prnumber = pr['number'] revision = pr['head']['sha'] # Check to see if the branch is set or matches if self.branches is not None and base_branch not in self.branches: continue if (self.pullrequest_filter is not None and not self.pullrequest_filter(pr)): continue current = yield self._getCurrentRev(prnumber) if not current or current[0:12] != revision[0:12]: # Access title, repo, html link, and comments pr = yield self._getPullInformation(prnumber) title = pr['title'] if self.magic_link: branch = 'refs/pull/{:d}/merge'.format(prnumber) repo = pr['base']['repo'][self.repository_type] else: branch = pr['head']['ref'] repo = pr['head']['repo'][self.repository_type] revlink = pr['html_url'] comments = pr['body'] updated = datetime.strptime(pr['updated_at'], '%Y-%m-%dT%H:%M:%SZ') # update database yield self._setCurrentRev(prnumber, revision) project = pr['base']['repo']['full_name'] commits = pr['commits'] dl = defer.DeferredList( [self._getAuthors(prnumber), self._getCommitters(prnumber), self._getFiles(prnumber)], consumeErrors=True) results = yield dl failures = [r[1] for r in results if not r[0]] if failures: for failure in failures: log.error("while processing changes for " "Pullrequest {} revision {}".format( prnumber, revision)) # Fail on the first error! failures[0].raiseException() [authors, committers, files] = [r[1] for r in results] author = authors[0][0] + " <" + authors[0][1] + ">" committer = committers[0][0] + " <" + committers[0][1] + ">" properties = self.extractProperties(pr) # emit the change yield self.master.data.updates.addChange( author=author, committer=committer, revision=bytes2unicode(revision), revlink=bytes2unicode(revlink), comments='GitHub Pull Request #{0} ({1} commit{2})\n{3}\n{4}'. 
format(prnumber, commits, 's' if commits > 0 else '', title, comments), when_timestamp=datetime2epoch(updated), branch=bytes2unicode(branch), category=self.category, project=project, repository=bytes2unicode(repo), files=files, properties=properties, src='git') @defer.inlineCallbacks def poll(self): result = yield self._getPulls() yield self._processChanges(result) buildbot-2.6.0/master/buildbot/changes/gitpoller.py000066400000000000000000000411501361162603000224040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import re import stat from urllib.parse import quote as urlquote from twisted.internet import defer from twisted.internet import utils from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import private_tempdir from buildbot.util.git import GitMixin from buildbot.util.git import getSshKnownHostsContents from buildbot.util.misc import writeLocalFile from buildbot.util.state import StateMixin class GitError(Exception): """Raised when git exits with code 128.""" class GitPoller(base.PollingChangeSource, StateMixin, GitMixin): """This source will poll a remote git repo for changes and submit them to the change master.""" compare_attrs = ("repourl", "branches", "workdir", "pollInterval", "gitbin", "usetimestamps", "category", "project", "pollAtLaunch", "buildPushesWithNoCommits", "sshPrivateKey", "sshHostKey", "sshKnownHosts") secrets = ("sshPrivateKey", "sshHostKey", "sshKnownHosts") def __init__(self, repourl, branches=None, branch=None, workdir=None, pollInterval=10 * 60, gitbin='git', usetimestamps=True, category=None, project=None, pollinterval=-2, fetch_refspec=None, encoding='utf-8', name=None, pollAtLaunch=False, buildPushesWithNoCommits=False, only_tags=False, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = repourl super().__init__(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, sshPrivateKey=sshPrivateKey, sshHostKey=sshHostKey, sshKnownHosts=sshKnownHosts) if project is None: project = '' if only_tags and (branch or branches): config.error("GitPoller: can't specify only_tags and branch/branches") if branch and branches: config.error("GitPoller: can't specify both branch and branches") elif branch: branches = [branch] elif not branches: if only_tags: branches = lambda ref: ref.startswith('refs/tags/') # noqa: E731 else: branches = ['master'] self.repourl = repourl self.branches = branches self.encoding = encoding self.buildPushesWithNoCommits = buildPushesWithNoCommits self.gitbin = gitbin self.workdir = workdir self.usetimestamps = usetimestamps self.category = category if callable( category) else bytes2unicode(category, 
encoding=self.encoding) self.project = bytes2unicode(project, encoding=self.encoding) self.changeCount = 0 self.lastRev = {} self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.setupGit(logname='GitPoller') if fetch_refspec is not None: config.error("GitPoller: fetch_refspec is no longer supported. " "Instead, only the given branches are downloaded.") if self.workdir is None: self.workdir = 'gitpoller-work' @defer.inlineCallbacks def _checkGitFeatures(self): stdout = yield self._dovccmd('--version', []) self.parseGitFeatures(stdout) if not self.gitInstalled: raise EnvironmentError('Git is not installed') if (self.sshPrivateKey is not None and not self.supportsSshPrivateKeyAsEnvOption): raise EnvironmentError('SSH private keys require Git 2.3.0 or newer') @defer.inlineCallbacks def activate(self): # make our workdir absolute, relative to the master's basedir if not os.path.isabs(self.workdir): self.workdir = os.path.join(self.master.basedir, self.workdir) log.msg("gitpoller: using workdir '{}'".format(self.workdir)) try: self.lastRev = yield self.getState('lastRev', {}) super().activate() except Exception as e: log.err(e, 'while initializing GitPoller repository') def describe(self): str = ('GitPoller watching the remote git repository ' + bytes2unicode(self.repourl, self.encoding)) if self.branches: if self.branches is True: str += ', branches: ALL' elif not callable(self.branches): str += ', branches: ' + ', '.join(self.branches) if not self.master: str += " [STOPPED - check log]" return str def _getBranches(self): d = self._dovccmd('ls-remote', ['--refs', self.repourl]) @d.addCallback def parseRemote(rows): branches = [] for row in rows.splitlines(): if '\t' not in row: # Not a useful line continue sha, ref = row.split("\t") branches.append(ref) return branches return d def _headsFilter(self, branch): """Filter out remote references that don't begin with 'refs/heads'.""" return branch.startswith("refs/heads/") def _removeHeads(self, branch): """Remove 'refs/heads/' prefix from remote references.""" if branch.startswith("refs/heads/"): branch = branch[11:] return branch def _trackerBranch(self, branch): # manually quote tilde for Python 3.7 url = urlquote(self.repourl, '').replace('~', '%7E') return "refs/buildbot/{}/{}".format(url, self._removeHeads(branch)) @defer.inlineCallbacks def poll(self): yield self._checkGitFeatures() try: yield self._dovccmd('init', ['--bare', self.workdir]) except GitError as e: log.msg(e.args[0]) return branches = self.branches if self.branches else [] remote_refs = yield self._getBranches() if branches is True or callable(branches): if callable(self.branches): branches = [b for b in remote_refs if self.branches(b)] else: branches = [b for b in remote_refs if self._headsFilter(b)] elif branches and remote_refs: remote_branches = [self._removeHeads(b) for b in remote_refs] branches = sorted(list(set(branches) & set(remote_branches))) refspecs = [ '+{}:{}'.format(self._removeHeads(branch), self._trackerBranch(branch)) for branch in branches ] try: yield self._dovccmd('fetch', [self.repourl] + refspecs, path=self.workdir) except GitError as e: log.msg(e.args[0]) return revs = {} log.msg('gitpoller: processing changes from "{}"'.format(self.repourl)) for branch in branches: try: rev = yield self._dovccmd( 'rev-parse', [self._trackerBranch(branch)], path=self.workdir) revs[branch] = bytes2unicode(rev, self.encoding) yield self._process_changes(revs[branch], branch) except Exception: log.err(_why="trying to poll 
branch {} of {}".format( branch, self.repourl)) self.lastRev.update(revs) yield self.setState('lastRev', self.lastRev) def _get_commit_comments(self, rev): args = ['--no-walk', r'--format=%s%n%b', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) return d def _get_commit_timestamp(self, rev): # unix timestamp args = ['--no-walk', r'--format=%ct', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) @d.addCallback def process(git_output): if self.usetimestamps: try: stamp = int(git_output) except Exception as e: log.msg( 'gitpoller: caught exception converting output \'{}\' to timestamp'.format(git_output)) raise e return stamp return None return d def _get_commit_files(self, rev): args = ['--name-only', '--no-walk', r'--format=%n', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) def decode_file(file): # git use octal char sequences in quotes when non ASCII match = re.match('^"(.*)"$', file) if match: file = bytes2unicode(match.groups()[0], encoding=self.encoding, errors='unicode_escape') return bytes2unicode(file, encoding=self.encoding) @d.addCallback def process(git_output): fileList = [decode_file(file) for file in [s for s in git_output.splitlines() if len(s)]] return fileList return d def _get_commit_author(self, rev): args = ['--no-walk', r'--format=%aN <%aE>', rev, '--'] d = self._dovccmd('log', args, path=self.workdir) @d.addCallback def process(git_output): if not git_output: raise EnvironmentError('could not get commit author for rev') return git_output return d @defer.inlineCallbacks def _get_commit_committer(self, rev): args = ['--no-walk', r'--format=%cN <%cE>', rev, '--'] res = yield self._dovccmd('log', args, path=self.workdir) if not res: raise EnvironmentError('could not get commit committer for rev') return res @defer.inlineCallbacks def _process_changes(self, newRev, branch): """ Read changes since last change. - Read list of commit hashes. - Extract details from each commit. - Add changes to database. """ # initial run, don't parse all history if not self.lastRev: return # get the change list revListArgs = (['--format=%H', '{}'.format(newRev)] + ['^' + rev for rev in sorted(self.lastRev.values())] + ['--']) self.changeCount = 0 results = yield self._dovccmd('log', revListArgs, path=self.workdir) # process oldest change first revList = results.split() revList.reverse() if self.buildPushesWithNoCommits and not revList: existingRev = self.lastRev.get(branch) if existingRev != newRev: revList = [newRev] if existingRev is None: # This branch was completely unknown, rebuild log.msg('gitpoller: rebuilding {} for new branch "{}"'.format( newRev, branch)) else: # This branch is known, but it now points to a different # commit than last time we saw it, rebuild. log.msg('gitpoller: rebuilding {} for updated branch "{}"'.format( newRev, branch)) self.changeCount = len(revList) self.lastRev[branch] = newRev if self.changeCount: log.msg('gitpoller: processing {} changes: {} from "{}" branch "{}"'.format( self.changeCount, revList, self.repourl, branch)) for rev in revList: dl = defer.DeferredList([ self._get_commit_timestamp(rev), self._get_commit_author(rev), self._get_commit_committer(rev), self._get_commit_files(rev), self._get_commit_comments(rev), ], consumeErrors=True) results = yield dl # check for failures failures = [r[1] for r in results if not r[0]] if failures: for failure in failures: log.err( failure, "while processing changes for {} {}".format(newRev, branch)) # just fail on the first error; they're probably all related! 
failures[0].raiseException() timestamp, author, committer, files, comments = [r[1] for r in results] yield self.master.data.updates.addChange( author=author, committer=committer, revision=bytes2unicode(rev, encoding=self.encoding), files=files, comments=comments, when_timestamp=timestamp, branch=bytes2unicode(self._removeHeads(branch)), project=self.project, repository=bytes2unicode(self.repourl, encoding=self.encoding), category=self.category, src='git') def _isSshPrivateKeyNeededForCommand(self, command): commandsThatNeedKey = [ 'fetch', 'ls-remote', ] if self.sshPrivateKey is not None and command in commandsThatNeedKey: return True return False def _downloadSshPrivateKey(self, keyPath): # We change the permissions of the key file to be user-readable only so # that ssh does not complain. This is not used for security because the # parent directory will have proper permissions. writeLocalFile(keyPath, self.sshPrivateKey, mode=stat.S_IRUSR) def _downloadSshKnownHosts(self, path): if self.sshKnownHosts is not None: contents = self.sshKnownHosts else: contents = getSshKnownHostsContents(self.sshHostKey) writeLocalFile(path, contents) def _getSshPrivateKeyPath(self, ssh_data_path): return os.path.join(ssh_data_path, 'ssh-key') def _getSshKnownHostsPath(self, ssh_data_path): return os.path.join(ssh_data_path, 'ssh-known-hosts') @defer.inlineCallbacks def _dovccmd(self, command, args, path=None): if self._isSshPrivateKeyNeededForCommand(command): with private_tempdir.PrivateTemporaryDirectory( dir=self.workdir, prefix='.buildbot-ssh') as tmp_path: stdout = yield self._dovccmdImpl(command, args, path, tmp_path) else: stdout = yield self._dovccmdImpl(command, args, path, None) return stdout @defer.inlineCallbacks def _dovccmdImpl(self, command, args, path, ssh_workdir): full_args = [] full_env = os.environ.copy() if self._isSshPrivateKeyNeededForCommand(command): key_path = self._getSshPrivateKeyPath(ssh_workdir) self._downloadSshPrivateKey(key_path) known_hosts_path = None if self.sshHostKey is not None or self.sshKnownHosts is not None: known_hosts_path = self._getSshKnownHostsPath(ssh_workdir) self._downloadSshKnownHosts(known_hosts_path) self.adjustCommandParamsForSshPrivateKey(full_args, full_env, key_path, None, known_hosts_path) full_args += [command] + args res = yield utils.getProcessOutputAndValue(self.gitbin, full_args, path=path, env=full_env) (stdout, stderr, code) = res stdout = bytes2unicode(stdout, self.encoding) stderr = bytes2unicode(stderr, self.encoding) if code != 0: if code == 128: raise GitError('command {} in {} on repourl {} failed with exit code {}: {}'.format( full_args, path, self.repourl, code, stderr)) raise EnvironmentError('command {} in {} on repourl {} failed with exit code {}: {}'.format( full_args, path, self.repourl, code, stderr)) return stdout.strip() buildbot-2.6.0/master/buildbot/changes/hgpoller.py000066400000000000000000000322141361162603000222200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import time from twisted.internet import defer from twisted.internet import utils from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.util import bytes2unicode from buildbot.util import deferredLocked from buildbot.util.state import StateMixin class HgPoller(base.PollingChangeSource, StateMixin): """This source will poll a remote hg repo for changes and submit them to the change master.""" compare_attrs = ("repourl", "branch", "branches", "bookmarks", "workdir", "pollInterval", "hgpoller", "usetimestamps", "category", "project", "pollAtLaunch") db_class_name = 'HgPoller' def __init__(self, repourl, branch=None, branches=None, bookmarks=None, workdir=None, pollInterval=10 * 60, hgbin='hg', usetimestamps=True, category=None, project='', pollinterval=-2, encoding='utf-8', name=None, pollAtLaunch=False, revlink=lambda branch, revision: ('') ): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval self.repourl = repourl if branch and branches: config.error("HgPoller: can't specify both branch and branches") elif branch: self.branches = [branch] else: self.branches = branches or [] self.bookmarks = bookmarks or [] if name is None: name = repourl if self.bookmarks: name += "_" + "_".join(self.bookmarks) if self.branches: name += "_" + "_".join(self.branches) if not self.branches and not self.bookmarks: self.branches = ['default'] if not callable(revlink): config.error( "You need to provide a valid callable for revlink") super().__init__(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch) self.encoding = encoding self.lastChange = time.time() self.lastPoll = time.time() self.hgbin = hgbin self.workdir = workdir self.usetimestamps = usetimestamps self.category = category if callable( category) else bytes2unicode(category) self.project = project self.initLock = defer.DeferredLock() self.lastRev = {} self.revlink_callable = revlink if self.workdir is None: config.error("workdir is mandatory for now in HgPoller") @defer.inlineCallbacks def activate(self): self.lastRev = yield self.getState('lastRev', {}) super().activate() def describe(self): status = "" if not self.master: status = "[STOPPED - check log]" return ("HgPoller watching the remote Mercurial repository %r, " "branches: %r, in workdir %r %s") % (self.repourl, ', '.join(self.branches), self.workdir, status) @deferredLocked('initLock') def poll(self): d = self._getChanges() d.addCallback(self._processChanges) d.addErrback(self._processChangesFailure) return d def _absWorkdir(self): workdir = self.workdir if os.path.isabs(workdir): return workdir return os.path.join(self.master.basedir, workdir) def _getRevDetails(self, rev): """Return a deferred for (date, author, files, comments) of given rev. Deferred will be in error if rev is unknown. 
""" args = ['log', '-r', rev, os.linesep.join(( '--template={date|hgdate}', '{author}', "{files % '{file}" + os.pathsep + "'}", '{desc|strip}'))] # Mercurial fails with status 255 if rev is unknown d = utils.getProcessOutput(self.hgbin, args, path=self._absWorkdir(), env=os.environ, errortoo=False) @d.addCallback def process(output): # all file names are on one line output = output.decode(self.encoding, "replace") date, author, files, comments = output.split( os.linesep, 3) if not self.usetimestamps: stamp = None else: try: stamp = float(date.split()[0]) except Exception: log.msg('hgpoller: caught exception converting output %r ' 'to timestamp' % date) raise return stamp, author.strip(), files.split(os.pathsep)[:-1], comments.strip() return d def _isRepositoryReady(self): """Easy to patch in tests.""" return os.path.exists(os.path.join(self._absWorkdir(), '.hg')) def _initRepository(self): """Have mercurial init the workdir as a repository (hg init) if needed. hg init will also create all needed intermediate directories. """ if self._isRepositoryReady(): return defer.succeed(None) log.msg('hgpoller: initializing working dir from %s' % self.repourl) d = utils.getProcessOutputAndValue(self.hgbin, ['init', self._absWorkdir()], env=os.environ) d.addCallback(self._convertNonZeroToFailure) d.addErrback(self._stopOnFailure) d.addCallback(lambda _: log.msg( "hgpoller: finished initializing working dir %r" % self.workdir)) return d def _getChanges(self): self.lastPoll = time.time() d = self._initRepository() d.addCallback(lambda _: log.msg( "hgpoller: polling hg repo at %s" % self.repourl)) # get a deferred object that performs the fetch args = ['pull'] for name in self.branches: args += ['-b', name] for name in self.bookmarks: args += ['-B', name] args += [self.repourl] # This command always produces data on stderr, but we actually do not # care about the stderr or stdout from this command. # We set errortoo=True to avoid an errback from the deferred. # The callback which will be added to this # deferred will not use the response. d.addCallback(lambda _: utils.getProcessOutput( self.hgbin, args, path=self._absWorkdir(), env=os.environ, errortoo=True)) return d def _getCurrentRev(self, branch='default'): """Return a deferred for current numeric rev in state db. If never has been set, current rev is None. """ return self.lastRev.get(branch, None) def _setCurrentRev(self, rev, branch='default'): """Return a deferred to set current revision in persistent state.""" self.lastRev[branch] = str(rev) return self.setState('lastRev', self.lastRev) def _getHead(self, branch): """Return a deferred for branch head revision or None. We'll get an error if there is no head for this branch, which is probably a good thing, since it's probably a misspelling (if really buildbotting a branch that does not have any changeset yet, one shouldn't be surprised to get errors) """ d = utils.getProcessOutput(self.hgbin, ['heads', '-r', branch, '--template={rev}' + os.linesep], path=self._absWorkdir(), env=os.environ, errortoo=False) @d.addErrback def no_head_err(exc): log.err("hgpoller: could not find revision %r in repository %r" % ( branch, self.repourl)) @d.addCallback def results(heads): if not heads: return if len(heads.split()) > 1: log.err(("hgpoller: caught several heads in branch %r " "from repository %r. 
Staying at previous revision" "You should wait until the situation is normal again " "due to a merge or directly strip if remote repo " "gets stripped later.") % (branch, self.repourl)) return # in case of whole reconstruction, are we sure that we'll get the # same node -> rev assignations ? return heads.strip().decode(self.encoding) return d @defer.inlineCallbacks def _processChanges(self, unused_output): """Send info about pulled changes to the master and record current. HgPoller does the recording by moving the working dir to the head of the branch. We don't update the tree (unnecessary treatment and waste of space) instead, we simply store the current rev number in a file. Recall that hg rev numbers are local and incremental. """ for branch in self.branches + self.bookmarks: rev = yield self._getHead(branch) if rev is None: # Nothing pulled? continue yield self._processBranchChanges(rev, branch) @defer.inlineCallbacks def _getRevNodeList(self, revset): revListArgs = ['log', '-r', revset, r'--template={rev}:{node}\n'] results = yield utils.getProcessOutput(self.hgbin, revListArgs, path=self._absWorkdir(), env=os.environ, errortoo=False) results = results.decode(self.encoding) revNodeList = [rn.split(':', 1) for rn in results.strip().split()] defer.returnValue(revNodeList) @defer.inlineCallbacks def _processBranchChanges(self, new_rev, branch): prev_rev = yield self._getCurrentRev(branch) if new_rev == prev_rev: # Nothing new. return if prev_rev is None: # First time monitoring; start at the top. yield self._setCurrentRev(new_rev, branch) return # two passes for hg log makes parsing simpler (comments is multi-lines) revNodeList = yield self._getRevNodeList('{}::{}'.format(prev_rev, new_rev)) # revsets are inclusive. Strip the already-known "current" changeset. if not revNodeList: # empty revNodeList probably means the branch has changed head (strip of force push?) 
# in that case, we should still produce a change for that new rev (but we can't know how many parents were pushed) revNodeList = yield self._getRevNodeList(new_rev) else: del revNodeList[0] log.msg('hgpoller: processing %d changes in branch %r: %r in %r' % (len(revNodeList), branch, revNodeList, self._absWorkdir())) for rev, node in revNodeList: timestamp, author, files, comments = yield self._getRevDetails( node) yield self.master.data.updates.addChange( author=author, committer=None, revision=str(node), revlink=self.revlink_callable(branch, str(node)), files=files, comments=comments, when_timestamp=int(timestamp) if timestamp else None, branch=bytes2unicode(branch), category=bytes2unicode(self.category), project=bytes2unicode(self.project), repository=bytes2unicode(self.repourl), src='hg') # writing after addChange so that a rev is never missed, # but at once to avoid impact from later errors yield self._setCurrentRev(new_rev, branch) def _processChangesFailure(self, f): log.msg('hgpoller: repo poll failed') log.err(f) # eat the failure to continue along the deferred chain - we still want # to catch up return None def _convertNonZeroToFailure(self, res): "utility method to handle the result of getProcessOutputAndValue" (stdout, stderr, code) = res if code != 0: raise EnvironmentError( 'command failed with exit code %d: %s' % (code, stderr)) return (stdout, stderr, code) def _stopOnFailure(self, f): "utility method to stop the service when a failure occurs" if self.running: d = defer.maybeDeferred(self.stopService) d.addErrback(log.err, 'while stopping broken HgPoller service') return f buildbot-2.6.0/master/buildbot/changes/mail.py000066400000000000000000000455641361162603000213420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Parse various kinds of 'CVS notify' email. 
""" import calendar import datetime import re import time from email import message_from_file from email.iterators import body_line_iterator from email.utils import mktime_tz from email.utils import parseaddr from email.utils import parsedate_tz from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import util from buildbot.interfaces import IChangeSource from buildbot.util.maildir import MaildirService @implementer(IChangeSource) class MaildirSource(MaildirService, util.ComparableMixin): """Generic base class for Maildir-based change sources""" compare_attrs = ("basedir", "pollinterval", "prefix") name = 'MaildirSource' def __init__(self, maildir, prefix=None, category='', repository=''): super().__init__(maildir) self.prefix = prefix self.category = category self.repository = repository if prefix and not prefix.endswith("/"): log.msg("%s: you probably want your prefix=('%s') to end with " "a slash") def describe(self): return "%s watching maildir '%s'" % (self.__class__.__name__, self.basedir) def messageReceived(self, filename): d = defer.succeed(None) @d.addCallback def parse_file(_): with self.moveToCurDir(filename) as f: parsedFile = self.parse_file(f, self.prefix) return parsedFile @d.addCallback def add_change(chtuple): src, chdict = None, None if chtuple: src, chdict = chtuple if chdict: return self.master.data.updates.addChange(src=str(src), **chdict) else: log.msg("no change found in maildir file '%s'" % filename) return d def parse_file(self, fd, prefix=None): m = message_from_file(fd) return self.parse(m, prefix) class CVSMaildirSource(MaildirSource): name = "CVSMaildirSource" def __init__(self, maildir, prefix=None, category='', repository='', properties=None): super().__init__(maildir, prefix, category, repository) if properties is None: properties = {} self.properties = properties def parse(self, m, prefix=None): """Parse messages sent by the 'buildbot-cvs-mail' program. """ # The mail is sent from the person doing the checkin. Assume that the # local username is enough to identify them (this assumes a one-server # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS # model) name, addr = parseaddr(m["from"]) if not addr: # no From means this message isn't from buildbot-cvs-mail return None at = addr.find("@") if at == -1: author = addr # might still be useful else: author = addr[:at] author = util.bytes2unicode(author, encoding="ascii") # CVS accepts RFC822 dates. buildbot-cvs-mail adds the date as # part of the mail header, so use that. # This assumes cvs is being access via ssh or pserver, so the time # will be the CVS server's time. # calculate a "revision" based on that timestamp, or the current time # if we're unable to parse the date. 
log.msg('Processing CVS mail') dateTuple = parsedate_tz(m["date"]) if dateTuple is None: when = util.now() else: when = mktime_tz(dateTuple) theTime = datetime.datetime.utcfromtimestamp(float(when)) rev = theTime.strftime('%Y-%m-%d %H:%M:%S') catRE = re.compile(r'^Category:\s*(\S.*)') cvsRE = re.compile(r'^CVSROOT:\s*(\S.*)') cvsmodeRE = re.compile(r'^Cvsmode:\s*(\S.*)') filesRE = re.compile(r'^Files:\s*(\S.*)') modRE = re.compile(r'^Module:\s*(\S.*)') pathRE = re.compile(r'^Path:\s*(\S.*)') projRE = re.compile(r'^Project:\s*(\S.*)') singleFileRE = re.compile(r'(.*) (NONE|\d(\.|\d)+) (NONE|\d(\.|\d)+)') tagRE = re.compile(r'^\s+Tag:\s*(\S.*)') updateRE = re.compile(r'^Update of:\s*(\S.*)') comments = "" branch = None cvsroot = None fileList = None files = [] isdir = 0 path = None project = None lines = list(body_line_iterator(m)) while lines: line = lines.pop(0) m = catRE.match(line) if m: category = m.group(1) continue m = cvsRE.match(line) if m: cvsroot = m.group(1) continue m = cvsmodeRE.match(line) if m: cvsmode = m.group(1) continue m = filesRE.match(line) if m: fileList = m.group(1) continue m = modRE.match(line) if m: # We don't actually use this # module = m.group(1) continue m = pathRE.match(line) if m: path = m.group(1) continue m = projRE.match(line) if m: project = m.group(1) continue m = tagRE.match(line) if m: branch = m.group(1) continue m = updateRE.match(line) if m: # We don't actually use this # updateof = m.group(1) continue if line == "Log Message:\n": break # CVS 1.11 lists files as: # repo/path file,old-version,new-version file2,old-version,new-version # Version 1.12 lists files as: # file1 old-version new-version file2 old-version new-version # # files consists of tuples of 'file-name old-version new-version' # The versions are either dotted-decimal version numbers, ie 1.1 # or NONE. New files are of the form 'NONE NUMBER', while removed # files are 'NUMBER NONE'. 'NONE' is a literal string # Parsing this instead of files list in 'Added File:' etc # makes it possible to handle files with embedded spaces, though # it could fail if the filename was 'bad 1.1 1.2' # For cvs version 1.11, we expect # my_module new_file.c,NONE,1.1 # my_module removed.txt,1.2,NONE # my_module modified_file.c,1.1,1.2 # While cvs version 1.12 gives us # new_file.c NONE 1.1 # removed.txt 1.2 NONE # modified_file.c 1.1,1.2 if fileList is None: log.msg('CVSMaildirSource Mail with no files. Ignoring') return None # We don't have any files. Email not from CVS if cvsmode == '1.11': # Please, no repo paths with spaces! m = re.search('([^ ]*) ', fileList) if m: path = m.group(1) else: log.msg( 'CVSMaildirSource can\'t get path from file list. Ignoring mail') return fileList = fileList[len(path):].strip() singleFileRE = re.compile( r'(.+?),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)') elif cvsmode == '1.12': singleFileRE = re.compile( r'(.+?) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)') if path is None: raise ValueError( 'CVSMaildirSource cvs 1.12 require path. Check cvs loginfo config') else: raise ValueError( 'Expected cvsmode 1.11 or 1.12. got: %s' % cvsmode) log.msg("CVSMaildirSource processing filelist: %s" % fileList) while(fileList): m = singleFileRE.match(fileList) if m: curFile = path + '/' + m.group(1) files.append(curFile) fileList = fileList[m.end():] else: log.msg('CVSMaildirSource no files matched regex. 
Ignoring') return None # bail - we couldn't parse the files that changed # Now get comments while lines: line = lines.pop(0) comments += line comments = comments.rstrip() + "\n" if comments == '\n': comments = None return ('cvs', dict(author=author, committer=None, files=files, comments=comments, isdir=isdir, when=when, branch=branch, revision=rev, category=category, repository=cvsroot, project=project, properties=self.properties)) # svn "commit-email.pl" handler. The format is very similar to freshcvs mail; # here's a sample: # From: username [at] apache.org [slightly obfuscated to avoid spam here] # To: commits [at] spamassassin.apache.org # Subject: svn commit: r105955 - in spamassassin/trunk: . lib/Mail # ... # # Author: username # Date: Sat Nov 20 00:17:49 2004 [note: TZ = local tz on server!] # New Revision: 105955 # # Modified: [also Removed: and Added:] # [filename] # ... # Log: # [log message] # ... # # # Modified: spamassassin/trunk/lib/Mail/SpamAssassin.pm # [unified diff] # # [end of mail] class SVNCommitEmailMaildirSource(MaildirSource): name = "SVN commit-email.pl" def parse(self, m, prefix=None): """Parse messages sent by the svn 'commit-email.pl' trigger. """ # The mail is sent from the person doing the checkin. Assume that the # local username is enough to identify them (this assumes a one-server # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS # model) name, addr = parseaddr(m["from"]) if not addr: return None # no From means this message isn't from svn at = addr.find("@") if at == -1: author = addr # might still be useful else: author = addr[:at] # we take the time of receipt as the time of checkin. Not correct (it # depends upon the email latency), but it avoids the # out-of-order-changes issue. Also syncmail doesn't give us anything # better to work with, unless you count pulling the v1-vs-v2 # timestamp out of the diffs, which would be ugly. TODO: Pulling the # 'Date:' header from the mail is a possibility, and # email.utils.parsedate_tz may be useful. It should be configurable, # however, because there are a lot of broken clocks out there. when = util.now() files = [] comments = "" lines = list(body_line_iterator(m)) rev = None while lines: line = lines.pop(0) # "Author: jmason" match = re.search(r"^Author: (\S+)", line) if match: author = match.group(1) # "New Revision: 105955" match = re.search(r"^New Revision: (\d+)", line) if match: rev = match.group(1) # possible TODO: use "Date: ..." data here instead of time of # commit message receipt, above. however, this timestamp is # specified *without* a timezone, in the server's local TZ, so to # be accurate buildbot would need a config setting to specify the # source server's expected TZ setting! messy. 
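            # once the "Log:" line is reached, the commit message is gathered
            # next, terminated by the "Modified:"/"Added:"/"Removed:" file
            # listing, whose entries are filtered against the configured
            # prefix below.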
# this stanza ends with the "Log:" if (line == "Log:\n"): break # commit message is terminated by the file-listing section while lines: line = lines.pop(0) if line in ("Modified:\n", "Added:\n", "Removed:\n"): break comments += line comments = comments.rstrip() + "\n" while lines: line = lines.pop(0) if line == "\n": break if line.find("Modified:\n") == 0: continue # ignore this line if line.find("Added:\n") == 0: continue # ignore this line if line.find("Removed:\n") == 0: continue # ignore this line line = line.strip() thesefiles = line.split(" ") for f in thesefiles: if prefix: # insist that the file start with the prefix: we may get # changes we don't care about too if f.startswith(prefix): f = f[len(prefix):] else: log.msg("ignored file from svn commit: prefix '%s' " "does not match filename '%s'" % (prefix, f)) continue # TODO: figure out how new directories are described, set # .isdir files.append(f) if not files: log.msg("no matching files found, ignoring commit") return None return ('svn', dict(author=author, committer=None, files=files, comments=comments, when=when, revision=rev)) # bzr Launchpad branch subscription mails. Sample mail: # # From: noreply@launchpad.net # Subject: [Branch ~knielsen/maria/tmp-buildbot-test] Rev 2701: test add file # To: Joe # ... # # ------------------------------------------------------------ # revno: 2701 # committer: Joe # branch nick: tmpbb # timestamp: Fri 2009-05-15 10:35:43 +0200 # message: # test add file # added: # test-add-file # # # -- # # https://code.launchpad.net/~knielsen/maria/tmp-buildbot-test # # You are subscribed to branch lp:~knielsen/maria/tmp-buildbot-test. # To unsubscribe from this branch go to # https://code.launchpad.net/~knielsen/maria/tmp-buildbot-test/+edit-subscription. # # [end of mail] class BzrLaunchpadEmailMaildirSource(MaildirSource): name = "Launchpad" compare_attrs = ("branchMap", "defaultBranch") def __init__(self, maildir, prefix=None, branchMap=None, defaultBranch=None, **kwargs): self.branchMap = branchMap self.defaultBranch = defaultBranch super().__init__(maildir, prefix, **kwargs) def parse(self, m, prefix=None): """Parse branch notification messages sent by Launchpad. """ subject = m["subject"] match = re.search(r"^\s*\[Branch\s+([^]]+)\]", subject) if match: repository = match.group(1) else: repository = None # Put these into a dictionary, otherwise we cannot assign them # from nested function definitions. d = {'files': [], 'comments': ""} gobbler = None rev = None author = None when = util.now() def gobble_comment(s): d['comments'] += s + "\n" def gobble_removed(s): d['files'].append('%s REMOVED' % s) def gobble_added(s): d['files'].append('%s ADDED' % s) def gobble_modified(s): d['files'].append('%s MODIFIED' % s) def gobble_renamed(s): match = re.search(r"^(.+) => (.+)$", s) if match: d['files'].append('%s RENAMED %s' % (match.group(1), match.group(2))) else: d['files'].append('%s RENAMED' % s) lines = list(body_line_iterator(m, True)) rev = None while lines: line = str(lines.pop(0), "utf-8", errors="ignore") # revno: 101 match = re.search(r"^revno: ([0-9.]+)", line) if match: rev = match.group(1) # committer: Joe match = re.search(r"^committer: (.*)$", line) if match: author = match.group(1) # timestamp: Fri 2009-05-15 10:35:43 +0200 # datetime.strptime() is supposed to support %z for time zone, but # it does not seem to work. So handle the time zone manually. 
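            # the regex below captures the local timestamp and the +/-HHMM
            # offset as separate groups; parseLaunchpadDate() (defined at
            # module level) combines them into a UTC epoch by applying the
            # offset.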
match = re.search( r"^timestamp: [a-zA-Z]{3} (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) ([-+])(\d{2})(\d{2})$", line) if match: datestr = match.group(1) tz_sign = match.group(2) tz_hours = match.group(3) tz_minutes = match.group(4) when = parseLaunchpadDate( datestr, tz_sign, tz_hours, tz_minutes) if re.search(r"^message:\s*$", line): gobbler = gobble_comment elif re.search(r"^removed:\s*$", line): gobbler = gobble_removed elif re.search(r"^added:\s*$", line): gobbler = gobble_added elif re.search(r"^renamed:\s*$", line): gobbler = gobble_renamed elif re.search(r"^modified:\s*$", line): gobbler = gobble_modified elif re.search(r"^ ", line) and gobbler: gobbler(line[2:-1]) # Use :-1 to gobble trailing newline # Determine the name of the branch. branch = None if self.branchMap and repository: if repository in self.branchMap: branch = self.branchMap[repository] elif ("lp:" + repository) in self.branchMap: branch = self.branchMap['lp:' + repository] if not branch: if self.defaultBranch: branch = self.defaultBranch else: if repository: branch = 'lp:' + repository else: branch = None if rev and author: return ('bzr', dict(author=author, committer=None, files=d['files'], comments=d['comments'], when=when, revision=rev, branch=branch, repository=repository or '')) return None def parseLaunchpadDate(datestr, tz_sign, tz_hours, tz_minutes): time_no_tz = calendar.timegm(time.strptime(datestr, "%Y-%m-%d %H:%M:%S")) tz_delta = 60 * 60 * int(tz_sign + tz_hours) + 60 * int(tz_minutes) return time_no_tz - tz_delta buildbot-2.6.0/master/buildbot/changes/manager.py000066400000000000000000000016611361162603000220200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.measured_service import MeasuredBuildbotServiceManager class ChangeManager(MeasuredBuildbotServiceManager): name = "ChangeManager" managed_services_name = "changesources" config_attr = "change_sources" buildbot-2.6.0/master/buildbot/changes/p4poller.py000066400000000000000000000321261361162603000221470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright 2011 National Instruments # Many thanks to Dave Peticolas for contributing this module import datetime import os import re import dateutil.tz from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import utils from twisted.python import log from buildbot import config from buildbot import util from buildbot.changes import base from buildbot.util import bytes2unicode debug_logging = False class P4PollerError(Exception): """Something went wrong with the poll. This is used as a distinctive exception type so that unit tests can detect and ignore it.""" class TicketLoginProtocol(protocol.ProcessProtocol): """ Twisted process protocol to run `p4 login` and enter our password in the stdin.""" def __init__(self, stdin, p4base): self.deferred = defer.Deferred() self.stdin = stdin.encode('ascii') self.stdout = b'' self.stderr = b'' self.p4base = p4base def connectionMade(self): if self.stdin: if debug_logging: log.msg("P4Poller: entering password for %s: %s" % (self.p4base, self.stdin)) self.transport.write(self.stdin) self.transport.closeStdin() def processEnded(self, reason): if debug_logging: log.msg("P4Poller: login process finished for %s: %s" % (self.p4base, reason.value.exitCode)) self.deferred.callback(reason.value.exitCode) def outReceived(self, data): if debug_logging: log.msg("P4Poller: login stdout for %s: %s" % (self.p4base, data)) self.stdout += data def errReceived(self, data): if debug_logging: log.msg("P4Poller: login stderr for %s: %s" % (self.p4base, data)) self.stderr += data def get_simple_split(branchfile): """Splits the branchfile argument and assuming branch is the first path component in branchfile, will return branch and file else None.""" index = branchfile.find('/') if index == -1: return None, None branch, file = branchfile.split('/', 1) return branch, file class P4Source(base.PollingChangeSource, util.ComparableMixin): """This source will poll a perforce repository for changes and submit them to the change master.""" compare_attrs = ("p4port", "p4user", "p4passwd", "p4base", "p4bin", "pollInterval", "pollAtLaunch", "server_tz") env_vars = ["P4CLIENT", "P4PORT", "P4PASSWD", "P4USER", "P4CHARSET", "PATH", "P4CONFIG"] changes_line_re = re.compile( r"Change (?P\d+) on \S+ by \S+@\S+ '.*'$") describe_header_re = re.compile( r"Change \d+ by (?P\S+)@\S+ on (?P.+)$") file_re = re.compile(r"^\.\.\. 
(?P[^#]+)#\d+ [/\w]+$") datefmt = '%Y/%m/%d %H:%M:%S' parent = None # filled in when we're added last_change = None loop = None def __init__(self, p4port=None, p4user=None, p4passwd=None, p4base='//', p4bin='p4', split_file=lambda branchfile: (None, branchfile), pollInterval=60 * 10, histmax=None, pollinterval=-2, encoding='utf8', project=None, name=None, use_tickets=False, ticket_login_interval=60 * 60 * 24, server_tz=None, pollAtLaunch=False, revlink=lambda branch, revision: (''), resolvewho=lambda who: (who)): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = "P4Source:%s:%s" % (p4port, p4base) super().__init__(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch) if project is None: project = '' if use_tickets and not p4passwd: config.error( "You need to provide a P4 password to use ticket authentication") if not callable(revlink): config.error( "You need to provide a valid callable for revlink") if not callable(resolvewho): config.error( "You need to provide a valid callable for resolvewho") self.p4port = p4port self.p4user = p4user self.p4passwd = p4passwd self.p4base = p4base self.p4bin = p4bin self.split_file = split_file self.encoding = encoding self.project = util.bytes2unicode(project) self.use_tickets = use_tickets self.ticket_login_interval = ticket_login_interval self.revlink_callable = revlink self.resolvewho_callable = resolvewho self.server_tz = dateutil.tz.gettz(server_tz) if server_tz else None if server_tz is not None and self.server_tz is None: raise P4PollerError("Failed to get timezone from server_tz string '{}'".format(server_tz)) self._ticket_passwd = None self._ticket_login_counter = 0 def describe(self): return "p4source %s %s" % (self.p4port, self.p4base) def poll(self): d = self._poll() d.addErrback(log.err, 'P4 poll failed on %s, %s' % (self.p4port, self.p4base)) return d def _get_process_output(self, args): env = {e: os.environ.get(e) for e in self.env_vars if os.environ.get(e)} d = utils.getProcessOutput(self.p4bin, args, env) return d def _acquireTicket(self, protocol): command = [self.p4bin, ] if self.p4port: command.extend(['-p', self.p4port]) if self.p4user: command.extend(['-u', self.p4user]) command.extend(['login', '-p']) command = [c.encode('utf-8') for c in command] reactor.spawnProcess(protocol, self.p4bin, command, env=os.environ) def _parseTicketPassword(self, stdout): try: stdout = stdout.decode(self.encoding, errors='strict') except Exception as e: raise P4PollerError('Failed to parse P4 ticket: {}'.format(e)) lines = stdout.splitlines() if len(lines) < 2: return None return lines[-1].strip() def _getPasswd(self): if self.use_tickets: return self._ticket_passwd return self.p4passwd @defer.inlineCallbacks def _poll(self): if self.use_tickets: self._ticket_login_counter -= 1 if self._ticket_login_counter <= 0: # Re-acquire the ticket and reset the counter. log.msg("P4Poller: (re)acquiring P4 ticket for %s..." 
% self.p4base) protocol = TicketLoginProtocol( self.p4passwd + "\n", self.p4base) self._acquireTicket(protocol) yield protocol.deferred self._ticket_passwd = self._parseTicketPassword( protocol.stdout) self._ticket_login_counter = max( self.ticket_login_interval / self.pollInterval, 1) if debug_logging: log.msg("P4Poller: got ticket password: %s" % self._ticket_passwd) log.msg( "P4Poller: next ticket acquisition in %d polls" % self._ticket_login_counter) args = [] if self.p4port: args.extend(['-p', self.p4port]) if self.p4user: args.extend(['-u', self.p4user]) if self.p4passwd: args.extend(['-P', self._getPasswd()]) args.extend(['changes']) if self.last_change is not None: args.extend( ['%s...@%d,#head' % (self.p4base, self.last_change + 1)]) else: args.extend(['-m', '1', '%s...' % (self.p4base,)]) result = yield self._get_process_output(args) # decode the result from its designated encoding try: result = bytes2unicode(result, self.encoding) except UnicodeError as ex: log.msg("{}: cannot fully decode {} in {}".format( ex, repr(result), self.encoding)) result = bytes2unicode(result, encoding=self.encoding, errors="replace") last_change = self.last_change changelists = [] for line in result.split('\n'): line = line.strip() if not line: continue m = self.changes_line_re.match(line) if not m: raise P4PollerError( "Unexpected 'p4 changes' output: %r" % result) num = int(m.group('num')) if last_change is None: # first time through, the poller just gets a "baseline" for where to # start on the next poll log.msg('P4Poller: starting at change %d' % num) self.last_change = num return changelists.append(num) changelists.reverse() # oldest first # Retrieve each sequentially. for num in changelists: args = [] if self.p4port: args.extend(['-p', self.p4port]) if self.p4user: args.extend(['-u', self.p4user]) if self.p4passwd: args.extend(['-P', self._getPasswd()]) args.extend(['describe', '-s', str(num)]) result = yield self._get_process_output(args) # decode the result from its designated encoding try: result = bytes2unicode(result, self.encoding) except UnicodeError as ex: log.msg( "P4Poller: couldn't decode changelist description: %s" % ex.encoding) log.msg("P4Poller: in object: %s" % ex.object) log.err("P4Poller: poll failed on %s, %s" % (self.p4port, self.p4base)) raise lines = result.split('\n') # SF#1555985: Wade Brainerd reports a stray ^M at the end of the date # field. The rstrip() is intended to remove that. lines[0] = lines[0].rstrip() m = self.describe_header_re.match(lines[0]) if not m: raise P4PollerError( "Unexpected 'p4 describe -s' result: %r" % result) who = self.resolvewho_callable(m.group('who')) when = datetime.datetime.strptime(m.group('when'), self.datefmt) if self.server_tz: # Convert from the server's timezone to the local timezone. 
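                # the naive datetime parsed from 'p4 describe' is tagged with
                # the configured server_tz so that util.datetime2epoch() can
                # account for the server's UTC offset when computing
                # when_timestamp.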
when = when.replace(tzinfo=self.server_tz) when = util.datetime2epoch(when) comment_lines = [] lines.pop(0) # describe header lines.pop(0) # blank line while not lines[0].startswith('Affected files'): if lines[0].startswith('\t'): # comment is indented by one tab comment_lines.append(lines.pop(0)[1:]) else: lines.pop(0) # discard non comment line comments = '\n'.join(comment_lines) lines.pop(0) # affected files branch_files = {} # dict for branch mapped to file(s) while lines: line = lines.pop(0).strip() if not line: continue m = self.file_re.match(line) if not m: raise P4PollerError("Invalid file line: %r" % line) path = m.group('path') if path.startswith(self.p4base): branch, file = self.split_file(path[len(self.p4base):]) if (branch is None and file is None): continue if branch in branch_files: branch_files[branch].append(file) else: branch_files[branch] = [file] for branch in branch_files: yield self.master.data.updates.addChange( author=who, committer=None, files=branch_files[branch], comments=comments, revision=str(num), when_timestamp=when, branch=branch, project=self.project, revlink=self.revlink_callable(branch, str(num))) self.last_change = num buildbot-2.6.0/master/buildbot/changes/pb.py000066400000000000000000000143141361162603000210060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.changes import base from buildbot.pbutil import NewCredPerspective class ChangePerspective(NewCredPerspective): def __init__(self, master, prefix): self.master = master self.prefix = prefix def attached(self, mind): return self def detached(self, mind): pass def perspective_addChange(self, changedict): log.msg("perspective_addChange called") if 'revlink' in changedict and not changedict['revlink']: changedict['revlink'] = '' if 'repository' in changedict and not changedict['repository']: changedict['repository'] = '' if 'project' in changedict and not changedict['project']: changedict['project'] = '' if 'files' not in changedict or not changedict['files']: changedict['files'] = [] if 'committer' in changedict and not changedict['committer']: changedict['committer'] = None # rename arguments to new names. Note that the client still uses the # "old" names (who, when, and isdir), as they are not deprecated yet, # although the master will accept the new names (author, # when_timestamp). After a few revisions have passed, we # can switch the client to use the new names. if 'who' in changedict: changedict['author'] = changedict['who'] del changedict['who'] if 'when' in changedict: changedict['when_timestamp'] = changedict['when'] del changedict['when'] # turn any bytestring keys into unicode, assuming utf8 but just # replacing unknown characters. 
Ideally client would send us unicode # in the first place, but older clients do not, so this fallback is # useful. for key in changedict: if isinstance(changedict[key], bytes): changedict[key] = changedict[key].decode('utf8', 'replace') changedict['files'] = list(changedict['files']) for i, file in enumerate(changedict.get('files', [])): if isinstance(file, bytes): changedict['files'][i] = file.decode('utf8', 'replace') files = [] for path in changedict['files']: if self.prefix: if not path.startswith(self.prefix): # this file does not start with the prefix, so ignore it continue path = path[len(self.prefix):] files.append(path) changedict['files'] = files if not files: log.msg("No files listed in change... bit strange, but not fatal.") if "links" in changedict: log.msg("Found links: " + repr(changedict['links'])) del changedict['links'] d = self.master.data.updates.addChange(**changedict) # set the return value to None, so we don't get users depending on # getting a changeid d.addCallback(lambda _: None) return d class PBChangeSource(base.ChangeSource): compare_attrs = ("user", "passwd", "port", "prefix", "port") def __init__(self, user="change", passwd="changepw", port=None, prefix=None, name=None): if name is None: if prefix: name = "PBChangeSource:%s:%s" % (prefix, port) else: name = "PBChangeSource:%s" % (port,) super().__init__(name=name) self.user = user self.passwd = passwd self.port = port self.prefix = prefix self.registration = None self.registered_port = None def describe(self): portname = self.registered_port d = "PBChangeSource listener on " + str(portname) if self.prefix is not None: d += " (prefix '%s')" % self.prefix return d def _calculatePort(self, cfg): # calculate the new port, defaulting to the worker's PB port if # none was specified port = self.port if port is None: port = cfg.protocols.get('pb', {}).get('port') return port @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): port = self._calculatePort(new_config) if not port: config.error("No port specified for PBChangeSource, and no " "worker port configured") # and, if it's changed, re-register if port != self.registered_port and self.isActive(): yield self._unregister() yield self._register(port) yield super().reconfigServiceWithBuildbotConfig(new_config) @defer.inlineCallbacks def activate(self): port = self._calculatePort(self.master.config) yield self._register(port) def deactivate(self): return self._unregister() @defer.inlineCallbacks def _register(self, port): if not port: return self.registered_port = port self.registration = yield self.master.pbmanager.register(port, self.user, self.passwd, self.getPerspective) def _unregister(self): self.registered_port = None if self.registration: reg = self.registration self.registration = None return reg.unregister() return defer.succeed(None) def getPerspective(self, mind, username): assert username == self.user return ChangePerspective(self.master, self.prefix) buildbot-2.6.0/master/buildbot/changes/svnpoller.py000066400000000000000000000424251361162603000224350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Based on the work of Dave Peticolas for the P4poll # Changed to svn (using xml.dom.minidom) by Niklaus Giger # Hacked beyond recognition by Brian Warner import os import xml.dom.minidom from urllib.parse import quote_plus as urlquote_plus from twisted.internet import defer from twisted.internet import utils from twisted.python import log from buildbot import util from buildbot.changes import base from buildbot.util import bytes2unicode # these split_file_* functions are available for use as values to the # split_file= argument. def split_file_alwaystrunk(path): return dict(path=path) def split_file_branches(path): # turn "trunk/subdir/file.c" into (None, "subdir/file.c") # and "trunk/subdir/" into (None, "subdir/") # and "trunk/" into (None, "") # and "branches/1.5.x/subdir/file.c" into ("branches/1.5.x", "subdir/file.c") # and "branches/1.5.x/subdir/" into ("branches/1.5.x", "subdir/") # and "branches/1.5.x/" into ("branches/1.5.x", "") pieces = path.split('/') if len(pieces) > 1 and pieces[0] == 'trunk': return (None, '/'.join(pieces[1:])) elif len(pieces) > 2 and pieces[0] == 'branches': return ('/'.join(pieces[0:2]), '/'.join(pieces[2:])) return None def split_file_projects_branches(path): # turn projectname/trunk/subdir/file.c into dict(project=projectname, # branch=trunk, path=subdir/file.c) if "/" not in path: return None project, path = path.split("/", 1) f = split_file_branches(path) if f: info = dict(project=project, path=f[1]) if f[0]: info['branch'] = f[0] return info return f class SVNPoller(base.PollingChangeSource, util.ComparableMixin): """ Poll a Subversion repository for changes and submit them to the change master. 
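    As an illustration of the split_file_* helpers defined above (behaviour
    as described in the comments on split_file_branches):

        >>> split_file_branches("trunk/subdir/file.c")
        (None, 'subdir/file.c')
        >>> split_file_branches("branches/1.5.x/subdir/file.c")
        ('branches/1.5.x', 'subdir/file.c')
        >>> split_file_branches("tags/1.5.0/file.c") is None
        True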
""" compare_attrs = ("repourl", "split_file", "svnuser", "svnpasswd", "project", "pollInterval", "histmax", "svnbin", "category", "cachepath", "pollAtLaunch") secrets = ("svnuser", "svnpasswd") parent = None # filled in when we're added last_change = None loop = None def __init__(self, repourl, split_file=None, svnuser=None, svnpasswd=None, pollInterval=10 * 60, histmax=100, svnbin='svn', revlinktmpl='', category=None, project='', cachepath=None, pollinterval=-2, extra_args=None, name=None, pollAtLaunch=False): # for backward compatibility; the parameter used to be spelled with 'i' if pollinterval != -2: pollInterval = pollinterval if name is None: name = repourl super().__init__(name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch, svnuser=svnuser, svnpasswd=svnpasswd) if repourl.endswith("/"): repourl = repourl[:-1] # strip the trailing slash self.repourl = repourl self.extra_args = extra_args self.split_file = split_file or split_file_alwaystrunk self.svnuser = svnuser self.svnpasswd = svnpasswd self.revlinktmpl = revlinktmpl # include environment variables required for ssh-agent auth self.environ = os.environ.copy() self.svnbin = svnbin self.histmax = histmax self._prefix = None self.category = category if callable( category) else util.bytes2unicode(category) self.project = util.bytes2unicode(project) self.cachepath = cachepath if self.cachepath and os.path.exists(self.cachepath): try: with open(self.cachepath, "r") as f: self.last_change = int(f.read().strip()) log.msg("SVNPoller: SVNPoller(%s) setting last_change to %s" % ( self.repourl, self.last_change)) # try writing it, too with open(self.cachepath, "w") as f: f.write(str(self.last_change)) except Exception: self.cachepath = None log.msg(("SVNPoller: SVNPoller(%s) cache file corrupt or unwriteable; " + "skipping and not using") % self.repourl) log.err() def describe(self): return "SVNPoller: watching %s" % self.repourl def poll(self): # Our return value is only used for unit testing. # we need to figure out the repository root, so we can figure out # repository-relative pathnames later. Each REPOURL is in the form # (ROOT)/(PROJECT)/(BRANCH)/(FILEPATH), where (ROOT) is something # like svn://svn.twistedmatrix.com/svn/Twisted (i.e. there is a # physical repository at /svn/Twisted on that host), (PROJECT) is # something like Projects/Twisted (i.e. within the repository's # internal namespace, everything under Projects/Twisted/ has # something to do with Twisted, but these directory names do not # actually appear on the repository host), (BRANCH) is something like # "trunk" or "branches/2.0.x", and (FILEPATH) is a tree-relative # filename like "twisted/internet/defer.py". # our self.repourl attribute contains (ROOT)/(PROJECT) combined # together in a way that we can't separate without svn's help. If the # user is not using the split_file= argument, then self.repourl might # be (ROOT)/(PROJECT)/(BRANCH) . In any case, the filenames we will # get back from 'svn log' will be of the form # (PROJECT)/(BRANCH)/(FILEPATH), but we want to be able to remove # that (PROJECT) prefix from them. To do this without requiring the # user to tell us how repourl is split into ROOT and PROJECT, we do an # 'svn info --xml' command at startup. This command will include a # element that tells us ROOT. 
We then strip this prefix from # self.repourl to determine PROJECT, and then later we strip the # PROJECT prefix from the filenames reported by 'svn log --xml' to # get a (BRANCH)/(FILEPATH) that can be passed to split_file() to # turn into separate BRANCH and FILEPATH values. # whew. if self.project: log.msg("SVNPoller: polling " + self.project) else: log.msg("SVNPoller: polling") d = defer.succeed(None) if not self._prefix: d.addCallback(lambda _: self.get_prefix()) @d.addCallback def set_prefix(prefix): self._prefix = prefix d.addCallback(self.get_logs) d.addCallback(self.parse_logs) d.addCallback(self.get_new_logentries) d.addCallback(self.create_changes) d.addCallback(self.submit_changes) d.addCallback(self.finished_ok) # eat errors d.addErrback(log.err, 'SVNPoller: Error in while polling') return d def getProcessOutput(self, args): # this exists so we can override it during the unit tests d = utils.getProcessOutput(self.svnbin, args, self.environ) return d def get_prefix(self): args = ["info", "--xml", "--non-interactive", self.repourl] if self.svnuser: args.append("--username=%s" % self.svnuser) if self.svnpasswd is not None: args.append("--password=%s" % self.svnpasswd) if self.extra_args: args.extend(self.extra_args) d = self.getProcessOutput(args) @d.addCallback def determine_prefix(output): try: doc = xml.dom.minidom.parseString(output) except xml.parsers.expat.ExpatError: log.msg("SVNPoller: SVNPoller.get_prefix: ExpatError in '%s'" % output) raise rootnodes = doc.getElementsByTagName("root") if not rootnodes: # this happens if the URL we gave was already the root. In this # case, our prefix is empty. self._prefix = "" return self._prefix rootnode = rootnodes[0] root = "".join([c.data for c in rootnode.childNodes]) # root will be a unicode string if not self.repourl.startswith(root): log.msg(format="Got root %(root)r from `svn info`, but it is " "not a prefix of the configured repourl", repourl=self.repourl, root=root) raise RuntimeError("Configured repourl doesn't match svn root") prefix = self.repourl[len(root):] if prefix.startswith("/"): prefix = prefix[1:] log.msg("SVNPoller: repourl=%s, root=%s, so prefix=%s" % (self.repourl, root, prefix)) return prefix return d def get_logs(self, _): args = [] args.extend(["log", "--xml", "--verbose", "--non-interactive"]) if self.svnuser: args.extend(["--username=%s" % self.svnuser]) if self.svnpasswd is not None: args.extend(["--password=%s" % self.svnpasswd]) if self.extra_args: args.extend(self.extra_args) args.extend(["--limit=%d" % (self.histmax), self.repourl]) d = self.getProcessOutput(args) return d def parse_logs(self, output): # parse the XML output, return a list of nodes try: doc = xml.dom.minidom.parseString(output) except xml.parsers.expat.ExpatError: log.msg( "SVNPoller: SVNPoller.parse_logs: ExpatError in '%s'" % output) raise logentries = doc.getElementsByTagName("logentry") return logentries def get_new_logentries(self, logentries): last_change = old_last_change = self.last_change # given a list of logentries, calculate new_last_change, and # new_logentries, where new_logentries contains only the ones after # last_change new_last_change = None new_logentries = [] if logentries: new_last_change = int(logentries[0].getAttribute("revision")) if last_change is None: # if this is the first time we've been run, ignore any changes # that occurred before now. This prevents a build at every # startup. 
log.msg('SVNPoller: starting at change %s' % new_last_change) elif last_change == new_last_change: # an unmodified repository will hit this case log.msg('SVNPoller: no changes') else: for el in logentries: if last_change == int(el.getAttribute("revision")): break new_logentries.append(el) new_logentries.reverse() # return oldest first self.last_change = new_last_change log.msg('SVNPoller: _process_changes %s .. %s' % (old_last_change, new_last_change)) return new_logentries def _get_text(self, element, tag_name): try: child_nodes = element.getElementsByTagName(tag_name)[0].childNodes text = "".join([t.data for t in child_nodes]) except IndexError: text = "unknown" return text def _transform_path(self, path): if not path.startswith(self._prefix): log.msg(format="SVNPoller: ignoring path '%(path)s' which doesn't" "start with prefix '%(prefix)s'", path=path, prefix=self._prefix) return relative_path = path[len(self._prefix):] if relative_path.startswith("/"): relative_path = relative_path[1:] where = self.split_file(relative_path) # 'where' is either None, (branch, final_path) or a dict if not where: return if isinstance(where, tuple): where = dict(branch=where[0], path=where[1]) return where def create_changes(self, new_logentries): changes = [] for el in new_logentries: revision = str(el.getAttribute("revision")) revlink = '' if self.revlinktmpl and revision: revlink = self.revlinktmpl % urlquote_plus(revision) revlink = str(revlink) log.msg("Adding change revision %s" % (revision,)) author = self._get_text(el, "author") comments = self._get_text(el, "msg") # there is a "date" field, but it provides localtime in the # repository's timezone, whereas we care about buildmaster's # localtime (since this will get used to position the boxes on # the Waterfall display, etc). 
So ignore the date field, and # addChange will fill in with the current time branches = {} try: pathlist = el.getElementsByTagName("paths")[0] except IndexError: # weird, we got an empty revision log.msg("ignoring commit with no paths") continue for p in pathlist.getElementsByTagName("path"): kind = p.getAttribute("kind") action = p.getAttribute("action") path = "".join([t.data for t in p.childNodes]) if path.startswith("/"): path = path[1:] if kind == "dir" and not path.endswith("/"): path += "/" where = self._transform_path(path) # if 'where' is None, the file was outside any project that # we care about and we should ignore it if where: branch = where.get("branch", None) filename = where["path"] if branch not in branches: branches[branch] = { 'files': [], 'number_of_directories': 0} if filename == "": # root directory of branch branches[branch]['files'].append(filename) branches[branch]['number_of_directories'] += 1 elif filename.endswith("/"): # subdirectory of branch branches[branch]['files'].append(filename[:-1]) branches[branch]['number_of_directories'] += 1 else: branches[branch]['files'].append(filename) if "action" not in branches[branch]: branches[branch]['action'] = action for key in ("repository", "project", "codebase"): if key in where: branches[branch][key] = where[key] for branch in branches: action = branches[branch]['action'] files = branches[branch]['files'] number_of_directories_changed = branches[ branch]['number_of_directories'] number_of_files_changed = len(files) if (action == 'D' and number_of_directories_changed == 1 and number_of_files_changed == 1 and files[0] == ''): log.msg("Ignoring deletion of branch '%s'" % branch) else: chdict = dict( author=author, committer=None, # weakly assume filenames are utf-8 files=[bytes2unicode(f, 'utf-8', 'replace') for f in files], comments=comments, revision=revision, branch=util.bytes2unicode(branch), revlink=revlink, category=self.category, repository=util.bytes2unicode( branches[branch].get('repository', self.repourl)), project=util.bytes2unicode( branches[branch].get('project', self.project)), codebase=util.bytes2unicode( branches[branch].get('codebase', None))) changes.append(chdict) return changes @defer.inlineCallbacks def submit_changes(self, changes): for chdict in changes: yield self.master.data.updates.addChange(src='svn', **chdict) def finished_ok(self, res): if self.cachepath: with open(self.cachepath, "w") as f: f.write(str(self.last_change)) log.msg("SVNPoller: finished polling %s" % res) return res buildbot-2.6.0/master/buildbot/clients/000077500000000000000000000000001361162603000200615ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/clients/__init__.py000066400000000000000000000000001361162603000221600ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/clients/base.py000066400000000000000000000051541361162603000213520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
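#
# --- Illustrative configuration sketch for the SVNPoller defined in
# svnpoller.py above.  The repository URL, cache path and revlink template
# are placeholder assumptions.
#
#     from buildbot.changes.svnpoller import SVNPoller, split_file_branches
#
#     c['change_source'] = [SVNPoller(
#         repourl="svn://svn.example.org/repos/myproject",
#         split_file=split_file_branches,
#         pollInterval=10 * 60,                    # the default
#         cachepath="/var/lib/buildbot/svnpoller.cache",
#         revlinktmpl="https://viewvc.example.org/?view=rev&revision=%s")]
#
# create_changes() applies revlinktmpl as `revlinktmpl % quoted_revision`,
# so the template should contain exactly one %s placeholder.
#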
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from twisted.spread import pb class StatusClient(pb.Referenceable): """To use this, call my .connected method with a RemoteReference to the buildmaster's StatusClientPerspective object. """ def __init__(self, events): self.builders = {} self.events = events def connected(self, remote): log.msg("connected") self.remote = remote remote.callRemote("subscribe", self.events, 5, self) def remote_builderAdded(self, buildername, builder): log.msg("builderAdded", buildername) def remote_builderRemoved(self, buildername): log.msg("builderRemoved", buildername) def remote_builderChangedState(self, buildername, state, eta): log.msg("builderChangedState", buildername, state, eta) def remote_buildStarted(self, buildername, build): log.msg("buildStarted", buildername) def remote_buildFinished(self, buildername, build, results): log.msg("buildFinished", results) def remote_buildETAUpdate(self, buildername, build, eta): log.msg("ETA", buildername, eta) def remote_stepStarted(self, buildername, build, stepname, step): log.msg("stepStarted", buildername, stepname) def remote_stepFinished(self, buildername, build, stepname, step, results): log.msg("stepFinished", buildername, stepname, results) def remote_logStarted(self, buildername, build, stepname, step, logname, log): log.msg("logStarted", buildername, stepname) def remote_logFinished(self, buildername, build, stepname, step, logname, log): log.msg("logFinished", buildername, stepname) def remote_logChunk(self, buildername, build, stepname, step, logname, log, channel, text): ChunkTypes = ["STDOUT", "STDERR", "HEADER"] log.msg("logChunk[%s]: %s" % (ChunkTypes[channel], text)) buildbot-2.6.0/master/buildbot/clients/sendchange.py000066400000000000000000000051301361162603000225310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.cred import credentials from twisted.internet import reactor from twisted.spread import pb from buildbot.util import unicode2bytes class Sender: def __init__(self, master, auth=('change', 'changepw'), encoding='utf8'): self.username = unicode2bytes(auth[0]) self.password = unicode2bytes(auth[1]) self.host, self.port = master.split(":") self.port = int(self.port) self.encoding = encoding def send(self, branch, revision, comments, files, who=None, category=None, when=None, properties=None, repository='', vc=None, project='', revlink='', codebase=None): if properties is None: properties = {} change = {'project': project, 'repository': repository, 'who': who, 'files': files, 'comments': comments, 'branch': branch, 'revision': revision, 'category': category, 'when': when, 'properties': properties, 'revlink': revlink, 'src': vc} # codebase is only sent if set; this won't work with masters older than # 0.8.7 if codebase: change['codebase'] = codebase for key in change: if isinstance(change[key], bytes): change[key] = change[key].decode(self.encoding, 'replace') change['files'] = list(change['files']) for i, file in enumerate(change.get('files', [])): if isinstance(file, bytes): change['files'][i] = file.decode(self.encoding, 'replace') f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(self.username, self.password)) reactor.connectTCP(self.host, self.port, f) @d.addCallback def call_addChange(remote): d = remote.callRemote('addChange', change) d.addCallback(lambda res: remote.broker.transport.loseConnection()) return d return d buildbot-2.6.0/master/buildbot/clients/tryclient.py000066400000000000000000000771141361162603000224620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
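#
# --- Illustrative usage sketch for the Sender defined in sendchange.py
# above; this is roughly what `buildbot sendchange` does.  The master
# address and change details are placeholder assumptions.
#
#     from twisted.internet import reactor
#     from buildbot.clients.sendchange import Sender
#
#     s = Sender('buildmaster.example.org:9999', auth=('change', 'changepw'))
#     d = s.send(branch='trunk', revision='1234',
#                comments='fix the widget frobnication',
#                files=['src/widget.c'], who='dev@example.org', vc='svn')
#     d.addBoth(lambda _: reactor.stop())
#     reactor.run()
#
# The Deferred returned by send() fires once the master has acknowledged the
# addChange call and the PB connection is being torn down.
#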
# # Copyright Buildbot Team Members import json import os import random import re import shlex import string import sys import time from twisted.cred import credentials from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import task from twisted.internet import utils from twisted.python import log from twisted.python import runtime from twisted.python.procutils import which from twisted.spread import pb from buildbot.status import builder from buildbot.util import bytes2unicode from buildbot.util import now from buildbot.util import unicode2bytes from buildbot.util.eventual import fireEventually class SourceStamp: def __init__(self, branch, revision, patch, repository=''): self.branch = branch self.revision = revision self.patch = patch self.repository = repository def output(*msg): print(' '.join([str(m)for m in msg])) class SourceStampExtractor: def __init__(self, treetop, branch, repository): self.treetop = treetop self.repository = repository self.branch = branch exes = which(self.vcexe) if not exes: output("Could not find executable '{}'.".format(self.vcexe)) sys.exit(1) self.exe = exes[0] def dovc(self, cmd): """This accepts the arguments of a command, without the actual command itself.""" env = os.environ.copy() env['LC_ALL'] = "C" d = utils.getProcessOutputAndValue(self.exe, cmd, env=env, path=self.treetop) d.addCallback(self._didvc, cmd) return d def _didvc(self, res, cmd): (stdout, stderr, code) = res # 'bzr diff' sets rc=1 if there were any differences. # cvs does something similar, so don't bother requiring rc=0. return stdout def get(self): """Return a Deferred that fires with a SourceStamp instance.""" d = self.getBaseRevision() d.addCallback(self.getPatch) d.addCallback(self.done) return d def readPatch(self, diff, patchlevel): if not diff: diff = None self.patch = (patchlevel, bytes2unicode(diff)) def done(self, res): if not self.repository: self.repository = self.treetop # TODO: figure out the branch and project too ss = SourceStamp(self.branch, self.baserev, self.patch, repository=self.repository) return ss class CVSExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "cvs" def getBaseRevision(self): # this depends upon our local clock and the repository's clock being # reasonably synchronized with each other. We express everything in # UTC because the '%z' format specifier for strftime doesn't always # work. self.baserev = time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime(now())) return defer.succeed(None) def getPatch(self, res): # the -q tells CVS to not announce each directory as it works if self.branch is not None: # 'cvs diff' won't take both -r and -D at the same time (it # ignores the -r). As best I can tell, there is no way to make # cvs give you a diff relative to a timestamp on the non-trunk # branch. A bare 'cvs diff' will tell you about the changes # relative to your checked-out versions, but I know of no way to # find out what those checked-out versions are. output("Sorry, CVS 'try' builds don't work with branches") sys.exit(1) args = ['-q', 'diff', '-u', '-D', self.baserev] d = self.dovc(args) d.addCallback(self.readPatch, self.patchlevel) return d class SVNExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "svn" def getBaseRevision(self): d = self.dovc(["status", "-u"]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): # svn shows the base revision for each file that has been modified or # which needs an update. 
You can update each file to a different # version, so each file is displayed with its individual base # revision. It also shows the repository-wide latest revision number # on the last line ("Status against revision: \d+"). # for our purposes, we use the latest revision number as the "base" # revision, and get a diff against that. This means we will get # reverse-diffs for local files that need updating, but the resulting # tree will still be correct. The only weirdness is that the baserev # that we emit may be different than the version of the tree that we # first checked out. # to do this differently would probably involve scanning the revision # numbers to find the max (or perhaps the min) revision, and then # using that as a base. for line in res.split(b"\n"): m = re.search(br'^Status against revision:\s+(\d+)', line) if m: self.baserev = m.group(1) return output( b"Could not find 'Status against revision' in SVN output: " + res) sys.exit(1) def getPatch(self, res): d = self.dovc(["diff", "-r{}".format(self.baserev)]) d.addCallback(self.readPatch, self.patchlevel) return d class BzrExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "bzr" def getBaseRevision(self): d = self.dovc(["revision-info", "-rsubmit:"]) d.addCallback(self.get_revision_number) return d def get_revision_number(self, out): revno, revid = out.split() self.baserev = 'revid:' + revid return def getPatch(self, res): d = self.dovc(["diff", "-r{}..".format(self.baserev)]) d.addCallback(self.readPatch, self.patchlevel) return d class MercurialExtractor(SourceStampExtractor): patchlevel = 1 vcexe = "hg" def _didvc(self, res, cmd): (stdout, stderr, code) = res if code: cs = ' '.join(['hg'] + cmd) if stderr: stderr = '\n' + stderr.rstrip() raise RuntimeError("{} returned {} {}".format(cs, code, stderr)) return stdout @defer.inlineCallbacks def getBaseRevision(self): upstream = "" if self.repository: upstream = "r'%s'" % self.repository output = '' try: output = yield self.dovc(["log", "--template", "{node}\\n", "-r", "max(::. 
- outgoing(%s))" % upstream]) except RuntimeError: # outgoing() will abort if no default-push/default path is # configured if upstream: raise # fall back to current working directory parent output = yield self.dovc(["log", "--template", "{node}\\n", "-r", "p1()"]) m = re.search(br'^(\w+)', output) if not m: raise RuntimeError( "Revision {!r} is not in the right format".format(output)) self.baserev = m.group(0) def getPatch(self, res): d = self.dovc(["diff", "-r", self.baserev]) d.addCallback(self.readPatch, self.patchlevel) return d class PerforceExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "p4" def getBaseRevision(self): d = self.dovc(["changes", "-m1", "..."]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): # # extract the base change number # m = re.search(br'Change (\d+)', res) if m: self.baserev = m.group(1) return output(b"Could not find change number in output: " + res) sys.exit(1) def readPatch(self, res, patchlevel): # # extract the actual patch from "res" # if not self.branch: output("you must specify a branch") sys.exit(1) mpatch = "" found = False for line in res.split("\n"): m = re.search('==== //depot/' + self.branch + r'/([\w/\.\d\-_]+)#(\d+) -', line) if m: mpatch += "--- %s#%s\n" % (m.group(1), m.group(2)) mpatch += "+++ %s\n" % (m.group(1)) found = True else: mpatch += line mpatch += "\n" if not found: output(b"could not parse patch file") sys.exit(1) self.patch = (patchlevel, mpatch) def getPatch(self, res): d = self.dovc(["diff"]) d.addCallback(self.readPatch, self.patchlevel) return d class DarcsExtractor(SourceStampExtractor): patchlevel = 1 vcexe = "darcs" def getBaseRevision(self): d = self.dovc(["changes", "--context"]) d.addCallback(self.parseStatus) return d def parseStatus(self, res): self.baserev = res # the whole context file def getPatch(self, res): d = self.dovc(["diff", "-u"]) d.addCallback(self.readPatch, self.patchlevel) return d class GitExtractor(SourceStampExtractor): patchlevel = 1 vcexe = "git" config = None def getBaseRevision(self): # If a branch is specified, parse out the rev it points to # and extract the local name. if self.branch: d = self.dovc(["rev-parse", self.branch]) d.addCallback(self.override_baserev) d.addCallback(self.extractLocalBranch) return d d = self.dovc(["branch", "--no-color", "-v", "--no-abbrev"]) d.addCallback(self.parseStatus) return d # remove remote-prefix from self.branch (assumes format /) # this uses "git remote" to retrieve all configured remote names def extractLocalBranch(self, res): if '/' in self.branch: d = self.dovc(["remote"]) d.addCallback(self.fixBranch) return d # strip remote prefix from self.branch def fixBranch(self, remotes): for l in bytes2unicode(remotes).split("\n"): r = l.strip() if r and self.branch.startswith(r + "/"): self.branch = self.branch[len(r) + 1:] break def readConfig(self): if self.config: return defer.succeed(self.config) d = self.dovc(["config", "-l"]) d.addCallback(self.parseConfig) return d def parseConfig(self, res): self.config = {} for l in res.split(b"\n"): if l.strip(): parts = l.strip().split(b"=", 2) if len(parts) < 2: parts.append('true') self.config[parts[0]] = parts[1] return self.config def parseTrackingBranch(self, res): # If we're tracking a remote, consider that the base. remote = self.config.get(b"branch." + self.branch + b".remote") ref = self.config.get(b"branch." 
+ self.branch + b".merge") if remote and ref: remote_branch = ref.split(b"/", 2)[-1] baserev = remote + b"/" + remote_branch else: baserev = b"master" d = self.dovc(["rev-parse", baserev]) d.addCallback(self.override_baserev) return d def override_baserev(self, res): self.baserev = bytes2unicode(res).strip() def parseStatus(self, res): # The current branch is marked by '*' at the start of the # line, followed by the branch name and the SHA1. # # Branch names may contain pretty much anything but whitespace. m = re.search(br'^\* (\S+)\s+([0-9a-f]{40})', res, re.MULTILINE) if m: self.baserev = m.group(2) self.branch = m.group(1) d = self.readConfig() d.addCallback(self.parseTrackingBranch) return d output(b"Could not find current GIT branch: " + res) sys.exit(1) def getPatch(self, res): d = self.dovc(["diff", "--src-prefix=a/", "--dst-prefix=b/", "--no-textconv", "--no-ext-diff", self.baserev]) d.addCallback(self.readPatch, self.patchlevel) return d class MonotoneExtractor(SourceStampExtractor): patchlevel = 0 vcexe = "mtn" def getBaseRevision(self): d = self.dovc(["automate", "get_base_revision_id"]) d.addCallback(self.parseStatus) return d def parseStatus(self, output): hash = output.strip() if len(hash) != 40: self.baserev = None self.baserev = hash def getPatch(self, res): d = self.dovc(["diff"]) d.addCallback(self.readPatch, self.patchlevel) return d def getSourceStamp(vctype, treetop, branch=None, repository=None): if vctype == "cvs": cls = CVSExtractor elif vctype == "svn": cls = SVNExtractor elif vctype == "bzr": cls = BzrExtractor elif vctype == "hg": cls = MercurialExtractor elif vctype == "p4": cls = PerforceExtractor elif vctype == "darcs": cls = DarcsExtractor elif vctype == "git": cls = GitExtractor elif vctype == "mtn": cls = MonotoneExtractor elif vctype == "none": return defer.succeed(SourceStamp("", "", (1, ""), "")) else: output("unknown vctype '{}'".format(vctype)) sys.exit(1) return cls(treetop, branch, repository).get() def ns(s): return "{}:{},".format(len(s), s) def createJobfile(jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties): # Determine job file version from provided arguments if properties: version = 5 elif comment: version = 4 elif who: version = 3 else: version = 2 job = "" job += ns(str(version)) if version < 5: job += ns(jobid) job += ns(branch) job += ns(str(baserev)) job += ns("{}".format(patch_level)) job += ns(patch_body or "") job += ns(repository) job += ns(project) if (version >= 3): job += ns(who) if (version >= 4): job += ns(comment) for bn in builderNames: job += ns(bn) else: job += ns( json.dumps({ 'jobid': jobid, 'branch': branch, 'baserev': str(baserev), 'patch_level': patch_level, 'patch_body': patch_body, 'repository': repository, 'project': project, 'who': who, 'comment': comment, 'builderNames': builderNames, 'properties': properties, })) return job def getTopdir(topfile, start=None): """walk upwards from the current directory until we find this topfile""" if not start: start = os.getcwd() here = start toomany = 20 while toomany > 0: if os.path.exists(os.path.join(here, topfile)): return here next = os.path.dirname(here) if next == here: break # we've hit the root here = next toomany -= 1 output("Unable to find topfile '{}' anywhere " "from {} upwards".format(topfile, start)) sys.exit(1) class RemoteTryPP(protocol.ProcessProtocol): def __init__(self, job): self.job = job self.d = defer.Deferred() def connectionMade(self): self.transport.write(unicode2bytes(self.job)) 
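# self.job is the string built by createJobfile() above: a sequence of
# netstrings produced by ns(), "<length>:<payload>," -- e.g.
# ns("trunk") == "5:trunk,".  The job-format version field comes first; for
# version 5 the remainder is a single JSON-encoded netstring, while older
# versions emit one netstring per field.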
self.transport.closeStdin() def outReceived(self, data): sys.stdout.write(bytes2unicode(data)) def errReceived(self, data): sys.stderr.write(bytes2unicode(data)) def processEnded(self, status_object): sig = status_object.value.signal rc = status_object.value.exitCode if sig is not None or rc != 0: self.d.errback(RuntimeError("remote 'buildbot tryserver' failed" ": sig={}, rc={}".format(sig, rc))) return self.d.callback((sig, rc)) class Try(pb.Referenceable): buildsetStatus = None quiet = False printloop = False def __init__(self, config): self.config = config self.connect = self.getopt('connect') if self.connect not in ['ssh', 'pb']: output("you must specify a connect style: ssh or pb") sys.exit(1) self.builderNames = self.getopt('builders') self.project = self.getopt('project', '') self.who = self.getopt('who') self.comment = self.getopt('comment') def getopt(self, config_name, default=None): value = self.config.get(config_name) if value is None or value == []: value = default return value def createJob(self): # returns a Deferred which fires when the job parameters have been # created # generate a random (unique) string. It would make sense to add a # hostname and process ID here, but a) I suspect that would cause # windows portability problems, and b) really this is good enough self.bsid = "{}-{}".format(time.time(), random.randint(0, 1000000)) # common options branch = self.getopt("branch") difffile = self.config.get("diff") if difffile: baserev = self.config.get("baserev") if difffile == "-": diff = sys.stdin.read() else: with open(difffile, "r") as f: diff = f.read() if not diff: diff = None patch = (self.config['patchlevel'], diff) ss = SourceStamp( branch, baserev, patch, repository=self.getopt("repository")) d = defer.succeed(ss) else: vc = self.getopt("vc") if vc in ("cvs", "svn"): # we need to find the tree-top topdir = self.getopt("topdir") if topdir: treedir = os.path.expanduser(topdir) else: topfile = self.getopt("topfile") if topfile: treedir = getTopdir(topfile) else: output("Must specify topdir or topfile.") sys.exit(1) else: treedir = os.getcwd() d = getSourceStamp(vc, treedir, branch, self.getopt("repository")) d.addCallback(self._createJob_1) return d def _createJob_1(self, ss): self.sourcestamp = ss patchlevel, diff = ss.patch if diff is None: raise RuntimeError("There is no patch to try, diff is empty.") if self.connect == "ssh": revspec = ss.revision if revspec is None: revspec = "" self.jobfile = createJobfile( self.bsid, ss.branch or "", revspec, patchlevel, diff, ss.repository, self.project, self.who, self.comment, self.builderNames, self.config.get('properties', {})) def fakeDeliverJob(self): # Display the job to be delivered, but don't perform delivery. 
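# The summary printed below mirrors what deliverJob() would actually submit:
# repository, project, branch, revision, the selected builders and the
# unified diff carried by the SourceStamp.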
ss = self.sourcestamp output("Job:\n\tRepository: {}\n\tProject: {}\n\tBranch: {}\n\t" "Revision: {}\n\tBuilders: {}\n{}".format( ss.repository, self.project, ss.branch, ss.revision, self.builderNames, ss.patch[1])) d = defer.Deferred() d.callback(True) return d def deliverJob(self): # returns a Deferred that fires when the job has been delivered if self.connect == "ssh": tryhost = self.getopt("host") tryport = self.getopt("port") tryuser = self.getopt("username") trydir = self.getopt("jobdir") buildbotbin = self.getopt("buildbotbin") ssh_command = self.getopt("ssh") if not ssh_command: ssh_commands = which("ssh") if not ssh_commands: raise RuntimeError("couldn't find ssh executable, make sure " "it is available in the PATH") argv = [ssh_commands[0]] else: # Split the string on whitespace to allow passing options in # ssh command too, but preserving whitespace inside quotes to # allow using paths with spaces in them which is common under # Windows. And because Windows uses backslashes in paths, we # can't just use shlex.split there as it would interpret them # specially, so do it by hand. if runtime.platformType == 'win32': # Note that regex here matches the arguments, not the # separators, as it's simpler to do it like this. And then we # just need to get all of them together using the slice and # also remove the quotes from those that were quoted. argv = [string.strip(a, '"') for a in re.split(r'''([^" ]+|"[^"]+")''', ssh_command)[1::2]] else: # Do use standard tokenization logic under POSIX. argv = shlex.split(ssh_command) if tryuser: argv += ["-l", tryuser] if tryport: argv += ["-p", tryport] argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir] pp = RemoteTryPP(self.jobfile) reactor.spawnProcess(pp, argv[0], argv, os.environ) d = pp.d return d if self.connect == "pb": user = self.getopt("username") passwd = self.getopt("passwd") master = self.getopt("master") tryhost, tryport = master.split(":") tryport = int(tryport) f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd))) reactor.connectTCP(tryhost, tryport, f) d.addCallback(self._deliverJob_pb) return d raise RuntimeError("unknown connecttype '{}', " "should be 'ssh' or 'pb'".format(self.connect)) def _deliverJob_pb(self, remote): ss = self.sourcestamp output("Delivering job; comment=", self.comment) d = remote.callRemote("try", ss.branch, ss.revision, ss.patch, ss.repository, self.project, self.builderNames, self.who, self.comment, self.config.get('properties', {})) d.addCallback(self._deliverJob_pb2) return d def _deliverJob_pb2(self, status): self.buildsetStatus = status return status def getStatus(self): # returns a Deferred that fires when the builds have finished, and # may emit status messages while we wait wait = bool(self.getopt("wait")) if not wait: output("not waiting for builds to finish") elif self.connect == "ssh": output("waiting for builds with ssh is not supported") else: self.running = defer.Deferred() assert self.buildsetStatus self._getStatus_1() return self.running def _getStatus_1(self, res=None): if res: self.buildsetStatus = res # gather the set of BuildRequests d = self.buildsetStatus.callRemote("getBuildRequests") d.addCallback(self._getStatus_2) def _getStatus_2(self, brs): self.builderNames = [] self.buildRequests = {} # self.builds holds the current BuildStatus object for each one self.builds = {} # self.outstanding holds the list of builderNames which haven't # finished yet self.outstanding = [] # self.results holds the list of build 
results. It holds a tuple of # (result, text) self.results = {} # self.currentStep holds the name of the Step that each build is # currently running self.currentStep = {} # self.ETA holds the expected finishing time (absolute time since # epoch) self.ETA = {} for n, br in brs: self.builderNames.append(n) self.buildRequests[n] = br self.builds[n] = None self.outstanding.append(n) self.results[n] = [None, None] self.currentStep[n] = None self.ETA[n] = None # get new Builds for this buildrequest. We follow each one until # it finishes or is interrupted. br.callRemote("subscribe", self) # now that those queries are in transit, we can start the # display-status-every-30-seconds loop if not self.getopt("quiet"): self.printloop = task.LoopingCall(self.printStatus) self.printloop.start(3, now=False) # these methods are invoked by the status objects we've subscribed to def remote_newbuild(self, bs, builderName): if self.builds[builderName]: self.builds[builderName].callRemote("unsubscribe", self) self.builds[builderName] = bs bs.callRemote("subscribe", self, 20) d = bs.callRemote("waitUntilFinished") d.addCallback(self._build_finished, builderName) def remote_stepStarted(self, buildername, build, stepname, step): self.currentStep[buildername] = stepname def remote_stepFinished(self, buildername, build, stepname, step, results): pass def remote_buildETAUpdate(self, buildername, build, eta): self.ETA[buildername] = now() + eta def _build_finished(self, bs, builderName): # we need to collect status from the newly-finished build. We don't # remove the build from self.outstanding until we've collected # everything we want. self.builds[builderName] = None self.ETA[builderName] = None self.currentStep[builderName] = "finished" d = bs.callRemote("getResults") d.addCallback(self._build_finished_2, bs, builderName) return d def _build_finished_2(self, results, bs, builderName): self.results[builderName][0] = results d = bs.callRemote("getText") d.addCallback(self._build_finished_3, builderName) return d def _build_finished_3(self, text, builderName): self.results[builderName][1] = text self.outstanding.remove(builderName) if not self.outstanding: # all done return self.statusDone() def printStatus(self): try: names = sorted(self.buildRequests.keys()) for n in names: if n not in self.outstanding: # the build is finished, and we have results code, text = self.results[n] t = builder.Results[code] if text: t += " ({})".format(" ".join(text)) elif self.builds[n]: t = self.currentStep[n] or "building" if self.ETA[n]: t += " [ETA {}s]".format(self.ETA[n] - now()) else: t = "no build" self.announce("{}: {}".format(n, t)) self.announce("") except Exception: log.err(None, "printing status") def statusDone(self): if self.printloop: self.printloop.stop() self.printloop = None output("All Builds Complete") # TODO: include a URL for all failing builds names = sorted(self.buildRequests.keys()) happy = True for n in names: code, text = self.results[n] t = "{}: {}".format(n, builder.Results[code]) if text: t += " ({})".format(" ".join(text)) output(t) if code != builder.SUCCESS: happy = False if happy: self.exitcode = 0 else: self.exitcode = 1 self.running.callback(self.exitcode) def getAvailableBuilderNames(self): # This logs into the master using the PB protocol to # get the names of the configured builders that can # be used for the --builder argument if self.connect == "pb": user = self.getopt("username") passwd = self.getopt("passwd") master = self.getopt("master") tryhost, tryport = master.split(":") tryport = 
int(tryport) f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd))) reactor.connectTCP(tryhost, tryport, f) d.addCallback(self._getBuilderNames) return d if self.connect == "ssh": output("Cannot get available builders over ssh.") sys.exit(1) raise RuntimeError( "unknown connecttype '{}', should be 'pb'".format(self.connect)) def _getBuilderNames(self, remote): d = remote.callRemote("getAvailableBuilderNames") d.addCallback(self._getBuilderNames2) d.addCallback(lambda _: remote.broker.transport.loseConnection()) return d def _getBuilderNames2(self, buildernames): output("The following builders are available for the try scheduler: ") for buildername in buildernames: output(buildername) def announce(self, message): if not self.quiet: output(message) def run(self, _inTests=False): # we can't do spawnProcess until we're inside reactor.run(), so get # funky output("using '{}' connect method".format(self.connect)) self.exitcode = 0 d = fireEventually(None) if bool(self.config.get("get-builder-names")): d.addCallback(lambda res: self.getAvailableBuilderNames()) else: d.addCallback(lambda res: self.createJob()) d.addCallback(lambda res: self.announce("job created")) deliver = self.deliverJob if bool(self.config.get("dryrun")): deliver = self.fakeDeliverJob d.addCallback(lambda res: deliver()) d.addCallback(lambda res: self.announce("job has been delivered")) d.addCallback(lambda res: self.getStatus()) d.addErrback(self.trapSystemExit) d.addErrback(log.err) d.addCallback(self.cleanup) if _inTests: return d d.addCallback(lambda res: reactor.stop()) reactor.run() sys.exit(self.exitcode) def trapSystemExit(self, why): why.trap(SystemExit) self.exitcode = why.value.code def cleanup(self, res=None): if self.buildsetStatus: self.buildsetStatus.broker.transport.loseConnection() buildbot-2.6.0/master/buildbot/clients/usersclient.py000066400000000000000000000036261361162603000230020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. from twisted.cred import credentials from twisted.internet import reactor from twisted.spread import pb class UsersClient: """ Client set up in buildbot.scripts.runner to send `buildbot user` args over a PB connection to perspective_commandline that will execute the args on the database. 
""" def __init__(self, master, username, password, port): self.host = master self.username = username self.password = password self.port = int(port) def send(self, op, bb_username, bb_password, ids, info): f = pb.PBClientFactory() d = f.login(credentials.UsernamePassword(self.username, self.password)) reactor.connectTCP(self.host, self.port, f) @d.addCallback def call_commandline(remote): d = remote.callRemote("commandline", op, bb_username, bb_password, ids, info) @d.addCallback def returnAndLose(res): remote.broker.transport.loseConnection() return res return d return d buildbot-2.6.0/master/buildbot/config.py000077500000000000000000001152071361162603000202500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import inspect import os import re import sys import traceback import warnings from types import MethodType from twisted.python import failure from twisted.python import log from twisted.python.compat import execfile from zope.interface import implementer from buildbot import interfaces from buildbot import locks from buildbot import util from buildbot.interfaces import IRenderable from buildbot.revlinks import default_revlink_matcher from buildbot.util import ComparableMixin from buildbot.util import bytes2unicode from buildbot.util import config as util_config from buildbot.util import identifiers as util_identifiers from buildbot.util import safeTranslate from buildbot.util import service as util_service from buildbot.www import auth from buildbot.www import avatar from buildbot.www.authz import authz class ConfigErrors(Exception): def __init__(self, errors=None): if errors is None: errors = [] self.errors = errors[:] def __str__(self): return "\n".join(self.errors) def addError(self, msg): self.errors.append(msg) def merge(self, errors): self.errors.extend(errors.errors) def __bool__(self): return bool(len(self.errors)) _errors = None DEFAULT_DB_URL = 'sqlite:///state.sqlite' RESERVED_UNDERSCORE_NAMES = ["__Janitor"] def error(error, always_raise=False): if _errors is not None and not always_raise: _errors.addError(error) else: raise ConfigErrors([error]) class ConfigWarning(Warning): """ Warning for deprecated configuration options. 
""" def warnDeprecated(version, msg): warnings.warn( "[%s and later] %s" % (version, msg), category=ConfigWarning, ) _in_unit_tests = False def loadConfigDict(basedir, configFileName): if not os.path.isdir(basedir): raise ConfigErrors([ "basedir '%s' does not exist" % (basedir,), ]) filename = os.path.join(basedir, configFileName) if not os.path.exists(filename): raise ConfigErrors([ "configuration file '%s' does not exist" % (filename,), ]) try: with open(filename, "r"): pass except IOError as e: raise ConfigErrors([ "unable to open configuration file %r: %s" % (filename, e), ]) log.msg("Loading configuration from %r" % (filename,)) # execute the config file localDict = { 'basedir': os.path.expanduser(basedir), '__file__': os.path.abspath(filename), } old_sys_path = sys.path[:] sys.path.append(basedir) try: try: execfile(filename, localDict) except ConfigErrors: raise except SyntaxError: error("encountered a SyntaxError while parsing config file:\n%s " % (traceback.format_exc(),), always_raise=True, ) except Exception: log.err(failure.Failure(), 'error while parsing config file:') error("error while parsing config file: %s (traceback in logfile)" % (sys.exc_info()[1],), always_raise=True, ) finally: sys.path[:] = old_sys_path if 'BuildmasterConfig' not in localDict: error("Configuration file %r does not define 'BuildmasterConfig'" % (filename,), always_raise=True, ) return filename, localDict['BuildmasterConfig'] @implementer(interfaces.IConfigLoader) class FileLoader(ComparableMixin): compare_attrs = ['basedir', 'configFileName'] def __init__(self, basedir, configFileName): self.basedir = basedir self.configFileName = configFileName def loadConfig(self): # from here on out we can batch errors together for the user's # convenience global _errors _errors = errors = ConfigErrors() try: filename, config_dict = loadConfigDict( self.basedir, self.configFileName) config = MasterConfig.loadFromDict(config_dict, filename) except ConfigErrors as e: errors.merge(e) finally: _errors = None if errors: raise errors return config class MasterConfig(util.ComparableMixin): def __init__(self): # local import to avoid circular imports from buildbot.process import properties # default values for all attributes # global self.title = 'Buildbot' self.titleURL = 'http://buildbot.net' self.buildbotURL = 'http://localhost:8080/' self.changeHorizon = None self.logCompressionLimit = 4 * 1024 self.logCompressionMethod = 'gz' self.logEncoding = 'utf-8' self.logMaxSize = None self.logMaxTailSize = None self.properties = properties.Properties() self.collapseRequests = None self.codebaseGenerator = None self.prioritizeBuilders = None self.multiMaster = False self.manhole = None self.protocols = {} self.buildbotNetUsageData = "basic" self.validation = dict( branch=re.compile(r'^[\w.+/~-]*$'), revision=re.compile(r'^[ \w\.\-/]*$'), property_name=re.compile(r'^[\w\.\-/~:]*$'), property_value=re.compile(r'^[\w\.\-/~:]*$'), ) self.db = dict( db_url=DEFAULT_DB_URL, ) self.mq = dict( type='simple', ) self.metrics = None self.caches = dict( Builds=15, Changes=10, ) self.schedulers = {} self.secretsProviders = [] self.builders = [] self.workers = [] self.change_sources = [] self.machines = [] self.status = [] self.user_managers = [] self.revlink = default_revlink_matcher self.www = dict( port=None, plugins=dict(), auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=avatar.AvatarGravatar(), logfileName='http.log', ) self.services = {} _known_config_keys = set([ "buildbotNetUsageData", "buildbotURL", "buildCacheSize", 
"builders", "buildHorizon", "caches", "change_source", "codebaseGenerator", "configurators", "changeCacheSize", "changeHorizon", 'db', "db_poll_interval", "db_url", "logCompressionLimit", "logCompressionMethod", "logEncoding", "logHorizon", "logMaxSize", "logMaxTailSize", "manhole", "machines", "collapseRequests", "metrics", "mq", "multiMaster", "prioritizeBuilders", "projectName", "projectURL", "properties", "protocols", "revlink", "schedulers", "secretsProviders", "services", # we had c['status'] = [] for a while in our default master.cfg # so we need to keep it there "status", "title", "titleURL", "user_managers", "validation", "www", "workers", ]) compare_attrs = list(_known_config_keys) def preChangeGenerator(self, **kwargs): return { 'author': kwargs.get('author', None), 'files': kwargs.get('files', None), 'comments': kwargs.get('comments', None), 'revision': kwargs.get('revision', None), 'when_timestamp': kwargs.get('when_timestamp', None), 'branch': kwargs.get('branch', None), 'category': kwargs.get('category', None), 'revlink': kwargs.get('revlink', ''), 'properties': kwargs.get('properties', {}), 'repository': kwargs.get('repository', ''), 'project': kwargs.get('project', ''), 'codebase': kwargs.get('codebase', None) } @classmethod def loadFromDict(cls, config_dict, filename): # warning, all of this is loaded from a thread global _errors _errors = errors = ConfigErrors() # check for unknown keys unknown_keys = set(config_dict.keys()) - cls._known_config_keys if unknown_keys: if len(unknown_keys) == 1: error('Unknown BuildmasterConfig key %s' % (unknown_keys.pop())) else: error('Unknown BuildmasterConfig keys %s' % (', '.join(sorted(unknown_keys)))) # instantiate a new config object, which will apply defaults # automatically config = cls() # and defer the rest to sub-functions, for code clarity try: config.run_configurators(filename, config_dict) config.load_global(filename, config_dict) config.load_validation(filename, config_dict) config.load_db(filename, config_dict) config.load_mq(filename, config_dict) config.load_metrics(filename, config_dict) config.load_secrets(filename, config_dict) config.load_caches(filename, config_dict) config.load_schedulers(filename, config_dict) config.load_builders(filename, config_dict) config.load_workers(filename, config_dict) config.load_change_sources(filename, config_dict) config.load_machines(filename, config_dict) config.load_user_managers(filename, config_dict) config.load_www(filename, config_dict) config.load_services(filename, config_dict) # run some sanity checks config.check_single_master() config.check_schedulers() config.check_locks() config.check_builders() config.check_ports() config.check_machines() finally: _errors = None if errors: raise errors return config def run_configurators(self, filename, config_dict): for configurator in config_dict.get('configurators', []): interfaces.IConfigurator(configurator).configure(config_dict) def load_global(self, filename, config_dict): def copy_param(name, alt_key=None, check_type=None, check_type_name=None, can_be_callable=False): if name in config_dict: v = config_dict[name] elif alt_key and alt_key in config_dict: v = config_dict[alt_key] else: return if v is not None and check_type and not ( isinstance(v, check_type) or (can_be_callable and callable(v))): error("c['%s'] must be %s" % (name, check_type_name)) else: setattr(self, name, v) def copy_int_param(name, alt_key=None): copy_param(name, alt_key=alt_key, check_type=int, check_type_name='an int') def copy_str_param(name, 
alt_key=None): copy_param(name, alt_key=alt_key, check_type=(str,), check_type_name='a string') copy_str_param('title', alt_key='projectName') max_title_len = 18 if len(self.title) > max_title_len: # Warn if the title length limiting logic in www/base/src/app/app.route.js # would hide the title. warnings.warn('WARNING: Title is too long to be displayed. ' + '"Buildbot" will be used instead.', category=ConfigWarning) copy_str_param('titleURL', alt_key='projectURL') copy_str_param('buildbotURL') def copy_str_or_callable_param(name, alt_key=None): copy_param(name, alt_key=alt_key, check_type=(str,), check_type_name='a string or callable', can_be_callable=True) if "buildbotNetUsageData" not in config_dict: if _in_unit_tests: self.buildbotNetUsageData = None else: warnDeprecated( '0.9.0', '`buildbotNetUsageData` is not configured and defaults to basic.\n' 'This parameter helps the buildbot development team to understand' ' the installation base.\n' 'No personal information is collected.\n' 'Only installation software version info and plugin usage is sent.\n' 'You can `opt-out` by setting this variable to None.\n' 'Or `opt-in` for more information by setting it to "full".\n' ) copy_str_or_callable_param('buildbotNetUsageData') for horizon in ('logHorizon', 'buildHorizon', 'eventHorizon'): if horizon in config_dict: warnDeprecated( '0.9.0', "NOTE: `{}` is deprecated and ignored " "They are replaced by util.JanitorConfigurator".format(horizon)) if 'status' in config_dict: warnDeprecated( '0.9.0', "NOTE: `status` targets are deprecated and ignored " "They are replaced by reporters") copy_int_param('changeHorizon') copy_int_param('logCompressionLimit') self.logCompressionMethod = config_dict.get( 'logCompressionMethod', 'gz') if self.logCompressionMethod not in ('raw', 'bz2', 'gz', 'lz4'): error( "c['logCompressionMethod'] must be 'raw', 'bz2', 'gz' or 'lz4'") if self.logCompressionMethod == "lz4": try: import lz4 # pylint: disable=import-outside-toplevel [lz4] except ImportError: error( "To set c['logCompressionMethod'] to 'lz4' you must install the lz4 library ('pip install lz4')") copy_int_param('logMaxSize') copy_int_param('logMaxTailSize') copy_param('logEncoding') properties = config_dict.get('properties', {}) if not isinstance(properties, dict): error("c['properties'] must be a dictionary") else: self.properties.update(properties, filename) collapseRequests = config_dict.get('collapseRequests') if (collapseRequests not in (None, True, False) and not callable(collapseRequests)): error("collapseRequests must be a callable, True, or False") else: self.collapseRequests = collapseRequests codebaseGenerator = config_dict.get('codebaseGenerator') if (codebaseGenerator is not None and not callable(codebaseGenerator)): error( "codebaseGenerator must be a callable accepting a dict and returning a str") else: self.codebaseGenerator = codebaseGenerator prioritizeBuilders = config_dict.get('prioritizeBuilders') if prioritizeBuilders is not None and not callable(prioritizeBuilders): error("prioritizeBuilders must be a callable") else: self.prioritizeBuilders = prioritizeBuilders protocols = config_dict.get('protocols', {}) if isinstance(protocols, dict): for proto, options in protocols.items(): if not isinstance(proto, str): error("c['protocols'] keys must be strings") if not isinstance(options, dict): error("c['protocols']['%s'] must be a dict" % proto) return if proto == "wamp": self.check_wamp_proto(options) else: error("c['protocols'] must be dict") return self.protocols = protocols if 'multiMaster' in 
config_dict: self.multiMaster = config_dict["multiMaster"] if 'debugPassword' in config_dict: log.msg( "the 'debugPassword' parameter is unused and can be removed from the configuration file") if 'manhole' in config_dict: # we don't check that this is a manhole instance, since that # requires importing buildbot.manhole for every user, and currently # that will fail if cryptography isn't installed self.manhole = config_dict['manhole'] if 'revlink' in config_dict: revlink = config_dict['revlink'] if not callable(revlink): error("revlink must be a callable") else: self.revlink = revlink def load_validation(self, filename, config_dict): validation = config_dict.get("validation", {}) if not isinstance(validation, dict): error("c['validation'] must be a dictionary") else: unknown_keys = ( set(validation.keys()) - set(self.validation.keys())) if unknown_keys: error("unrecognized validation key(s): %s" % (", ".join(unknown_keys))) else: self.validation.update(validation) @staticmethod def getDbUrlFromConfig(config_dict, throwErrors=True): if 'db' in config_dict: db = config_dict['db'] if set(db.keys()) - set(['db_url', 'db_poll_interval']) and throwErrors: error("unrecognized keys in c['db']") config_dict = db if 'db_poll_interval' in config_dict and throwErrors: warnDeprecated( "0.8.7", "db_poll_interval is deprecated and will be ignored") # we don't attempt to parse db URLs here - the engine strategy will do # so. if 'db_url' in config_dict: return config_dict['db_url'] return DEFAULT_DB_URL def load_db(self, filename, config_dict): self.db = dict(db_url=self.getDbUrlFromConfig(config_dict)) def load_mq(self, filename, config_dict): from buildbot.mq import connector # avoid circular imports if 'mq' in config_dict: self.mq.update(config_dict['mq']) classes = connector.MQConnector.classes typ = self.mq.get('type', 'simple') if typ not in classes: error("mq type '%s' is not known" % (typ,)) return known_keys = classes[typ]['keys'] unk = set(self.mq.keys()) - known_keys - set(['type']) if unk: error("unrecognized keys in c['mq']: %s" % (', '.join(unk),)) def load_metrics(self, filename, config_dict): # we don't try to validate metrics keys if 'metrics' in config_dict: metrics = config_dict["metrics"] if not isinstance(metrics, dict): error("c['metrics'] must be a dictionary") else: self.metrics = metrics def load_secrets(self, filename, config_dict): if 'secretsProviders' in config_dict: secretsProviders = config_dict["secretsProviders"] if not isinstance(secretsProviders, list): error("c['secretsProviders'] must be a list") else: self.secretsProviders = secretsProviders def load_caches(self, filename, config_dict): explicit = False if 'caches' in config_dict: explicit = True caches = config_dict['caches'] if not isinstance(caches, dict): error("c['caches'] must be a dictionary") else: for (name, value) in caches.items(): if not isinstance(value, int): error("value for cache size '%s' must be an integer" % name) return if value < 1: error("'%s' cache size must be at least 1, got '%s'" % (name, value)) self.caches.update(caches) if 'buildCacheSize' in config_dict: if explicit: msg = "cannot specify c['caches'] and c['buildCacheSize']" error(msg) self.caches['Builds'] = config_dict['buildCacheSize'] if 'changeCacheSize' in config_dict: if explicit: msg = "cannot specify c['caches'] and c['changeCacheSize']" error(msg) self.caches['Changes'] = config_dict['changeCacheSize'] def load_schedulers(self, filename, config_dict): if 'schedulers' not in config_dict: return schedulers = 
config_dict['schedulers'] ok = True if not isinstance(schedulers, (list, tuple)): ok = False else: for s in schedulers: if not interfaces.IScheduler.providedBy(s): ok = False if not ok: msg = "c['schedulers'] must be a list of Scheduler instances" error(msg) # convert from list to dict, first looking for duplicates seen_names = set() for s in schedulers: if s.name in seen_names: error("scheduler name '%s' used multiple times" % s.name) seen_names.add(s.name) self.schedulers = dict((s.name, s) for s in schedulers) def load_builders(self, filename, config_dict): if 'builders' not in config_dict: return builders = config_dict['builders'] if not isinstance(builders, (list, tuple)): error("c['builders'] must be a list") return # convert all builder configs to BuilderConfig instances def mapper(b): if isinstance(b, BuilderConfig): return b elif isinstance(b, dict): return BuilderConfig(**b) else: error("%r is not a builder config (in c['builders']" % (b,)) builders = [mapper(b) for b in builders] for builder in builders: if builder and os.path.isabs(builder.builddir): warnings.warn( "Absolute path '%s' for builder may cause " "mayhem. Perhaps you meant to specify workerbuilddir " "instead.", category=ConfigWarning, ) self.builders = builders @staticmethod def _check_workers(workers, conf_key): if not isinstance(workers, (list, tuple)): error("{0} must be a list".format(conf_key)) return False for worker in workers: if not interfaces.IWorker.providedBy(worker): msg = "{} must be a list of Worker instances but there is {!r}".format( conf_key, worker) error(msg) return False def validate(workername): if workername in ("debug", "change", "status"): yield "worker name %r is reserved" % workername if not util_identifiers.ident_re.match(workername): yield "worker name %r is not an identifier" % workername if not workername: yield "worker name %r cannot be an empty string" % workername if len(workername) > 50: yield "worker name %r is longer than %d characters" % (workername, 50) errors = list(validate(worker.workername)) for msg in errors: error(msg) if errors: return False return True def load_workers(self, filename, config_dict): workers = config_dict.get('workers') if workers is None: return if not self._check_workers(workers, "c['workers']"): return self.workers = workers[:] def load_change_sources(self, filename, config_dict): change_source = config_dict.get('change_source', []) if isinstance(change_source, (list, tuple)): change_sources = change_source else: change_sources = [change_source] for s in change_sources: if not interfaces.IChangeSource.providedBy(s): msg = "c['change_source'] must be a list of change sources" error(msg) return self.change_sources = change_sources def load_machines(self, filename, config_dict): if 'machines' not in config_dict: return machines = config_dict['machines'] msg = "c['machines'] must be a list of machines" if not isinstance(machines, (list, tuple)): error(msg) return for m in machines: if not interfaces.IMachine.providedBy(m): error(msg) return self.machines = machines def load_user_managers(self, filename, config_dict): if 'user_managers' not in config_dict: return user_managers = config_dict['user_managers'] msg = "c['user_managers'] must be a list of user managers" if not isinstance(user_managers, (list, tuple)): error(msg) return self.user_managers = user_managers def load_www(self, filename, config_dict): if 'www' not in config_dict: return www_cfg = config_dict['www'] allowed = {'port', 'debug', 'json_cache_seconds', 'rest_minimum_version', 
'allowed_origins', 'jsonp', 'plugins', 'auth', 'authz', 'avatar_methods', 'logfileName', 'logRotateLength', 'maxRotatedFiles', 'versions', 'change_hook_dialects', 'change_hook_auth', 'default_page', 'custom_templates_dir', 'cookie_expiration_time', 'ui_default_config'} unknown = set(list(www_cfg)) - allowed if unknown: error("unknown www configuration parameter(s) %s" % (', '.join(unknown),)) versions = www_cfg.get('versions') if versions is not None: cleaned_versions = [] if not isinstance(versions, list): error('Invalid www configuration value of versions') else: for i, v in enumerate(versions): if not isinstance(v, tuple) or len(v) < 2: error('Invalid www configuration value of versions') break cleaned_versions.append(v) www_cfg['versions'] = cleaned_versions cookie_expiration_time = www_cfg.get('cookie_expiration_time') if cookie_expiration_time is not None: if not isinstance(cookie_expiration_time, datetime.timedelta): error( 'Invalid www["cookie_expiration_time"] configuration should be a datetime.timedelta') self.www.update(www_cfg) def load_services(self, filename, config_dict): if 'services' not in config_dict: return self.services = {} for _service in config_dict['services']: if not isinstance(_service, util_service.BuildbotService): error("%s object should be an instance of " "buildbot.util.service.BuildbotService" % type(_service)) continue if _service.name in self.services: error('Duplicate service name %r' % _service.name) continue self.services[_service.name] = _service def check_single_master(self): # check additional problems that are only valid in a single-master # installation if self.multiMaster: return if not self.workers: error("no workers are configured") if not self.builders: error("no builders are configured") # check that all builders are implemented on this master unscheduled_buildernames = {b.name for b in self.builders} for s in self.schedulers.values(): builderNames = s.listBuilderNames() if interfaces.IRenderable.providedBy(builderNames): unscheduled_buildernames.clear() else: for n in builderNames: if interfaces.IRenderable.providedBy(n): unscheduled_buildernames.clear() elif n in unscheduled_buildernames: unscheduled_buildernames.remove(n) if unscheduled_buildernames: error("builder(s) %s have no schedulers to drive them" % (', '.join(unscheduled_buildernames),)) def check_schedulers(self): # don't perform this check in multiMaster mode if self.multiMaster: return all_buildernames = {b.name for b in self.builders} for s in self.schedulers.values(): builderNames = s.listBuilderNames() if interfaces.IRenderable.providedBy(builderNames): continue for n in builderNames: if interfaces.IRenderable.providedBy(n): continue if n not in all_buildernames: error("Unknown builder '%s' in scheduler '%s'" % (n, s.name)) def check_locks(self): # assert that all locks used by the Builds and their Steps are # uniquely named. 
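# Editor's illustrative note (not part of the original source): the duplicate-name
# check below trips when two *distinct* lock objects happen to share one name, e.g.
#
#     from buildbot.plugins import util
#     lock_a = util.MasterLock("compile")
#     lock_b = util.MasterLock("compile")   # same name, different object: config error
#
# Reusing one lock object (or access handles derived from it via lock.access())
# across several builders or steps is fine and is the intended way to share a lock.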
lock_dict = {} def check_lock(lock): if isinstance(lock, locks.LockAccess): lock = lock.lockid if lock.name in lock_dict: if lock_dict[lock.name] is not lock: msg = "Two locks share the same name, '%s'" % lock.name error(msg) else: lock_dict[lock.name] = lock for b in self.builders: if b.locks and not IRenderable.providedBy(b.locks): for lock in b.locks: check_lock(lock) def check_builders(self): # look both for duplicate builder names, and for builders pointing # to unknown workers workernames = {w.workername for w in self.workers} seen_names = set() seen_builddirs = set() for b in self.builders: unknowns = set(b.workernames) - workernames if unknowns: error("builder '%s' uses unknown workers %s" % (b.name, ", ".join(repr(u) for u in unknowns))) if b.name in seen_names: error("duplicate builder name '%s'" % b.name) seen_names.add(b.name) if b.builddir in seen_builddirs: error("duplicate builder builddir '%s'" % b.builddir) seen_builddirs.add(b.builddir) def check_ports(self): ports = set() if self.protocols: for proto, options in self.protocols.items(): if proto == 'null': port = -1 else: port = options.get("port") if not port: continue if isinstance(port, int): # Conversion needed to compare listenTCP and strports ports port = "tcp:%d" % port if port != -1 and port in ports: error("Some of ports in c['protocols'] duplicated") ports.add(port) if ports: return if self.workers: error("workers are configured, but c['protocols'] not") def check_machines(self): seen_names = set() for mm in self.machines: if mm.name in seen_names: error("duplicate machine name '{}'".format(mm.name)) seen_names.add(mm.name) for w in self.workers: if w.machine_name is not None and w.machine_name not in seen_names: error("worker '{}' uses unknown machine '{}'".format( w.name, w.machine_name)) class BuilderConfig(util_config.ConfiguredMixin): def __init__(self, name=None, workername=None, workernames=None, builddir=None, workerbuilddir=None, factory=None, tags=None, category=None, nextWorker=None, nextBuild=None, locks=None, env=None, properties=None, collapseRequests=None, description=None, canStartBuild=None, defaultProperties=None ): # name is required, and can't start with '_' if not name or type(name) not in (bytes, str): error("builder's name is required") name = '' elif name[0] == '_' and name not in RESERVED_UNDERSCORE_NAMES: error( "builder names must not start with an underscore: '%s'" % name) try: self.name = util.bytes2unicode(name, encoding="ascii") except UnicodeDecodeError: error("builder names must be unicode or ASCII") # factory is required if factory is None: error("builder '%s' has no factory" % name) from buildbot.process.factory import BuildFactory if factory is not None and not isinstance(factory, BuildFactory): error("builder '%s's factory is not a BuildFactory instance" % name) self.factory = factory # workernames can be a single worker name or a list, and should also # include workername, if given if isinstance(workernames, str): workernames = [workernames] if workernames: if not isinstance(workernames, list): error("builder '%s': workernames must be a list or a string" % (name,)) else: workernames = [] if workername: if not isinstance(workername, str): error("builder '%s': workername must be a string but it is %r" % ( name, workername)) workernames = workernames + [workername] if not workernames: error("builder '%s': at least one workername is required" % (name,)) self.workernames = workernames # builddir defaults to name if builddir is None: builddir = safeTranslate(name) builddir = 
bytes2unicode(builddir) self.builddir = builddir # workerbuilddir defaults to builddir if workerbuilddir is None: workerbuilddir = builddir self.workerbuilddir = workerbuilddir # remainder are optional if category and tags: error("builder '%s': builder categories are deprecated and " "replaced by tags; you should only specify tags" % (name,)) if category: warnDeprecated("0.9", "builder '%s': builder categories are " "deprecated and should be replaced with " "'tags=[cat]'" % (name,)) if not isinstance(category, str): error("builder '%s': category must be a string" % (name,)) tags = [category] if tags: if not isinstance(tags, list): error("builder '%s': tags must be a list" % (name,)) bad_tags = any((tag for tag in tags if not isinstance(tag, str))) if bad_tags: error( "builder '%s': tags list contains something that is not a string" % (name,)) if len(tags) != len(set(tags)): dupes = " ".join({x for x in tags if tags.count(x) > 1}) error( "builder '%s': tags list contains duplicate tags: %s" % (name, dupes)) else: tags = [] self.tags = tags self.nextWorker = nextWorker if nextWorker and not callable(nextWorker): error('nextWorker must be a callable') # Keeping support of the previous nextWorker API if nextWorker: argCount = self._countFuncArgs(nextWorker) if (argCount == 2 or (isinstance(nextWorker, MethodType) and argCount == 3)): warnDeprecated( "0.9", "nextWorker now takes a " "3rd argument (build request)") self.nextWorker = lambda x, y, z: nextWorker( x, y) # pragma: no cover self.nextBuild = nextBuild if nextBuild and not callable(nextBuild): error('nextBuild must be a callable') self.canStartBuild = canStartBuild if canStartBuild and not callable(canStartBuild): error('canStartBuild must be a callable') self.locks = locks or [] self.env = env or {} if not isinstance(self.env, dict): error("builder's env must be a dictionary") self.properties = properties or {} self.defaultProperties = defaultProperties or {} self.collapseRequests = collapseRequests self.description = description def getConfigDict(self): # note: this method will disappear eventually - put your smarts in the # constructor! rv = { 'name': self.name, 'workernames': self.workernames, 'factory': self.factory, 'builddir': self.builddir, 'workerbuilddir': self.workerbuilddir, } if self.tags: rv['tags'] = self.tags if self.nextWorker: rv['nextWorker'] = self.nextWorker if self.nextBuild: rv['nextBuild'] = self.nextBuild if self.locks: rv['locks'] = self.locks if self.env: rv['env'] = self.env if self.properties: rv['properties'] = self.properties if self.defaultProperties: rv['defaultProperties'] = self.defaultProperties if self.collapseRequests is not None: rv['collapseRequests'] = self.collapseRequests if self.description: rv['description'] = self.description return rv def _countFuncArgs(self, func): if getattr(inspect, 'signature', None): # Python 3 signature = inspect.signature(func) argCount = len(signature.parameters) else: # Python 2 argSpec = inspect.getargspec(func) argCount = len(argSpec.args) return argCount buildbot-2.6.0/master/buildbot/configurators/000077500000000000000000000000001361162603000213055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/configurators/__init__.py000066400000000000000000000031501361162603000234150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
#

from zope.interface import implementer

from buildbot.interfaces import IConfigurator

"""
This module holds configurators, which help set up schedulers, builders and
steps for a very specific purpose.
Higher level interfaces to buildbot configuration components.
"""


@implementer(IConfigurator)
class ConfiguratorBase:

    """
    I provide base helper methods for configurators
    """

    def __init__(self):
        pass

    def configure(self, config_dict):
        self.config_dict = c = config_dict
        if 'schedulers' not in c:
            c['schedulers'] = []
        self.schedulers = c['schedulers']
        if 'protocols' not in c:
            c['protocols'] = {}
        self.protocols = c['protocols']
        if 'builders' not in c:
            c['builders'] = []
        self.builders = c['builders']
        if 'workers' not in c:
            c['workers'] = []
        self.workers = c['workers']
buildbot-2.6.0/master/buildbot/configurators/janitor.py000066400000000000000000000062531361162603000233320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
#

import datetime

from twisted.internet import defer

from buildbot.config import BuilderConfig
from buildbot.configurators import ConfiguratorBase
from buildbot.process.buildstep import BuildStep
from buildbot.process.factory import BuildFactory
from buildbot.process.results import SUCCESS
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.util import datetime2epoch
from buildbot.worker.local import LocalWorker

"""
Janitor is a configurator which creates a Janitor builder with all the needed
Janitor steps
"""

JANITOR_NAME = "__Janitor"  # If you read this code, you may want to patch this name.
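# --- Editor's illustrative sketch (not part of the original buildbot source) ---
# The configurator below is normally activated from a master.cfg, along these
# lines (the timedelta value, hour and dayOfWeek are only examples a user might
# pick, not values taken from this module):
#
#     from datetime import timedelta
#     from buildbot.plugins import util
#
#     c = BuildmasterConfig = {}
#     c['configurators'] = [util.JanitorConfigurator(
#         logHorizon=timedelta(weeks=4),  # drop log chunks older than 4 weeks
#         hour=12,                        # extra kwargs are passed on to the Nightly scheduler
#         dayOfWeek=6,
#     )]
#
# JanitorConfigurator (defined below) then appends the __Janitor builder, its
# schedulers, a local worker and the 'null' protocol to that configuration dict.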
def now():
    """patchable now (datetime is not patchable as builtin)"""
    return datetime.datetime.utcnow()


class LogChunksJanitor(BuildStep):
    name = 'LogChunksJanitor'
    renderables = ["logHorizon"]

    def __init__(self, logHorizon):
        super().__init__()
        self.logHorizon = logHorizon

    @defer.inlineCallbacks
    def run(self):
        older_than_timestamp = datetime2epoch(now() - self.logHorizon)
        deleted = yield self.master.db.logs.deleteOldLogChunks(older_than_timestamp)
        self.descriptionDone = ["deleted", str(deleted), "logchunks"]
        return SUCCESS


class JanitorConfigurator(ConfiguratorBase):

    def __init__(self, logHorizon=None, hour=0, **kwargs):
        super().__init__()
        self.logHorizon = logHorizon
        self.hour = hour
        self.kwargs = kwargs

    def configure(self, config_dict):
        if self.logHorizon is None:
            return
        logHorizon = self.logHorizon
        hour = self.hour
        kwargs = self.kwargs
        super().configure(config_dict)
        nightly_kwargs = {}
        # we take the defaults of Nightly, except for hour
        for arg in ('minute', 'dayOfMonth', 'month', 'dayOfWeek'):
            if arg in kwargs:
                nightly_kwargs[arg] = kwargs[arg]
        self.schedulers.append(Nightly(
            name=JANITOR_NAME, builderNames=[JANITOR_NAME],
            hour=hour, **nightly_kwargs))
        self.schedulers.append(ForceScheduler(
            name=JANITOR_NAME + "_force",
            builderNames=[JANITOR_NAME]))
        self.builders.append(BuilderConfig(
            name=JANITOR_NAME, workername=JANITOR_NAME,
            factory=BuildFactory(steps=[
                LogChunksJanitor(logHorizon=logHorizon)
            ])
        ))
        self.protocols.setdefault('null', {})
        self.workers.append(LocalWorker(JANITOR_NAME))
buildbot-2.6.0/master/buildbot/data/000077500000000000000000000000001361162603000173315ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/data/__init__.py000066400000000000000000000000001361162603000214300ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/data/base.py000066400000000000000000000134651361162603000206260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import copy import re from collections import UserList from twisted.internet import defer from buildbot.data import exceptions class ResourceType: name = None plural = None endpoints = [] keyFields = [] eventPathPatterns = "" entityType = None def __init__(self, master): self.master = master self.compileEventPathPatterns() def compileEventPathPatterns(self): # We'll run a single format, and then split the string # to get the final event path tuple pathPatterns = self.eventPathPatterns pathPatterns = pathPatterns.split() identifiers = re.compile(r':([^/]*)') for i, pp in enumerate(pathPatterns): pp = identifiers.sub(r'{\1}', pp) if pp.startswith("/"): pp = pp[1:] pathPatterns[i] = pp self.eventPaths = pathPatterns def getEndpoints(self): endpoints = self.endpoints[:] for i, ep in enumerate(endpoints): if not issubclass(ep, Endpoint): raise TypeError("Not an Endpoint subclass") endpoints[i] = ep(self, self.master) return endpoints @staticmethod def sanitizeMessage(msg): msg = copy.deepcopy(msg) return msg def produceEvent(self, msg, event): if msg is not None: msg = self.sanitizeMessage(msg) for path in self.eventPaths: path = path.format(**msg) routingKey = tuple(path.split("/")) + (event,) self.master.mq.produce(routingKey, msg) class Endpoint: pathPatterns = "" rootLinkName = None isCollection = False isRaw = False def __init__(self, rtype, master): self.rtype = rtype self.master = master def get(self, resultSpec, kwargs): raise NotImplementedError def control(self, action, args, kwargs): # we convert the action into a mixedCase method name action_method = getattr(self, "action" + action.capitalize(), None) if action_method is None: raise exceptions.InvalidControlException("action: {} is not supported".format(action)) return action_method(args, kwargs) def __repr__(self): return "endpoint for " + self.pathPatterns class BuildNestingMixin: """ A mixin for methods to decipher the many ways a build, step, or log can be specified. 
""" @defer.inlineCallbacks def getBuildid(self, kwargs): # need to look in the context of a step, specified by build or # builder or whatever if 'buildid' in kwargs: return kwargs['buildid'] else: builderid = yield self.getBuilderId(kwargs) if builderid is None: return build = yield self.master.db.builds.getBuildByNumber( builderid=builderid, number=kwargs['build_number']) if not build: return return build['id'] @defer.inlineCallbacks def getStepid(self, kwargs): if 'stepid' in kwargs: return kwargs['stepid'] else: buildid = yield self.getBuildid(kwargs) if buildid is None: return dbdict = yield self.master.db.steps.getStep(buildid=buildid, number=kwargs.get( 'step_number'), name=kwargs.get('step_name')) if not dbdict: return return dbdict['id'] def getBuilderId(self, kwargs): if 'buildername' in kwargs: return self.master.db.builders.findBuilderId(kwargs['buildername'], autoCreate=False) return defer.succeed(kwargs['builderid']) class ListResult(UserList): __slots__ = ['offset', 'total', 'limit'] def __init__(self, values, offset=None, total=None, limit=None): super().__init__(values) # if set, this is the index in the overall results of the first element of # this list self.offset = offset # if set, this is the total number of results self.total = total # if set, this is the limit, either from the user or the implementation self.limit = limit def __repr__(self): return "ListResult(%r, offset=%r, total=%r, limit=%r)" % \ (self.data, self.offset, self.total, self.limit) def __eq__(self, other): if isinstance(other, ListResult): return self.data == other.data \ and self.offset == other.offset \ and self.total == other.total \ and self.limit == other.limit return self.data == other \ and self.offset == self.limit is None \ and (self.total is None or self.total == len(other)) def __ne__(self, other): return not (self == other) def updateMethod(func): """Decorate this resourceType instance as an update method, made available at master.data.updates.$funcname""" func.isUpdateMethod = True return func buildbot-2.6.0/master/buildbot/data/builders.py000066400000000000000000000120101361162603000215060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class BuilderEndpoint(base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /builders/n:builderid /builders/i:buildername /masters/n:masterid/builders/n:builderid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): builderid = yield self.getBuilderId(kwargs) if builderid is None: return None bdict = yield self.master.db.builders.getBuilder(builderid) if not bdict: return None if 'masterid' in kwargs: if kwargs['masterid'] not in bdict['masterids']: return None return dict(builderid=builderid, name=bdict['name'], masterids=bdict['masterids'], description=bdict['description'], tags=bdict['tags']) class BuildersEndpoint(base.Endpoint): isCollection = True rootLinkName = 'builders' pathPatterns = """ /builders /masters/n:masterid/builders """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): bdicts = yield self.master.db.builders.getBuilders( masterid=kwargs.get('masterid', None)) return [dict(builderid=bd['id'], name=bd['name'], masterids=bd['masterids'], description=bd['description'], tags=bd['tags']) for bd in bdicts] class Builder(base.ResourceType): name = "builder" plural = "builders" endpoints = [BuilderEndpoint, BuildersEndpoint] keyFields = ['builderid'] eventPathPatterns = """ /builders/:builderid """ class EntityType(types.Entity): builderid = types.Integer() name = types.Identifier(70) masterids = types.List(of=types.Integer()) description = types.NoneOk(types.String()) tags = types.List(of=types.String()) entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, _id, event): builder = yield self.master.data.get(('builders', str(_id))) self.produceEvent(builder, event) @base.updateMethod def findBuilderId(self, name): return self.master.db.builders.findBuilderId(name) @base.updateMethod @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): ret = yield self.master.db.builders.updateBuilderInfo(builderid, description, tags) yield self.generateEvent(builderid, "update") return ret @base.updateMethod @defer.inlineCallbacks def updateBuilderList(self, masterid, builderNames): # get the "current" list of builders for this master, so we know what # changes to make. Race conditions here aren't a great worry, as this # is the only master inserting or deleting these records. 
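# Editor's summary of the reconciliation below (comment added for clarity, not in
# the original source): given the desired `builderNames` and the builder rows
# currently attached to this master, the code
#   1. detaches every builder whose name is no longer wanted and publishes a
#      ('builders', builderid, 'stopped') message, and
#   2. looks up (or creates) an id for every brand-new name, attaches it to this
#      master and publishes a ('builders', builderid, 'started') message.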
builders = yield self.master.db.builders.getBuilders(masterid=masterid) # figure out what to remove and remove it builderNames_set = set(builderNames) for bldr in builders: if bldr['name'] not in builderNames_set: builderid = bldr['id'] yield self.master.db.builders.removeBuilderMaster( masterid=masterid, builderid=builderid) self.master.mq.produce(('builders', str(builderid), 'stopped'), dict(builderid=builderid, masterid=masterid, name=bldr['name'])) else: builderNames_set.remove(bldr['name']) # now whatever's left in builderNames_set is new for name in builderNames_set: builderid = yield self.master.db.builders.findBuilderId(name) yield self.master.db.builders.addBuilderMaster( masterid=masterid, builderid=builderid) self.master.mq.produce(('builders', str(builderid), 'started'), dict(builderid=builderid, masterid=masterid, name=name)) # returns a Deferred that returns None def _masterDeactivated(self, masterid): # called from the masters rtype to indicate that the given master is # deactivated return self.updateBuilderList(masterid, []) buildbot-2.6.0/master/buildbot/data/buildrequests.py000066400000000000000000000274531361162603000226110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.db.buildrequests import AlreadyClaimedError from buildbot.db.buildrequests import NotClaimedError from buildbot.process import results from buildbot.process.results import RETRY class Db2DataMixin: def _generate_filtered_properties(self, props, filters): """ This method returns Build's properties according to property filters. 
:param props: Properties as a dict (from db) :param filters: Desired properties keys as a list (from API URI) """ # by default no properties are returned if props and filters: return (props if '*' in filters else dict(((k, v) for k, v in props.items() if k in filters))) @defer.inlineCallbacks def addPropertiesToBuildRequest(self, buildrequest, filters): if not filters: return props = yield self.master.db.buildsets.getBuildsetProperties(buildrequest['buildsetid']) filtered_properties = self._generate_filtered_properties(props, filters) if filtered_properties: buildrequest['properties'] = filtered_properties def db2data(self, dbdict): data = { 'buildrequestid': dbdict['buildrequestid'], 'buildsetid': dbdict['buildsetid'], 'builderid': dbdict['builderid'], 'priority': dbdict['priority'], 'claimed': dbdict['claimed'], 'claimed_at': dbdict['claimed_at'], 'claimed_by_masterid': dbdict['claimed_by_masterid'], 'complete': dbdict['complete'], 'results': dbdict['results'], 'submitted_at': dbdict['submitted_at'], 'complete_at': dbdict['complete_at'], 'waited_for': dbdict['waited_for'], 'properties': dbdict.get('properties'), } return defer.succeed(data) fieldMapping = { 'buildrequestid': 'buildrequests.id', 'buildsetid': 'buildrequests.buildsetid', 'builderid': 'buildrequests.builderid', 'priority': 'buildrequests.priority', 'complete': 'buildrequests.complete', 'results': 'buildrequests.results', 'submitted_at': 'buildrequests.submitted_at', 'complete_at': 'buildrequests.complete_at', 'waited_for': 'buildrequests.waited_for', # br claim 'claimed_at': 'buildrequest_claims.claimed_at', 'claimed_by_masterid': 'buildrequest_claims.masterid', } class BuildRequestEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /buildrequests/n:buildrequestid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildrequest = yield self.master.db.buildrequests.getBuildRequest(kwargs['buildrequestid']) if buildrequest: filters = resultSpec.popProperties() if hasattr(resultSpec, 'popProperties') else [] yield self.addPropertiesToBuildRequest(buildrequest, filters) return (yield self.db2data(buildrequest)) return None @defer.inlineCallbacks def control(self, action, args, kwargs): if action != "cancel": raise ValueError("action: {} is not supported".format(action)) brid = kwargs['buildrequestid'] # first, try to claim the request; if this fails, then it's too late to # cancel the build anyway try: b = yield self.master.db.buildrequests.claimBuildRequests(brids=[brid]) except AlreadyClaimedError: # XXX race condition # - After a buildrequest was claimed, and # - Before creating a build, # the claiming master still # needs to do some processing, (send a message to the message queue, # call maybeStartBuild on the related builder). # In that case we won't have the related builds here. We don't have # an alternative to letting them run without stopping them for now. builds = yield self.master.data.get(("buildrequests", brid, "builds")) # Don't call the data API here, as the buildrequests might have been # taken by another master. We just send the stop message and forget # about those. mqKwargs = {'reason': kwargs.get('reason', 'no reason')} for b in builds: self.master.mq.produce(("control", "builds", str(b['buildid']), "stop"), mqKwargs) return None # then complete it with 'CANCELLED'; this is the closest we can get to # cancelling a request without running into trouble with dangling # references. 
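# Editor's illustrative note (not from the original source): this control action is
# normally reached through the REST API, which wraps data API control calls in
# JSON-RPC 2.0, roughly:
#
#     POST /api/v2/buildrequests/<buildrequestid>
#     {"jsonrpc": "2.0", "method": "cancel", "id": 1,
#      "params": {"reason": "superseded"}}
#
# The URL prefix depends on the www configuration; the body shape follows the
# documented JSON-RPC convention for control methods.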
yield self.master.data.updates.completeBuildRequests([brid], results.CANCELLED) class BuildRequestsEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /buildrequests /builders/n:builderid/buildrequests """ rootLinkName = 'buildrequests' @defer.inlineCallbacks def get(self, resultSpec, kwargs): builderid = kwargs.get("builderid", None) complete = resultSpec.popBooleanFilter('complete') claimed_by_masterid = resultSpec.popBooleanFilter( 'claimed_by_masterid') if claimed_by_masterid: # claimed_by_masterid takes precedence over 'claimed' filter # (no need to check consistency with 'claimed' filter even if # 'claimed'=False with 'claimed_by_masterid' set, doesn't make sense) claimed = claimed_by_masterid else: claimed = resultSpec.popBooleanFilter('claimed') bsid = resultSpec.popOneFilter('buildsetid', 'eq') resultSpec.fieldMapping = self.fieldMapping buildrequests = yield self.master.db.buildrequests.getBuildRequests( builderid=builderid, complete=complete, claimed=claimed, bsid=bsid, resultSpec=resultSpec) results = [] filters = resultSpec.popProperties() if hasattr(resultSpec, 'popProperties') else [] for br in buildrequests: yield self.addPropertiesToBuildRequest(br, filters) results.append((yield self.db2data(br))) return results class BuildRequest(base.ResourceType): name = "buildrequest" plural = "buildrequests" endpoints = [BuildRequestEndpoint, BuildRequestsEndpoint] keyFields = ['buildsetid', 'builderid', 'buildrequestid'] eventPathPatterns = """ /buildsets/:buildsetid/builders/:builderid/buildrequests/:buildrequestid /buildrequests/:buildrequestid /builders/:builderid/buildrequests/:buildrequestid """ class EntityType(types.Entity): buildrequestid = types.Integer() buildsetid = types.Integer() builderid = types.Integer() priority = types.Integer() claimed = types.Boolean() claimed_at = types.NoneOk(types.DateTime()) claimed_by_masterid = types.NoneOk(types.Integer()) complete = types.Boolean() results = types.NoneOk(types.Integer()) submitted_at = types.DateTime() complete_at = types.NoneOk(types.DateTime()) waited_for = types.Boolean() properties = types.NoneOk(types.SourcedProperties()) entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, brids, event): for brid in brids: # get the build and munge the result for the notification br = yield self.master.data.get(('buildrequests', str(brid))) self.produceEvent(br, event) @defer.inlineCallbacks def callDbBuildRequests(self, brids, db_callable, event, **kw): if not brids: # empty buildrequest list. No need to call db API return True try: yield db_callable(brids, **kw) except AlreadyClaimedError: # the db layer returned an AlreadyClaimedError exception, usually # because one of the buildrequests has already been claimed by # another master return False yield self.generateEvent(brids, event) return True @base.updateMethod def claimBuildRequests(self, brids, claimed_at=None): return self.callDbBuildRequests(brids, self.master.db.buildrequests.claimBuildRequests, event="claimed", claimed_at=claimed_at) @base.updateMethod @defer.inlineCallbacks def unclaimBuildRequests(self, brids): if brids: yield self.master.db.buildrequests.unclaimBuildRequests(brids) yield self.generateEvent(brids, "unclaimed") @base.updateMethod @defer.inlineCallbacks def completeBuildRequests(self, brids, results, complete_at=None): assert results != RETRY, "a buildrequest cannot be completed with a retry status!" if not brids: # empty buildrequest list. 
No need to call db API return True try: yield self.master.db.buildrequests.completeBuildRequests( brids, results, complete_at=complete_at) except NotClaimedError: # the db layer returned a NotClaimedError exception, usually # because one of the buildrequests has been claimed by another # master return False yield self.generateEvent(brids, "complete") # check for completed buildsets -- one call for each build request with # a unique bsid seen_bsids = set() for brid in brids: brdict = yield self.master.db.buildrequests.getBuildRequest(brid) if brdict: bsid = brdict['buildsetid'] if bsid in seen_bsids: continue seen_bsids.add(bsid) yield self.master.data.updates.maybeBuildsetComplete(bsid) return True @base.updateMethod @defer.inlineCallbacks def rebuildBuildrequest(self, buildrequest): # goal is to make a copy of the original buildset buildset = yield self.master.data.get(('buildsets', buildrequest['buildsetid'])) properties = yield self.master.data.get(('buildsets', buildrequest['buildsetid'], 'properties')) ssids = [ss['ssid'] for ss in buildset['sourcestamps']] res = yield self.master.data.updates.addBuildset(waited_for=False, scheduler='rebuild', sourcestamps=ssids, reason='rebuild', properties=properties, builderids=[ buildrequest['builderid']], external_idstring=buildset['external_idstring'], parent_buildid=buildset['parent_buildid'], parent_relationship=buildset[ 'parent_relationship'], ) return res buildbot-2.6.0/master/buildbot/data/builds.py000066400000000000000000000220731361162603000211710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.data.resultspec import ResultSpec class Db2DataMixin: def _generate_filtered_properties(self, props, filters): """ This method returns Build's properties according to property filters. .. 
seealso:: `Official Documentation `_ :param props: The Build's properties as a dict (from db) :param filters: Desired properties keys as a list (from API URI) """ # by default none properties are returned if props and filters: # pragma: no cover return (props if '*' in filters else dict(((k, v) for k, v in props.items() if k in filters))) def db2data(self, dbdict): data = { 'buildid': dbdict['id'], 'number': dbdict['number'], 'builderid': dbdict['builderid'], 'buildrequestid': dbdict['buildrequestid'], 'workerid': dbdict['workerid'], 'masterid': dbdict['masterid'], 'started_at': dbdict['started_at'], 'complete_at': dbdict['complete_at'], 'complete': dbdict['complete_at'] is not None, 'state_string': dbdict['state_string'], 'results': dbdict['results'], 'properties': {} } return defer.succeed(data) fieldMapping = { 'buildid': 'builds.id', 'number': 'builds.number', 'builderid': 'builds.builderid', 'buildrequestid': 'builds.buildrequestid', 'workerid': 'builds.workerid', 'masterid': 'builds.masterid', 'started_at': 'builds.started_at', 'complete_at': 'builds.complete_at', 'state_string': 'builds.state_string', 'results': 'builds.results', } class BuildEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /builds/n:buildid /builders/n:builderid/builds/n:number /builders/i:buildername/builds/n:number """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'buildid' in kwargs: dbdict = yield self.master.db.builds.getBuild(kwargs['buildid']) else: bldr = yield self.getBuilderId(kwargs) if bldr is None: return num = kwargs['number'] dbdict = yield self.master.db.builds.getBuildByNumber(bldr, num) data = yield self.db2data(dbdict) if dbdict else None # In some cases, data could be None if data: filters = resultSpec.popProperties() if hasattr( resultSpec, 'popProperties') else [] # Avoid to request DB for Build's properties if not specified if filters: # pragma: no cover try: props = yield self.master.db.builds.getBuildProperties(data['buildid']) except (KeyError, TypeError): props = {} filtered_properties = self._generate_filtered_properties( props, filters) if filtered_properties: data['properties'] = filtered_properties return data @defer.inlineCallbacks def actionStop(self, args, kwargs): buildid = kwargs.get('buildid') if buildid is None: bldr = kwargs['builderid'] num = kwargs['number'] dbdict = yield self.master.db.builds.getBuildByNumber(bldr, num) buildid = dbdict['id'] self.master.mq.produce(("control", "builds", str(buildid), 'stop'), dict(reason=kwargs.get('reason', args.get('reason', 'no reason')))) @defer.inlineCallbacks def actionRebuild(self, args, kwargs): # we use the self.get and not self.data.get to be able to support all # the pathPatterns of this endpoint build = yield self.get(ResultSpec(), kwargs) buildrequest = yield self.master.data.get(('buildrequests', build['buildrequestid'])) res = yield self.master.data.updates.rebuildBuildrequest(buildrequest) return res class BuildsEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builds /builders/n:builderid/builds /builders/i:buildername/builds /buildrequests/n:buildrequestid/builds /changes/n:changeid/builds /workers/n:workerid/builds """ rootLinkName = 'builds' @defer.inlineCallbacks def get(self, resultSpec, kwargs): changeid = kwargs.get('changeid') if changeid is not None: builds = yield self.master.db.builds.getBuildsForChange(changeid) else: # following returns None if no filter # true or false, if there is a 
complete filter builderid = None if 'builderid' in kwargs or 'buildername' in kwargs: builderid = yield self.getBuilderId(kwargs) if builderid is None: return [] complete = resultSpec.popBooleanFilter("complete") buildrequestid = resultSpec.popIntegerFilter("buildrequestid") resultSpec.fieldMapping = self.fieldMapping builds = yield self.master.db.builds.getBuilds( builderid=builderid, buildrequestid=kwargs.get('buildrequestid', buildrequestid), workerid=kwargs.get('workerid'), complete=complete, resultSpec=resultSpec) # returns properties' list filters = resultSpec.popProperties() buildscol = [] for b in builds: data = yield self.db2data(b) # Avoid to request DB for Build's properties if not specified if filters: # pragma: no cover props = yield self.master.db.builds.getBuildProperties(b['id']) filtered_properties = self._generate_filtered_properties( props, filters) if filtered_properties: data['properties'] = filtered_properties buildscol.append(data) return buildscol class Build(base.ResourceType): name = "build" plural = "builds" endpoints = [BuildEndpoint, BuildsEndpoint] keyFields = ['builderid', 'buildid', 'workerid'] eventPathPatterns = """ /builders/:builderid/builds/:number /builds/:buildid /workers/:workerid/builds/:buildid """ class EntityType(types.Entity): buildid = types.Integer() number = types.Integer() builderid = types.Integer() buildrequestid = types.Integer() workerid = types.Integer() masterid = types.Integer() started_at = types.DateTime() complete = types.Boolean() complete_at = types.NoneOk(types.DateTime()) results = types.NoneOk(types.Integer()) state_string = types.String() properties = types.NoneOk(types.SourcedProperties()) entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, _id, event): # get the build and munge the result for the notification build = yield self.master.data.get(('builds', str(_id))) self.produceEvent(build, event) @base.updateMethod @defer.inlineCallbacks def addBuild(self, builderid, buildrequestid, workerid): res = yield self.master.db.builds.addBuild( builderid=builderid, buildrequestid=buildrequestid, workerid=workerid, masterid=self.master.masterid, state_string='created') return res @base.updateMethod def generateNewBuildEvent(self, buildid): return self.generateEvent(buildid, "new") @base.updateMethod @defer.inlineCallbacks def setBuildStateString(self, buildid, state_string): res = yield self.master.db.builds.setBuildStateString( buildid=buildid, state_string=state_string) yield self.generateEvent(buildid, "update") return res @base.updateMethod @defer.inlineCallbacks def finishBuild(self, buildid, results): res = yield self.master.db.builds.finishBuild( buildid=buildid, results=results) yield self.generateEvent(buildid, "finished") return res buildbot-2.6.0/master/buildbot/data/buildsets.py000066400000000000000000000213451361162603000217060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import sourcestamps as sourcestampsapi from buildbot.data import types from buildbot.db.buildsets import AlreadyCompleteError from buildbot.process.buildrequest import BuildRequestCollapser from buildbot.process.results import SUCCESS from buildbot.process.results import worst_status from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Db2DataMixin: @defer.inlineCallbacks def db2data(self, bsdict): if not bsdict: return None buildset = bsdict.copy() # gather the actual sourcestamps, in parallel sourcestamps = [] @defer.inlineCallbacks def getSs(ssid): ss = yield self.master.data.get(('sourcestamps', str(ssid))) sourcestamps.append(ss) yield defer.DeferredList([getSs(id) for id in buildset['sourcestamps']], fireOnOneErrback=True, consumeErrors=True) buildset['sourcestamps'] = sourcestamps # minor modifications buildset['submitted_at'] = datetime2epoch(buildset['submitted_at']) buildset['complete_at'] = datetime2epoch(buildset['complete_at']) return buildset fieldMapping = { 'bsid': 'buildsets.id', 'external_idstring': 'buildsets.external_idstring', 'reason': 'buildsets.reason', 'submitted_at': 'buildsets.submitted_at', 'complete': 'buildsets.complete', 'complete_at': 'buildsets.complete_at', 'results': 'buildsets.results', 'parent_buildid': 'buildsets.parent_buildid', 'parent_relationship': 'buildsets.parent_relationship' } class BuildsetEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /buildsets/n:bsid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): res = yield self.master.db.buildsets.getBuildset(kwargs['bsid']) res = yield self.db2data(res) return res class BuildsetsEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /buildsets """ rootLinkName = 'buildsets' def get(self, resultSpec, kwargs): complete = resultSpec.popBooleanFilter('complete') resultSpec.fieldMapping = self.fieldMapping d = self.master.db.buildsets.getBuildsets( complete=complete, resultSpec=resultSpec) @d.addCallback def db2data(buildsets): d = defer.DeferredList([self.db2data(bs) for bs in buildsets], fireOnOneErrback=True, consumeErrors=True) @d.addCallback def getResults(res): return [r[1] for r in res] return d return d class Buildset(base.ResourceType): name = "buildset" plural = "buildsets" endpoints = [BuildsetEndpoint, BuildsetsEndpoint] keyFields = ['bsid'] eventPathPatterns = """ /buildsets/:bsid """ class EntityType(types.Entity): bsid = types.Integer() external_idstring = types.NoneOk(types.String()) reason = types.String() submitted_at = types.Integer() complete = types.Boolean() complete_at = types.NoneOk(types.Integer()) results = types.NoneOk(types.Integer()) sourcestamps = types.List( of=sourcestampsapi.SourceStamp.entityType) parent_buildid = types.NoneOk(types.Integer()) parent_relationship = types.NoneOk(types.String()) entityType = EntityType(name) @base.updateMethod @defer.inlineCallbacks def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): if sourcestamps is None: sourcestamps = [] if properties 
is None: properties = {} if builderids is None: builderids = [] submitted_at = int(self.master.reactor.seconds()) bsid, brids = yield self.master.db.buildsets.addBuildset( sourcestamps=sourcestamps, reason=reason, properties=properties, builderids=builderids, waited_for=waited_for, external_idstring=external_idstring, submitted_at=epoch2datetime(submitted_at), parent_buildid=parent_buildid, parent_relationship=parent_relationship) yield BuildRequestCollapser(self.master, list(brids.values())).collapse() # get each of the sourcestamps for this buildset (sequentially) bsdict = yield self.master.db.buildsets.getBuildset(bsid) sourcestamps = [] for ssid in bsdict['sourcestamps']: sourcestamps.append( (yield self.master.data.get(('sourcestamps', str(ssid)))).copy() ) # notify about the component build requests brResource = self.master.data.getResourceType("buildrequest") brResource.generateEvent(list(brids.values()), 'new') # and the buildset itself msg = dict( bsid=bsid, external_idstring=external_idstring, reason=reason, submitted_at=submitted_at, complete=False, complete_at=None, results=None, scheduler=scheduler, sourcestamps=sourcestamps) # TODO: properties=properties) self.produceEvent(msg, "new") log.msg("added buildset %d to database" % bsid) # if there are no builders, then this is done already, so send the # appropriate messages for that if not builderids: yield self.maybeBuildsetComplete(bsid) return (bsid, brids) @base.updateMethod @defer.inlineCallbacks def maybeBuildsetComplete(self, bsid): brdicts = yield self.master.db.buildrequests.getBuildRequests( bsid=bsid, complete=False) # if there are incomplete buildrequests, bail out if brdicts: return brdicts = yield self.master.db.buildrequests.getBuildRequests(bsid=bsid) # figure out the overall results of the buildset: cumulative_results = SUCCESS for brdict in brdicts: cumulative_results = worst_status( cumulative_results, brdict['results']) # get a copy of the buildset bsdict = yield self.master.db.buildsets.getBuildset(bsid) # if it's already completed, we're late to the game, and there's # nothing to do. # # NOTE: there's still a strong possibility of a race condition here, # which would cause buildset being completed twice. # in this case, the db layer will detect that and raise AlreadyCompleteError if bsdict['complete']: return # mark it as completed in the database complete_at = epoch2datetime(int(self.master.reactor.seconds())) try: yield self.master.db.buildsets.completeBuildset(bsid, cumulative_results, complete_at=complete_at) except AlreadyCompleteError: return # get the sourcestamps for the message # get each of the sourcestamps for this buildset (sequentially) bsdict = yield self.master.db.buildsets.getBuildset(bsid) sourcestamps = [] for ssid in bsdict['sourcestamps']: sourcestamps.append( copy.deepcopy( (yield self.master.data.get(('sourcestamps', str(ssid)))) ) ) msg = dict( bsid=bsid, external_idstring=bsdict['external_idstring'], reason=bsdict['reason'], sourcestamps=sourcestamps, submitted_at=bsdict['submitted_at'], complete=True, complete_at=complete_at, results=cumulative_results) # TODO: properties=properties) self.produceEvent(msg, "complete") buildbot-2.6.0/master/buildbot/data/changes.py000066400000000000000000000202471361162603000213200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import sourcestamps from buildbot.data import types from buildbot.process import metrics from buildbot.process.users import users from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class FixerMixin: @defer.inlineCallbacks def _fixChange(self, change): # TODO: make these mods in the DB API if change: change = change.copy() change['when_timestamp'] = datetime2epoch(change['when_timestamp']) sskey = ('sourcestamps', str(change['sourcestampid'])) change['sourcestamp'] = yield self.master.data.get(sskey) del change['sourcestampid'] return change class ChangeEndpoint(FixerMixin, base.Endpoint): isCollection = False pathPatterns = """ /changes/n:changeid """ def get(self, resultSpec, kwargs): d = self.master.db.changes.getChange(kwargs['changeid']) d.addCallback(self._fixChange) return d class ChangesEndpoint(FixerMixin, base.Endpoint): isCollection = True pathPatterns = """ /changes /builds/n:buildid/changes /sourcestamps/n:ssid/changes """ rootLinkName = 'changes' @defer.inlineCallbacks def get(self, resultSpec, kwargs): buildid = kwargs.get('buildid') ssid = kwargs.get('ssid') if buildid is not None: changes = yield self.master.db.changes.getChangesForBuild(buildid) elif ssid is not None: change = yield self.master.db.changes.getChangeFromSSid(ssid) if change is not None: changes = [change] else: changes = [] else: # this special case is useful and implemented by the dbapi # so give it a boost if (resultSpec.order == ('-changeid',) and resultSpec.limit and resultSpec.offset is None): changes = yield self.master.db.changes.getRecentChanges(resultSpec.limit) else: changes = yield self.master.db.changes.getChanges() results = [] for ch in changes: results.append((yield self._fixChange(ch))) return results class Change(base.ResourceType): name = "change" plural = "changes" endpoints = [ChangeEndpoint, ChangesEndpoint] eventPathPatterns = """ /changes/:changeid """ class EntityType(types.Entity): changeid = types.Integer() parent_changeids = types.List(of=types.Integer()) author = types.String() committer = types.String() files = types.List(of=types.String()) comments = types.String() revision = types.NoneOk(types.String()) when_timestamp = types.Integer() branch = types.NoneOk(types.String()) category = types.NoneOk(types.String()) revlink = types.NoneOk(types.String()) properties = types.SourcedProperties() repository = types.String() project = types.String() codebase = types.String() sourcestamp = sourcestamps.SourceStamp.entityType entityType = EntityType(name) @base.updateMethod @defer.inlineCallbacks def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): metrics.MetricCountEvent.log("added_changes", 1) if properties is None: properties = {} # add the source to the properties for k in 
properties: properties[k] = (properties[k], 'Change') # get a user id if src: # create user object, returning a corresponding uid uid = yield users.createUserObject(self.master, author, src) else: uid = None if not revlink and revision and repository and callable(self.master.config.revlink): # generate revlink from revision and repository using the configured callable revlink = self.master.config.revlink(revision, repository) or '' if callable(category): pre_change = self.master.config.preChangeGenerator(author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when_timestamp, branch=branch, revlink=revlink, properties=properties, repository=repository, project=project) category = category(pre_change) # set the codebase, either the default, supplied, or generated if codebase is None \ and self.master.config.codebaseGenerator is not None: pre_change = self.master.config.preChangeGenerator(author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when_timestamp, branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, project=project) codebase = self.master.config.codebaseGenerator(pre_change) codebase = str(codebase) else: codebase = codebase or '' # add the Change to the database changeid = yield self.master.db.changes.addChange( author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=epoch2datetime(when_timestamp), branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, codebase=codebase, project=project, uid=uid) # get the change and munge the result for the notification change = yield self.master.data.get(('changes', str(changeid))) change = copy.deepcopy(change) self.produceEvent(change, 'new') # log, being careful to handle funny characters msg = "added change with revision %s to database" % (revision,) log.msg(msg.encode('utf-8', 'replace')) return changeid buildbot-2.6.0/master/buildbot/data/changesources.py000066400000000000000000000100311361162603000225270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
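# Illustrative sketch of recording a change through addChange() above, e.g.
# from a change source hook. Hedged example: `master` and every literal value
# below are assumptions, not taken from the surrounding sources.
from twisted.internet import defer

@defer.inlineCallbacks
def record_change(master):
    changeid = yield master.data.updates.addChange(
        author='Jane Doe <jane@example.org>',
        committer='Jane Doe <jane@example.org>',
        files=['README.rst'],
        comments='Fix a typo in the README',
        revision='deadbeef',
        when_timestamp=1580000000,   # epoch seconds; converted via epoch2datetime() above
        branch='master',
        repository='https://example.org/repo.git',
        project='example',
        src='git')                   # src lets addChange() create or look up a user object
    return changeid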
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import masters from buildbot.data import types from buildbot.db.changesources import ChangeSourceAlreadyClaimedError class Db2DataMixin: @defer.inlineCallbacks def db2data(self, dbdict): master = None if dbdict['masterid'] is not None: master = yield self.master.data.get( ('masters', dbdict['masterid'])) data = { 'changesourceid': dbdict['id'], 'name': dbdict['name'], 'master': master, } return data class ChangeSourceEndpoint(Db2DataMixin, base.Endpoint): pathPatterns = """ /changesources/n:changesourceid /masters/n:masterid/changesources/n:changesourceid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): dbdict = yield self.master.db.changesources.getChangeSource( kwargs['changesourceid']) if 'masterid' in kwargs: if dbdict['masterid'] != kwargs['masterid']: return return (yield self.db2data(dbdict)) if dbdict else None class ChangeSourcesEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /changesources /masters/n:masterid/changesources """ rootLinkName = 'changesources' @defer.inlineCallbacks def get(self, resultSpec, kwargs): changesources = yield self.master.db.changesources.getChangeSources( masterid=kwargs.get('masterid')) csdicts = yield defer.DeferredList( [self.db2data(cs) for cs in changesources], consumeErrors=True, fireOnOneErrback=True) return [r for (s, r) in csdicts] class ChangeSource(base.ResourceType): name = "changesource" plural = "changesources" endpoints = [ChangeSourceEndpoint, ChangeSourcesEndpoint] keyFields = ['changesourceid'] class EntityType(types.Entity): changesourceid = types.Integer() name = types.String() master = types.NoneOk(masters.Master.entityType) entityType = EntityType(name) @base.updateMethod def findChangeSourceId(self, name): return self.master.db.changesources.findChangeSourceId(name) @base.updateMethod def trySetChangeSourceMaster(self, changesourceid, masterid): # the db layer throws an exception if the claim fails; we translate # that to a straight true-false value. We could trap the exception # type, but that seems a bit too restrictive d = self.master.db.changesources.setChangeSourceMaster( changesourceid, masterid) # set is successful: deferred result is True d.addCallback(lambda _: True) @d.addErrback def trapAlreadyClaimedError(why): # the db layer throws an exception if the claim fails; we squash # that error but let other exceptions continue upward why.trap(ChangeSourceAlreadyClaimedError) # set failed: deferred result is False return False return d @defer.inlineCallbacks def _masterDeactivated(self, masterid): changesources = yield self.master.db.changesources.getChangeSources( masterid=masterid) for cs in changesources: yield self.master.db.changesources.setChangeSourceMaster(cs['id'], None) buildbot-2.6.0/master/buildbot/data/connector.py000066400000000000000000000121741361162603000217020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
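# Hedged sketch of the change source claiming helpers above as the clustered
# changesource machinery would use them: look up (or create) the id for a
# named change source, then try to claim it for this master. `master`, the
# masterid and the change source name are assumptions for illustration.
from twisted.internet import defer

@defer.inlineCallbacks
def claim_changesource(master, masterid):
    csid = yield master.data.updates.findChangeSourceId('example-gitpoller')
    claimed = yield master.data.updates.trySetChangeSourceMaster(csid, masterid)
    # trySetChangeSourceMaster() translates ChangeSourceAlreadyClaimedError
    # into a plain False, as the errback above shows.
    return claimed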
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect from twisted.internet import defer from twisted.python import reflect from buildbot.data import base from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.util import pathmatch from buildbot.util import service class Updates: # empty container object; see _scanModule, below pass class RTypes: # empty container object; see _scanModule, below pass class DataConnector(service.AsyncService): submodules = [ 'buildbot.data.builders', 'buildbot.data.builds', 'buildbot.data.buildrequests', 'buildbot.data.workers', 'buildbot.data.steps', 'buildbot.data.logs', 'buildbot.data.logchunks', 'buildbot.data.buildsets', 'buildbot.data.changes', 'buildbot.data.changesources', 'buildbot.data.masters', 'buildbot.data.sourcestamps', 'buildbot.data.schedulers', 'buildbot.data.forceschedulers', 'buildbot.data.root', 'buildbot.data.properties', ] name = "data" def __init__(self): self.matcher = pathmatch.Matcher() self.rootLinks = [] # links from the root of the API @defer.inlineCallbacks def setServiceParent(self, parent): yield super().setServiceParent(parent) self._setup() def _scanModule(self, mod, _noSetattr=False): for sym in dir(mod): obj = getattr(mod, sym) if inspect.isclass(obj) and issubclass(obj, base.ResourceType): rtype = obj(self.master) setattr(self.rtypes, rtype.name, rtype) # put its update methods into our 'updates' attribute for name in dir(rtype): o = getattr(rtype, name) if hasattr(o, 'isUpdateMethod'): setattr(self.updates, name, o) # load its endpoints for ep in rtype.getEndpoints(): # don't use inherited values for these parameters clsdict = ep.__class__.__dict__ pathPatterns = clsdict.get('pathPatterns', '') pathPatterns = pathPatterns.split() pathPatterns = [tuple(pp.split('/')[1:]) for pp in pathPatterns] for pp in pathPatterns: # special-case the root if pp == ('',): pp = () self.matcher[pp] = ep rootLinkName = clsdict.get('rootLinkName') if rootLinkName: self.rootLinks.append({'name': rootLinkName}) def _setup(self): self.updates = Updates() self.rtypes = RTypes() for moduleName in self.submodules: module = reflect.namedModule(moduleName) self._scanModule(module) def getEndpoint(self, path): try: return self.matcher[path] except KeyError: raise exceptions.InvalidPathError( "Invalid path: " + "/".join([str(p) for p in path])) def getResourceType(self, name): return getattr(self.rtypes, name) @defer.inlineCallbacks def get(self, path, filters=None, fields=None, order=None, limit=None, offset=None): resultSpec = resultspec.ResultSpec(filters=filters, fields=fields, order=order, limit=limit, offset=offset) endpoint, kwargs = self.getEndpoint(path) rv = yield endpoint.get(resultSpec, kwargs) if resultSpec: rv = resultSpec.apply(rv) return rv def control(self, action, args, path): endpoint, kwargs = self.getEndpoint(path) return endpoint.control(action, args, kwargs) def produceEvent(self, rtype, msg, event): # warning, this is temporary api, until all code is migrated to data # api rsrc = self.getResourceType(rtype) return rsrc.produceEvent(msg, event) def allEndpoints(self): """return the full spec of the connector as a list of dicts """ paths = [] for k, v in sorted(self.matcher.iterPatterns()): paths.append(dict(path="/".join(k), plural=str(v.rtype.plural), type=str(v.rtype.entityType.name), 
type_spec=v.rtype.entityType.getSpec())) return paths buildbot-2.6.0/master/buildbot/data/exceptions.py000066400000000000000000000021451361162603000220660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # copy some exceptions from the DB layer from buildbot.db.schedulers import SchedulerAlreadyClaimedError __all__ = [ 'SchedulerAlreadyClaimedError', 'InvalidPathError', 'InvalidControlException', ] class DataException(Exception): pass class InvalidPathError(DataException): "A path argument was invalid or unknown" class InvalidControlException(DataException): "Action is not supported" buildbot-2.6.0/master/buildbot/data/forceschedulers.py000066400000000000000000000073131361162603000230670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.schedulers import forcesched from buildbot.www.rest import JSONRPC_CODES from buildbot.www.rest import BadJsonRpc2 def forceScheduler2Data(sched): ret = dict(all_fields=[], name=str(sched.name), button_name=str(sched.buttonName), label=str(sched.label), builder_names=[str(name) for name in sched.builderNames], enabled=sched.enabled) ret["all_fields"] = [field.getSpec() for field in sched.all_fields] return ret class ForceSchedulerEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /forceschedulers/i:schedulername """ def findForceScheduler(self, schedulername): # eventually this may be db backed. 
This is why the API is async for sched in self.master.allSchedulers(): if sched.name == schedulername and isinstance(sched, forcesched.ForceScheduler): return defer.succeed(sched) @defer.inlineCallbacks def get(self, resultSpec, kwargs): sched = yield self.findForceScheduler(kwargs['schedulername']) if sched is not None: return forceScheduler2Data(sched) @defer.inlineCallbacks def control(self, action, args, kwargs): if action == "force": sched = yield self.findForceScheduler(kwargs['schedulername']) if "owner" not in args: args['owner'] = "user" try: res = yield sched.force(**args) return res except forcesched.CollectedValidationError as e: raise BadJsonRpc2(e.errors, JSONRPC_CODES["invalid_params"]) return None class ForceSchedulersEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /forceschedulers /builders/:builderid/forceschedulers """ rootLinkName = 'forceschedulers' @defer.inlineCallbacks def get(self, resultSpec, kwargs): ret = [] builderid = kwargs.get('builderid', None) if builderid is not None: bdict = yield self.master.db.builders.getBuilder(builderid) for sched in self.master.allSchedulers(): if isinstance(sched, forcesched.ForceScheduler): if builderid is not None and bdict['name'] not in sched.builderNames: continue ret.append(forceScheduler2Data(sched)) return ret class ForceScheduler(base.ResourceType): name = "forcescheduler" plural = "forceschedulers" endpoints = [ForceSchedulerEndpoint, ForceSchedulersEndpoint] keyFields = [] class EntityType(types.Entity): name = types.Identifier(50) button_name = types.String() label = types.String() builder_names = types.List(of=types.Identifier(50)) enabled = types.Boolean() all_fields = types.List(of=types.JsonObject()) entityType = EntityType(name) buildbot-2.6.0/master/buildbot/data/logchunks.py000066400000000000000000000113151361162603000217010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
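# Hedged sketch of triggering a force scheduler through the control path
# handled above. The scheduler name, builderid, reason and owner are
# assumptions; which keyword arguments ForceScheduler.force() actually accepts
# depends on the scheduler's configured fields, so treat the args dict below
# as illustrative only.
from twisted.internet import defer

@defer.inlineCallbacks
def force_build(master):
    res = yield master.data.control(
        'force',
        {'owner': 'an-operator', 'builderid': 1, 'reason': 'manual run'},
        ('forceschedulers', 'example-force-scheduler'))
    # On validation problems the endpoint raises BadJsonRpc2 with the
    # collected errors, as shown in control() above.
    return res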
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class LogChunkEndpointBase(base.BuildNestingMixin, base.Endpoint): @defer.inlineCallbacks def getLogIdAndDbDictFromKwargs(self, kwargs): # calculate the logid if 'logid' in kwargs: logid = kwargs['logid'] dbdict = None else: stepid = yield self.getStepid(kwargs) if stepid is None: return (None, None) dbdict = yield self.master.db.logs.getLogBySlug(stepid, kwargs.get('log_slug')) if not dbdict: return (None, None) logid = dbdict['id'] return (logid, dbdict) class LogChunkEndpoint(LogChunkEndpointBase): # Note that this is a singular endpoint, even though it overrides the # offset/limit query params in ResultSpec isCollection = False pathPatterns = """ /logs/n:logid/contents /steps/n:stepid/logs/i:log_slug/contents /builds/n:buildid/steps/i:step_name/logs/i:log_slug/contents /builds/n:buildid/steps/n:step_number/logs/i:log_slug/contents /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug/contents /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug/contents """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): logid, dbdict = yield self.getLogIdAndDbDictFromKwargs(kwargs) if logid is None: return firstline = int(resultSpec.offset or 0) lastline = None if resultSpec.limit is None else firstline + \ int(resultSpec.limit) - 1 resultSpec.removePagination() # get the number of lines, if necessary if lastline is None: if not dbdict: dbdict = yield self.master.db.logs.getLog(logid) if not dbdict: return lastline = int(max(0, dbdict['num_lines'] - 1)) # bounds checks if firstline < 0 or lastline < 0 or firstline > lastline: return logLines = yield self.master.db.logs.getLogLines( logid, firstline, lastline) return {'logid': logid, 'firstline': firstline, 'content': logLines} class RawLogChunkEndpoint(LogChunkEndpointBase): # Note that this is a singular endpoint, even though it overrides the # offset/limit query params in ResultSpec isCollection = False isRaw = True pathPatterns = """ /logs/n:logid/raw /steps/n:stepid/logs/i:log_slug/raw /builds/n:buildid/steps/i:step_name/logs/i:log_slug/raw /builds/n:buildid/steps/n:step_number/logs/i:log_slug/raw /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug/raw /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug/raw """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): logid, dbdict = yield self.getLogIdAndDbDictFromKwargs(kwargs) if logid is None: return if not dbdict: dbdict = yield self.master.db.logs.getLog(logid) if not dbdict: return lastline = max(0, dbdict['num_lines'] - 1) logLines = yield self.master.db.logs.getLogLines( logid, 0, lastline) if dbdict['type'] == 's': logLines = "\n".join([line[1:] for line in logLines.splitlines()]) return {'raw': logLines, 'mime-type': 'text/html' if dbdict['type'] == 'h' else 'text/plain', 'filename': dbdict['slug']} class LogChunk(base.ResourceType): name = "logchunk" plural = "logchunks" endpoints = [LogChunkEndpoint, RawLogChunkEndpoint] keyFields = ['stepid', 'logid'] class EntityType(types.Entity): logid = types.Integer() firstline = types.Integer() content = types.String() entityType = EntityType(name) buildbot-2.6.0/master/buildbot/data/logs.py000066400000000000000000000121311361162603000206450ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types from buildbot.util import identifiers class EndpointMixin: def db2data(self, dbdict): data = { 'logid': dbdict['id'], 'name': dbdict['name'], 'slug': dbdict['slug'], 'stepid': dbdict['stepid'], 'complete': dbdict['complete'], 'num_lines': dbdict['num_lines'], 'type': dbdict['type'], } return defer.succeed(data) class LogEndpoint(EndpointMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /logs/n:logid /steps/n:stepid/logs/i:log_slug /builds/n:buildid/steps/i:step_name/logs/i:log_slug /builds/n:buildid/steps/n:step_number/logs/i:log_slug /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs/i:log_slug /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs/i:log_slug /builders/i:buildername/builds/n:build_number/steps/i:step_name/logs/i:log_slug /builders/i:buildername/builds/n:build_number/steps/n:step_number/logs/i:log_slug """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'logid' in kwargs: dbdict = yield self.master.db.logs.getLog(kwargs['logid']) return (yield self.db2data(dbdict)) if dbdict else None stepid = yield self.getStepid(kwargs) if stepid is None: return dbdict = yield self.master.db.logs.getLogBySlug(stepid, kwargs.get('log_slug')) return (yield self.db2data(dbdict)) if dbdict else None class LogsEndpoint(EndpointMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /steps/n:stepid/logs /builds/n:buildid/steps/i:step_name/logs /builds/n:buildid/steps/n:step_number/logs /builders/n:builderid/builds/n:build_number/steps/i:step_name/logs /builders/n:builderid/builds/n:build_number/steps/n:step_number/logs /builders/i:buildername/builds/n:build_number/steps/i:step_name/logs /builders/i:buildername/builds/n:build_number/steps/n:step_number/logs """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): stepid = yield self.getStepid(kwargs) if not stepid: return [] logs = yield self.master.db.logs.getLogs(stepid=stepid) results = [] for dbdict in logs: results.append((yield self.db2data(dbdict))) return results class Log(base.ResourceType): name = "log" plural = "logs" endpoints = [LogEndpoint, LogsEndpoint] keyFields = ['stepid', 'logid'] eventPathPatterns = """ /logs/:logid /steps/:stepid/logs/:slug """ class EntityType(types.Entity): logid = types.Integer() name = types.String() slug = types.Identifier(50) stepid = types.Integer() complete = types.Boolean() num_lines = types.Integer() type = types.Identifier(1) entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, _id, event): # get the build and munge the result for the notification build = yield self.master.data.get(('logs', str(_id))) self.produceEvent(build, event) @base.updateMethod @defer.inlineCallbacks def addLog(self, stepid, name, 
type): slug = identifiers.forceIdentifier(50, name) while True: try: logid = yield self.master.db.logs.addLog( stepid=stepid, name=name, slug=slug, type=type) except KeyError: slug = identifiers.incrementIdentifier(50, slug) continue self.generateEvent(logid, "new") return logid @base.updateMethod @defer.inlineCallbacks def appendLog(self, logid, content): res = yield self.master.db.logs.appendLog(logid=logid, content=content) self.generateEvent(logid, "append") return res @base.updateMethod @defer.inlineCallbacks def finishLog(self, logid): res = yield self.master.db.logs.finishLog(logid=logid) self.generateEvent(logid, "finished") return res @base.updateMethod def compressLog(self, logid): return self.master.db.logs.compressLog(logid=logid) buildbot-2.6.0/master/buildbot/data/masters.py000066400000000000000000000155211361162603000213650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.data import base from buildbot.data import resultspec from buildbot.data import types from buildbot.process.results import RETRY from buildbot.util import epoch2datetime # time, in minutes, after which a master that hasn't checked in will be # marked as inactive EXPIRE_MINUTES = 10 def _db2data(master): return dict(masterid=master['id'], name=master['name'], active=master['active'], last_active=master['last_active']) class MasterEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /masters/n:masterid /builders/n:builderid/masters/n:masterid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): # if a builder is given, only return the master if it's associated with # this builder if 'builderid' in kwargs: builder = yield self.master.db.builders.getBuilder( builderid=kwargs['builderid']) if not builder or kwargs['masterid'] not in builder['masterids']: return None m = yield self.master.db.masters.getMaster(kwargs['masterid']) return _db2data(m) if m else None class MastersEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /masters /builders/n:builderid/masters """ rootLinkName = 'masters' @defer.inlineCallbacks def get(self, resultSpec, kwargs): masterlist = yield self.master.db.masters.getMasters() if 'builderid' in kwargs: builder = yield self.master.db.builders.getBuilder( builderid=kwargs['builderid']) if builder: masterids = set(builder['masterids']) masterlist = [m for m in masterlist if m['id'] in masterids] else: masterlist = [] return [_db2data(m) for m in masterlist] class Master(base.ResourceType): name = "master" plural = "masters" endpoints = [MasterEndpoint, MastersEndpoint] eventPathPatterns = """ /masters/:masterid """ class EntityType(types.Entity): masterid = types.Integer() name = types.String() active = types.Boolean() last_active = types.DateTime() entityType = EntityType(name) @base.updateMethod 
@defer.inlineCallbacks def masterActive(self, name, masterid): activated = yield self.master.db.masters.setMasterState( masterid=masterid, active=True) if activated: self.produceEvent( dict(masterid=masterid, name=name, active=True), 'started') @base.updateMethod @defer.inlineCallbacks def expireMasters(self, forceHouseKeeping=False): too_old = epoch2datetime(self.master.reactor.seconds() - 60 * EXPIRE_MINUTES) masters = yield self.master.db.masters.getMasters() for m in masters: if m['last_active'] is not None and m['last_active'] >= too_old: continue # mark the master inactive, and send a message on its behalf deactivated = yield self.master.db.masters.setMasterState( masterid=m['id'], active=False) if deactivated: yield self._masterDeactivated(m['id'], m['name']) elif forceHouseKeeping: yield self._masterDeactivatedHousekeeping(m['id'], m['name']) @base.updateMethod @defer.inlineCallbacks def masterStopped(self, name, masterid): deactivated = yield self.master.db.masters.setMasterState( masterid=masterid, active=False) if deactivated: yield self._masterDeactivated(masterid, name) @defer.inlineCallbacks def _masterDeactivatedHousekeeping(self, masterid, name): log.msg("doing housekeeping for master %d %s" % (masterid, name)) # common code for deactivating a master yield self.master.data.rtypes.worker._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.builder._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.scheduler._masterDeactivated( masterid=masterid) yield self.master.data.rtypes.changesource._masterDeactivated( masterid=masterid) # for each build running on that instance.. builds = yield self.master.data.get(('builds',), filters=[resultspec.Filter('masterid', 'eq', [masterid]), resultspec.Filter('complete', 'eq', [False])]) for build in builds: # stop any running steps.. steps = yield self.master.data.get( ('builds', build['buildid'], 'steps'), filters=[resultspec.Filter('results', 'eq', [None])]) for step in steps: # finish remaining logs for those steps.. logs = yield self.master.data.get( ('steps', step['stepid'], 'logs'), filters=[resultspec.Filter('complete', 'eq', [False])]) for _log in logs: yield self.master.data.updates.finishLog( logid=_log['logid']) yield self.master.data.updates.finishStep( stepid=step['stepid'], results=RETRY, hidden=False) # then stop the build itself yield self.master.data.updates.finishBuild( buildid=build['buildid'], results=RETRY) # unclaim all of the build requests owned by the deactivated instance buildrequests = yield self.master.db.buildrequests.getBuildRequests( complete=False, claimed=masterid) yield self.master.db.buildrequests.unclaimBuildRequests( brids=[br['buildrequestid'] for br in buildrequests]) @defer.inlineCallbacks def _masterDeactivated(self, masterid, name): yield self._masterDeactivatedHousekeeping(masterid, name) self.produceEvent( dict(masterid=masterid, name=name, active=False), 'stopped') buildbot-2.6.0/master/buildbot/data/patches.py000066400000000000000000000022671361162603000213410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
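# Hedged sketch of the master housekeeping methods above as the master service
# would call them: mark this master active at startup, then periodically
# expire masters that have not checked in within EXPIRE_MINUTES. `master`, the
# name and the id are illustrative assumptions.
from twisted.internet import defer

@defer.inlineCallbacks
def heartbeat(master, masterid):
    yield master.data.updates.masterActive(name='example-master', masterid=masterid)
    # expireMasters() deactivates any master whose last_active timestamp is
    # older than EXPIRE_MINUTES, finishing its builds with RETRY and
    # unclaiming its build requests.
    yield master.data.updates.expireMasters()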
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.data import base from buildbot.data import types # NOTE: patches are not available via endpoints class Patch(base.ResourceType): name = "patch" plural = "patches" endpoints = [] keyFields = ['patchid'] class EntityType(types.Entity): patchid = types.Integer() body = types.Binary() level = types.Integer() subdir = types.String() author = types.String() comment = types.String() entityType = EntityType(name) buildbot-2.6.0/master/buildbot/data/properties.py000066400000000000000000000061031361162603000220770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class BuildsetPropertiesEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /buildsets/n:bsid/properties """ def get(self, resultSpec, kwargs): return self.master.db.buildsets.getBuildsetProperties(kwargs['bsid']) class BuildPropertiesEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /builds/n:buildid/properties """ def get(self, resultSpec, kwargs): return self.master.db.builds.getBuildProperties(kwargs['buildid']) class Properties(base.ResourceType): name = "property" plural = "properties" endpoints = [BuildsetPropertiesEndpoint, BuildPropertiesEndpoint] keyFields = [] entityType = types.SourcedProperties() def generateUpdateEvent(self, buildid, newprops): # This event cannot use the produceEvent mechanism, as the properties resource type is a bit specific # (this is a dictionary collection) # We only send the new properties, and count on the client to merge the resulting properties dict # We are good, as there is no way to delete a property. 
routingKey = ('builds', str(buildid), "properties", "update") newprops = self.sanitizeMessage(newprops) return self.master.mq.produce(routingKey, newprops) @base.updateMethod @defer.inlineCallbacks def setBuildProperties(self, buildid, properties): to_update = {} oldproperties = yield self.master.data.get(('builds', str(buildid), "properties")) properties = properties.getProperties() properties = yield properties.render(properties.asDict()) for k, v in properties.items(): if k in oldproperties and oldproperties[k] == v: continue to_update[k] = v if to_update: for k, v in to_update.items(): yield self.master.db.builds.setBuildProperty( buildid, k, v[0], v[1]) yield self.generateUpdateEvent(buildid, to_update) @base.updateMethod @defer.inlineCallbacks def setBuildProperty(self, buildid, name, value, source): res = yield self.master.db.builds.setBuildProperty( buildid, name, value, source) yield self.generateUpdateEvent(buildid, dict(name=(value, source))) return res buildbot-2.6.0/master/buildbot/data/resultspec.py000066400000000000000000000337361361162603000221100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.python import log from buildbot.data import base class FieldBase: """ This class implements a basic behavior to wrap value into a `Field` instance """ __slots__ = ['field', 'op', 'values'] singular_operators = { 'eq': lambda d, v: d == v[0], 'ne': lambda d, v: d != v[0], 'lt': lambda d, v: d < v[0], 'le': lambda d, v: d <= v[0], 'gt': lambda d, v: d > v[0], 'ge': lambda d, v: d >= v[0], 'contains': lambda d, v: v[0] in d, } singular_operators_sql = { 'eq': lambda d, v: d == v[0], 'ne': lambda d, v: d != v[0], 'lt': lambda d, v: d < v[0], 'le': lambda d, v: d <= v[0], 'gt': lambda d, v: d > v[0], 'ge': lambda d, v: d >= v[0], 'contains': lambda d, v: d.contains(v[0]) # only support string values, because currently there are no queries against lists in SQL } plural_operators = { 'eq': lambda d, v: d in v, 'ne': lambda d, v: d not in v, 'contains': lambda d, v: len(set(v).intersection(set(d))) > 0, } plural_operators_sql = { 'eq': lambda d, v: d.in_(v), 'ne': lambda d, v: d.notin_(v), 'contains': lambda d, vs: sa.or_(*[d.contains(v) for v in vs]), # sqlalchemy v0.8's or_ cannot take generator arguments, so this has to be manually expanded # only support string values, because currently there are no queries against lists in SQL } def __init__(self, field, op, values): self.field = field self.op = op self.values = values def getOperator(self, sqlMode=False): v = self.values if len(v) == 1: if sqlMode: ops = self.singular_operators_sql else: ops = self.singular_operators else: if sqlMode: ops = self.plural_operators_sql else: ops = self.plural_operators v = set(v) return ops[self.op] def apply(self, data): fld = self.field v = self.values f = self.getOperator() return (d for d in 
data if f(d[fld], v)) def __repr__(self): return "resultspec.{}('{}','{}',{})".format(self.__class__.__name__, self.field, self.op, self.values) def __eq__(self, b): for i in self.__slots__: if getattr(self, i) != getattr(b, i): return False return True def __ne__(self, b): return not (self == b) class Property(FieldBase): """ Wraps ``property`` type value(s) """ class Filter(FieldBase): """ Wraps ``filter`` type value(s) """ class NoneComparator: """ Object which wraps 'None' when doing comparisons in sorted(). '> None' and '< None' are not supported in Python 3. """ def __init__(self, value): self.value = value def __lt__(self, other): if self.value is None and other.value is None: return False elif self.value is None: return True elif other.value is None: return False return self.value < other.value def __eq__(self, other): return self.value == other.value def __ne__(self, other): return self.value != other.value def __gt_(self, other): if self.value is None and other.value is None: return False elif self.value is None: return False elif other.value is None: return True return self.value < other.value class ReverseComparator: """ Object which swaps '<' and '>' so instead of a < b, it does b < a, and instead of a > b, it does b > a. This can be used in reverse comparisons. """ def __init__(self, value): self.value = value def __lt__(self, other): return other.value < self.value def __eq__(self, other): return other.value == self.value def __ne__(self, other): return other.value != self.value def __gt_(self, other): return other.value > self.value class ResultSpec: __slots__ = ['filters', 'fields', 'properties', 'order', 'limit', 'offset', 'fieldMapping'] def __init__(self, filters=None, fields=None, properties=None, order=None, limit=None, offset=None): self.filters = filters or [] self.properties = properties or [] self.fields = fields self.order = order self.limit = limit self.offset = offset self.fieldMapping = {} def __repr__(self): return ("ResultSpec(**{{'filters': {}, 'fields': {}, 'properties': {}, " "'order': {}, 'limit': {}, 'offset': {}").format( self.filters, self.fields, self.properties, self.order, self.limit, self.offset) + "})" def __eq__(self, b): for i in ['filters', 'fields', 'properties', 'order', 'limit', 'offset']: if getattr(self, i) != getattr(b, i): return False return True def __ne__(self, b): return not (self == b) def popProperties(self): values = [] for p in self.properties: if p.field == b'property' and p.op == 'eq': self.properties.remove(p) values = p.values break return values def popFilter(self, field, op): for f in self.filters: if f.field == field and f.op == op: self.filters.remove(f) return f.values def popOneFilter(self, field, op): v = self.popFilter(field, op) return v[0] if v is not None else None def popBooleanFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: return eqVals[0] neVals = self.popFilter(field, 'ne') if neVals and len(neVals) == 1: return not neVals[0] def popStringFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: return eqVals[0] def popIntegerFilter(self, field): eqVals = self.popFilter(field, 'eq') if eqVals and len(eqVals) == 1: try: return int(eqVals[0]) except ValueError: raise ValueError("Filter value for {} should be integer, but got: {}".format( field, eqVals[0])) def removePagination(self): self.limit = self.offset = None def removeOrder(self): self.order = None def popField(self, field): try: i = self.fields.index(field) except ValueError: 
return False del self.fields[i] return True def findColumn(self, query, field): # will throw key error if field not in mapping mapped = self.fieldMapping[field] for col in query.inner_columns: if str(col) == mapped: return col raise KeyError("unable to find field {} in query".format(field)) def applyFilterToSQLQuery(self, query, f): field = f.field col = self.findColumn(query, field) # as sqlalchemy is overriding python operators, we can just use the same # python code generated by the filter return query.where(f.getOperator(sqlMode=True)(col, f.values)) def applyOrderToSQLQuery(self, query, o): reverse = False if o.startswith('-'): reverse = True o = o[1:] col = self.findColumn(query, o) if reverse: col = col.desc() return query.order_by(col) def applyToSQLQuery(self, query): filters = self.filters order = self.order unmatched_filters = [] unmatched_order = [] # apply the filters if the name of field is found in the model, and # db2data for f in filters: try: query = self.applyFilterToSQLQuery(query, f) except KeyError: # if filter is unmatched, we will do the filtering manually in # self.apply unmatched_filters.append(f) # apply order if necessary if order: for o in order: try: query = self.applyOrderToSQLQuery(query, o) except KeyError: # if order is unmatched, we will do the ordering manually # in self.apply unmatched_order.append(o) # we cannot limit in sql if there is missing filtering or ordering if unmatched_filters or unmatched_order: if self.offset is not None or self.limit is not None: log.msg("Warning: limited data api query is not backed by db because of following filters", unmatched_filters, unmatched_order) self.filters = unmatched_filters self.order = tuple(unmatched_order) return query, None count_query = sa.select([sa.func.count()]).select_from(query.alias('query')) self.order = None self.filters = [] # finally, slice out the limit/offset if self.offset is not None: query = query.offset(self.offset) self.offset = None if self.limit is not None: query = query.limit(self.limit) self.limit = None return query, count_query def thd_execute(self, conn, q, dictFromRow): offset, limit = self.offset, self.limit q, qc = self.applyToSQLQuery(q) res = conn.execute(q) rv = [dictFromRow(row) for row in res.fetchall()] if qc is not None and (offset or limit): total = conn.execute(qc).scalar() rv = base.ListResult(rv) rv.offset, rv.total, rv.limit = offset, total, limit return rv def apply(self, data): if data is None: return data if self.fields: fields = set(self.fields) def includeFields(d): return dict((k, v) for k, v in d.items() if k in fields) applyFields = includeFields else: fields = None if isinstance(data, dict): # item details if fields: data = applyFields(data) return data else: filters = self.filters order = self.order # item collection if isinstance(data, base.ListResult): # if pagination was applied, then fields, etc. must be empty assert not fields and not order and not filters, \ "endpoint must apply fields, order, and filters if it performs pagination" offset, total = data.offset, data.total limit = data.limit else: offset, total = None, None limit = None if fields: data = (applyFields(d) for d in data) # link the filters together and then flatten to list for f in self.filters: data = f.apply(data) data = list(data) if total is None: total = len(data) if self.order: def keyFunc(elem, order=self.order): """ Do a multi-level sort by passing in the keys to sort by. @param elem: each item in the list to sort. 
It must be a C{dict} @param order: a list of keys to sort by, such as: ('lastName', 'firstName', 'age') @return: a key used by sorted(). This will be a list such as: [a['lastName', a['firstName'], a['age']] @rtype: a C{list} """ compareKey = [] for k in order: doReverse = False if k[0] == '-': # If we get a key '-lastName', # it means sort by 'lastName' in reverse. k = k[1:] doReverse = True val = NoneComparator(elem[k]) if doReverse: val = ReverseComparator(val) compareKey.append(val) return compareKey data.sort(key=keyFunc) # finally, slice out the limit/offset if self.offset is not None or self.limit is not None: if offset is not None or limit is not None: raise AssertionError("endpoint must clear offset/limit") end = ((self.offset or 0) + self.limit if self.limit is not None else None) data = data[self.offset:end] offset = self.offset limit = self.limit rv = base.ListResult(data) rv.offset, rv.total = offset, total rv.limit = limit return rv # a resultSpec which does not implement filtering in python (for tests) class OptimisedResultSpec(ResultSpec): def apply(self, data): return data buildbot-2.6.0/master/buildbot/data/root.py000066400000000000000000000032521361162603000206700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class RootEndpoint(base.Endpoint): isCollection = True pathPatterns = "/" def get(self, resultSpec, kwargs): return defer.succeed(self.master.data.rootLinks) class Root(base.ResourceType): name = "rootlink" plural = "rootlinks" endpoints = [RootEndpoint] class EntityType(types.Entity): name = types.String() entityType = EntityType(name) class SpecEndpoint(base.Endpoint): isCollection = True pathPatterns = "/application.spec" def get(self, resultSpec, kwargs): return defer.succeed(self.master.data.allEndpoints()) class Spec(base.ResourceType): name = "spec" plural = "specs" endpoints = [SpecEndpoint] class EntityType(types.Entity): path = types.String() type = types.String() plural = types.String() type_spec = types.JsonObject() entityType = EntityType(name) buildbot-2.6.0/master/buildbot/data/schedulers.py000066400000000000000000000111321361162603000220420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
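# Hedged sketch exercising the pure-Python filtering, ordering and pagination
# implemented in ResultSpec.apply() above; the sample rows are made up for
# illustration.
from buildbot.data.resultspec import Filter, ResultSpec

builds = [
    {'buildid': 1, 'results': 0, 'branch': 'master'},
    {'buildid': 2, 'results': 2, 'branch': 'master'},
    {'buildid': 3, 'results': 0, 'branch': 'release'},
]

spec = ResultSpec(filters=[Filter('results', 'eq', [0])],
                  order=('-buildid',), limit=1)
# apply() keeps rows with results == 0, sorts by buildid descending and slices
# to one element, returning a ListResult carrying offset/total/limit metadata.
newest_green = spec.apply(builds)
assert list(newest_green) == [{'buildid': 3, 'results': 0, 'branch': 'release'}]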
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import masters from buildbot.data import types from buildbot.db.schedulers import SchedulerAlreadyClaimedError class Db2DataMixin: @defer.inlineCallbacks def db2data(self, dbdict): master = None if dbdict['masterid'] is not None: master = yield self.master.data.get( ('masters', dbdict['masterid'])) data = { 'schedulerid': dbdict['id'], 'name': dbdict['name'], 'enabled': dbdict['enabled'], 'master': master, } return data class SchedulerEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /schedulers/n:schedulerid /masters/n:masterid/schedulers/n:schedulerid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): dbdict = yield self.master.db.schedulers.getScheduler( kwargs['schedulerid']) if 'masterid' in kwargs: if dbdict['masterid'] != kwargs['masterid']: return return (yield self.db2data(dbdict)) if dbdict else None @defer.inlineCallbacks def control(self, action, args, kwargs): if action == 'enable': schedulerid = kwargs['schedulerid'] v = args['enabled'] yield self.master.data.updates.schedulerEnable(schedulerid, v) return None class SchedulersEndpoint(Db2DataMixin, base.Endpoint): isCollection = True pathPatterns = """ /schedulers /masters/n:masterid/schedulers """ rootLinkName = 'schedulers' @defer.inlineCallbacks def get(self, resultSpec, kwargs): schedulers = yield self.master.db.schedulers.getSchedulers( masterid=kwargs.get('masterid')) schdicts = yield defer.DeferredList( [self.db2data(schdict) for schdict in schedulers], consumeErrors=True, fireOnOneErrback=True) return [r for (s, r) in schdicts] class Scheduler(base.ResourceType): name = "scheduler" plural = "schedulers" endpoints = [SchedulerEndpoint, SchedulersEndpoint] keyFields = ['schedulerid'] eventPathPatterns = """ /schedulers/:schedulerid """ class EntityType(types.Entity): schedulerid = types.Integer() name = types.String() enabled = types.Boolean() master = types.NoneOk(masters.Master.entityType) entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, schedulerid, event): scheduler = yield self.master.data.get(('schedulers', str(schedulerid))) self.produceEvent(scheduler, event) @base.updateMethod @defer.inlineCallbacks def schedulerEnable(self, schedulerid, v): yield self.master.db.schedulers.enable(schedulerid, v) yield self.generateEvent(schedulerid, 'updated') return None @base.updateMethod def findSchedulerId(self, name): return self.master.db.schedulers.findSchedulerId(name) @base.updateMethod def trySetSchedulerMaster(self, schedulerid, masterid): d = self.master.db.schedulers.setSchedulerMaster( schedulerid, masterid) # set is successful: deferred result is True d.addCallback(lambda _: True) @d.addErrback def trapAlreadyClaimedError(why): # the db layer throws an exception if the claim fails; we squash # that error but let other exceptions continue upward why.trap(SchedulerAlreadyClaimedError) # set failed: deferred result is False return False return d @defer.inlineCallbacks def _masterDeactivated(self, masterid): schedulers = yield self.master.db.schedulers.getSchedulers( masterid=masterid) for sch in schedulers: yield self.master.db.schedulers.setSchedulerMaster(sch['id'], None) 
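# Hedged sketch of the scheduler claiming and enabling helpers defined above;
# `master`, the scheduler name and the ids are assumptions for illustration.
from twisted.internet import defer

@defer.inlineCallbacks
def claim_and_enable_scheduler(master, masterid):
    schedulerid = yield master.data.updates.findSchedulerId('example-nightly')
    claimed = yield master.data.updates.trySetSchedulerMaster(schedulerid, masterid)
    if claimed:
        # schedulerEnable() updates the database and emits an 'updated' event.
        yield master.data.updates.schedulerEnable(schedulerid, True)
    return claimed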
buildbot-2.6.0/master/buildbot/data/sourcestamps.py000066400000000000000000000052421361162603000224360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import patches from buildbot.data import types def _db2data(ss): data = { 'ssid': ss['ssid'], 'branch': ss['branch'], 'revision': ss['revision'], 'project': ss['project'], 'repository': ss['repository'], 'codebase': ss['codebase'], 'created_at': ss['created_at'], 'patch': None, } if ss['patch_body']: data['patch'] = { 'patchid': ss['patchid'], 'level': ss['patch_level'], 'subdir': ss['patch_subdir'], 'author': ss['patch_author'], 'comment': ss['patch_comment'], 'body': ss['patch_body'], } return data class SourceStampEndpoint(base.Endpoint): isCollection = False pathPatterns = """ /sourcestamps/n:ssid """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): ssdict = yield self.master.db.sourcestamps.getSourceStamp( kwargs['ssid']) return _db2data(ssdict) if ssdict else None class SourceStampsEndpoint(base.Endpoint): isCollection = True pathPatterns = """ /sourcestamps """ rootLinkName = 'sourcestamps' @defer.inlineCallbacks def get(self, resultSpec, kwargs): return [_db2data(ssdict) for ssdict in (yield self.master.db.sourcestamps.getSourceStamps())] class SourceStamp(base.ResourceType): name = "sourcestamp" plural = "sourcestamps" endpoints = [SourceStampEndpoint, SourceStampsEndpoint] keyFields = ['ssid'] class EntityType(types.Entity): ssid = types.Integer() revision = types.NoneOk(types.String()) branch = types.NoneOk(types.String()) repository = types.String() project = types.String() codebase = types.String() patch = types.NoneOk(patches.Patch.entityType) created_at = types.DateTime() entityType = EntityType(name) buildbot-2.6.0/master/buildbot/data/steps.py000066400000000000000000000126561361162603000210530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
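# Hedged sketch of reading a sourcestamp back through the endpoints above; the
# ssid is an assumption. The returned dict follows _db2data(): ssid, branch,
# revision, project, repository, codebase, created_at and an optional patch.
from twisted.internet import defer

@defer.inlineCallbacks
def show_sourcestamp(master, ssid):
    ss = yield master.data.get(('sourcestamps', str(ssid)))
    if ss is not None and ss['patch'] is not None:
        # the nested patch dict carries patchid, level, subdir, author,
        # comment and body, as built in _db2data() above
        print('patch applied at -p%d' % ss['patch']['level'])
    return ss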
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import types class Db2DataMixin: def db2data(self, dbdict): data = { 'stepid': dbdict['id'], 'number': dbdict['number'], 'name': dbdict['name'], 'buildid': dbdict['buildid'], 'started_at': dbdict['started_at'], 'complete': dbdict['complete_at'] is not None, 'complete_at': dbdict['complete_at'], 'state_string': dbdict['state_string'], 'results': dbdict['results'], 'urls': dbdict['urls'], 'hidden': dbdict['hidden'], } return defer.succeed(data) class StepEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = False pathPatterns = """ /steps/n:stepid /builds/n:buildid/steps/i:step_name /builds/n:buildid/steps/n:step_number /builders/n:builderid/builds/n:build_number/steps/i:step_name /builders/n:builderid/builds/n:build_number/steps/n:step_number /builders/i:buildername/builds/n:build_number/steps/i:step_name /builders/i:buildername/builds/n:build_number/steps/n:step_number """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'stepid' in kwargs: dbdict = yield self.master.db.steps.getStep(kwargs['stepid']) return (yield self.db2data(dbdict)) if dbdict else None buildid = yield self.getBuildid(kwargs) if buildid is None: return dbdict = yield self.master.db.steps.getStep( buildid=buildid, number=kwargs.get('step_number'), name=kwargs.get('step_name')) return (yield self.db2data(dbdict)) if dbdict else None class StepsEndpoint(Db2DataMixin, base.BuildNestingMixin, base.Endpoint): isCollection = True pathPatterns = """ /builds/n:buildid/steps /builders/n:builderid/builds/n:build_number/steps /builders/i:buildername/builds/n:build_number/steps """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): if 'buildid' in kwargs: buildid = kwargs['buildid'] else: buildid = yield self.getBuildid(kwargs) if buildid is None: return steps = yield self.master.db.steps.getSteps(buildid=buildid) results = [] for dbdict in steps: results.append((yield self.db2data(dbdict))) return results class Step(base.ResourceType): name = "step" plural = "steps" endpoints = [StepEndpoint, StepsEndpoint] keyFields = ['builderid', 'stepid'] eventPathPatterns = """ /builds/:buildid/steps/:stepid /steps/:stepid """ class EntityType(types.Entity): stepid = types.Integer() number = types.Integer() name = types.Identifier(50) buildid = types.Integer() started_at = types.NoneOk(types.DateTime()) complete = types.Boolean() complete_at = types.NoneOk(types.DateTime()) results = types.NoneOk(types.Integer()) state_string = types.String() urls = types.List( of=types.Dict( name=types.String(), url=types.String() )) hidden = types.Boolean() entityType = EntityType(name) @defer.inlineCallbacks def generateEvent(self, stepid, event): step = yield self.master.data.get(('steps', stepid)) self.produceEvent(step, event) @base.updateMethod @defer.inlineCallbacks def addStep(self, buildid, name): stepid, num, name = yield self.master.db.steps.addStep( buildid=buildid, name=name, state_string='pending') yield self.generateEvent(stepid, 'new') return (stepid, num, name) @base.updateMethod @defer.inlineCallbacks def startStep(self, stepid): yield self.master.db.steps.startStep(stepid=stepid) yield self.generateEvent(stepid, 'started') @base.updateMethod @defer.inlineCallbacks def setStepStateString(self, stepid, state_string): yield self.master.db.steps.setStepStateString( stepid=stepid, state_string=state_string) yield self.generateEvent(stepid, 'updated') @base.updateMethod 
@defer.inlineCallbacks def addStepURL(self, stepid, name, url): yield self.master.db.steps.addURL( stepid=stepid, name=name, url=url) yield self.generateEvent(stepid, 'updated') @base.updateMethod @defer.inlineCallbacks def finishStep(self, stepid, results, hidden): yield self.master.db.steps.finishStep( stepid=stepid, results=results, hidden=hidden) yield self.generateEvent(stepid, 'finished') buildbot-2.6.0/master/buildbot/data/types.py000066400000000000000000000254771361162603000210660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # See "Type Validation" in master/docs/developer/tests.rst import datetime import json import re from buildbot import util from buildbot.util import bytes2unicode class Type: name = None doc = None @property def ramlname(self): return self.name def valueFromString(self, arg): # convert a urldecoded bytestring as given in a URL to a value, or # raise an exception trying. This parent method raises an exception, # so if the method is missing in a subclass, it cannot be created from # a string. raise TypeError def cmp(self, val, arg): argVal = self.valueFromString(arg) if val < argVal: return -1 elif val == argVal: return 0 return 1 def validate(self, name, object): raise NotImplementedError def getSpec(self): r = dict(name=self.name) if self.doc is not None: r["doc"] = self.doc return r class NoneOk(Type): def __init__(self, nestedType): assert isinstance(nestedType, Type) self.nestedType = nestedType self.name = self.nestedType.name + " or None" @property def ramlname(self): return self.nestedType.ramlname def valueFromString(self, arg): return self.nestedType.valueFromString(arg) def cmp(self, val, arg): return self.nestedType.cmp(val, arg) def validate(self, name, object): if object is None: return for msg in self.nestedType.validate(name, object): yield msg def getSpec(self): r = self.nestedType.getSpec() r["can_be_null"] = True return r def toRaml(self): return self.nestedType.toRaml() class Instance(Type): types = () ramlType = "unknown" @property def ramlname(self): return self.ramlType def validate(self, name, object): if not isinstance(object, self.types): yield "%s (%r) is not a %s" % ( name, object, self.name or repr(self.types)) def toRaml(self): return self.ramlType class Integer(Instance): name = "integer" types = (int,) ramlType = "integer" def valueFromString(self, arg): return int(arg) class DateTime(Instance): name = "datetime" types = (datetime.datetime) ramlType = "date" class String(Instance): name = "string" types = (str,) ramlType = "string" def valueFromString(self, arg): val = util.bytes2unicode(arg) return val class Binary(Instance): name = "binary" types = (bytes,) ramlType = "string" def valueFromString(self, arg): return arg class Boolean(Instance): name = "boolean" types = (bool,) ramlType = "boolean" def valueFromString(self, arg): return 
util.string2boolean(arg) class Identifier(Type): name = "identifier" identRe = re.compile('^[a-zA-Z_-][a-zA-Z0-9._-]*$') ramlType = "string" def __init__(self, len=None, **kwargs): super().__init__(**kwargs) self.len = len def valueFromString(self, arg): val = util.bytes2unicode(arg) if not self.identRe.match(val) or len(val) > self.len or not val: raise TypeError return val def validate(self, name, object): if not isinstance(object, str): yield "%s - %r - is not a unicode string" % (name, object) elif not self.identRe.match(object): yield "%s - %r - is not an identifier" % (name, object) elif not object: yield "%s - identifiers cannot be an empty string" % (name,) elif len(object) > self.len: yield "%s - %r - is longer than %d characters" % (name, object, self.len) def toRaml(self): return {'type': self.ramlType, 'pattern': self.identRe.pattern} class List(Type): name = "list" ramlType = "list" @property def ramlname(self): return self.of.ramlname def __init__(self, of=None, **kwargs): super().__init__(**kwargs) self.of = of def validate(self, name, object): if not isinstance(object, list): # we want a list, and NOT a subclass yield "%s (%r) is not a %s" % (name, object, self.name) return for idx, elt in enumerate(object): for msg in self.of.validate("%s[%d]" % (name, idx), elt): yield msg def valueFromString(self, arg): # valueFromString is used to process URL args, which come one at # a time, so we defer to the `of` return self.of.valueFromString(arg) def getSpec(self): return dict(type=self.name, of=self.of.getSpec()) def toRaml(self): return {'type': 'array', 'items': self.of.name} def maybeNoneOrList(k, v): if isinstance(v, NoneOk): return k + "?" if isinstance(v, List): return k + "[]" return k class SourcedProperties(Type): name = "sourcedproperties" def validate(self, name, object): if not isinstance(object, dict): # we want a dict, and NOT a subclass yield "%s is not sourced properties (not a dict)" % (name,) return for k, v in object.items(): if not isinstance(k, str): yield "%s property name %r is not unicode" % (name, k) if not isinstance(v, tuple) or len(v) != 2: yield "%s property value for '%s' is not a 2-tuple" % (name, k) return propval, propsrc = v if not isinstance(propsrc, str): yield "%s[%s] source %r is not unicode" % (name, k, propsrc) try: json.loads(bytes2unicode(propval)) except ValueError: yield "%s[%r] value is not JSON-able" % (name, k) def toRaml(self): return {'type': "object", 'properties': {'[]': {'type': 'object', 'properties': { 1: 'string', 2: 'integer | string | object | array | boolean' } }}} class Dict(Type): name = "dict" @property def ramlname(self): return self.toRaml() def __init__(self, **contents): self.contents = contents self.keys = set(contents) def validate(self, name, object): if not isinstance(object, dict): yield "%s (%r) is not a dictionary (got type %s)" \ % (name, object, type(object)) return gotNames = set(object.keys()) unexpected = gotNames - self.keys if unexpected: yield "%s has unexpected keys %s" % (name, ", ".join([repr(n) for n in unexpected])) missing = self.keys - gotNames if missing: yield "%s is missing keys %s" % (name, ", ".join([repr(n) for n in missing])) for k in gotNames & self.keys: f = self.contents[k] for msg in f.validate("%s[%r]" % (name, k), object[k]): yield msg def getSpec(self): return dict(type=self.name, fields=[dict(name=k, type=v.name, type_spec=v.getSpec()) for k, v in self.contents.items() ]) def toRaml(self): return {'type': "object", 'properties': {maybeNoneOrList(k, v): v.ramlname for k, v in 
self.contents.items()}} class JsonObject(Type): name = "jsonobject" ramlname = 'object' def validate(self, name, object): if not isinstance(object, dict): yield "%s (%r) is not a dictionary (got type %s)" \ % (name, object, type(object)) return # make sure JSON can represent it try: json.dumps(object) except Exception as e: yield "%s is not JSON-able: %s" % (name, e) return def toRaml(self): return "object" class Entity(Type): # NOTE: this type is defined by subclassing it in each resource type class. # Instances are generally accessed at e.g., # * buildsets.Buildset.entityType or # * self.master.data.rtypes.buildsets.entityType name = None # set in constructor fields = {} fieldNames = set([]) def __init__(self, name): fields = {} for k, v in self.__class__.__dict__.items(): if isinstance(v, Type): fields[k] = v self.fields = fields self.fieldNames = set(fields) self.name = name def validate(self, name, object): # this uses isinstance, allowing dict subclasses as used by the DB API if not isinstance(object, dict): yield "%s (%r) is not a dictionary (got type %s)" \ % (name, object, type(object)) return gotNames = set(object.keys()) unexpected = gotNames - self.fieldNames if unexpected: yield "%s has unexpected keys %s" % (name, ", ".join([repr(n) for n in unexpected])) missing = self.fieldNames - gotNames if missing: yield "%s is missing keys %s" % (name, ", ".join([repr(n) for n in missing])) for k in gotNames & self.fieldNames: f = self.fields[k] for msg in f.validate("%s[%r]" % (name, k), object[k]): yield msg def getSpec(self): return dict(type=self.name, fields=[dict(name=k, type=v.name, type_spec=v.getSpec()) for k, v in self.fields.items() ]) def toRaml(self): return {'type': "object", 'properties': { maybeNoneOrList(k, v): {'type': v.ramlname, 'description': ''} for k, v in self.fields.items()}} buildbot-2.6.0/master/buildbot/data/workers.py000066400000000000000000000147161361162603000214100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
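# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources): a
# minimal example of exercising the validation types declared in
# buildbot/data/types.py above.  The _SketchThingType class and the sample
# values are invented; only buildbot.data.types is assumed to be importable.
from buildbot.data import types as _sketch_types

class _SketchThingType(_sketch_types.Entity):
    thingid = _sketch_types.Integer()
    name = _sketch_types.Identifier(50)
    note = _sketch_types.NoneOk(_sketch_types.String())

# validate() yields one message per problem; no messages means the dict
# matches the declared fields exactly.
_sketch_problems = list(_SketchThingType('thing').validate('thing', {
    'thingid': 1, 'name': 'example', 'note': None}))
assert _sketch_problems == []
# ---------------------------------------------------------------------------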
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import exceptions from buildbot.data import types from buildbot.util import identifiers class Db2DataMixin: def db2data(self, dbdict): return { 'workerid': dbdict['id'], 'name': dbdict['name'], 'workerinfo': dbdict['workerinfo'], 'paused': dbdict['paused'], 'graceful': dbdict['graceful'], 'connected_to': [ {'masterid': id} for id in dbdict['connected_to']], 'configured_on': [ {'masterid': c['masterid'], 'builderid': c['builderid']} for c in dbdict['configured_on']], } class WorkerEndpoint(Db2DataMixin, base.Endpoint): isCollection = False pathPatterns = """ /workers/n:workerid /workers/i:name /masters/n:masterid/workers/n:workerid /masters/n:masterid/workers/i:name /masters/n:masterid/builders/n:builderid/workers/n:workerid /masters/n:masterid/builders/n:builderid/workers/i:name /builders/n:builderid/workers/n:workerid /builders/n:builderid/workers/i:name """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): sldict = yield self.master.db.workers.getWorker( workerid=kwargs.get('workerid'), name=kwargs.get('name'), masterid=kwargs.get('masterid'), builderid=kwargs.get('builderid')) if sldict: return self.db2data(sldict) @defer.inlineCallbacks def control(self, action, args, kwargs): if action not in ("stop", "pause", "unpause", "kill"): raise exceptions.InvalidControlException("action: {} is not supported".format(action)) worker = yield self.get(None, kwargs) if worker is not None: self.master.mq.produce(("control", "worker", str(worker['workerid']), action), dict(reason=kwargs.get('reason', args.get('reason', 'no reason')))) else: raise exceptions.exceptions.InvalidPathError("worker not found") class WorkersEndpoint(Db2DataMixin, base.Endpoint): isCollection = True rootLinkName = 'workers' pathPatterns = """ /workers /masters/n:masterid/workers /masters/n:masterid/builders/n:builderid/workers /builders/n:builderid/workers """ @defer.inlineCallbacks def get(self, resultSpec, kwargs): paused = resultSpec.popBooleanFilter('paused') graceful = resultSpec.popBooleanFilter('graceful') workers_dicts = yield self.master.db.workers.getWorkers( builderid=kwargs.get('builderid'), masterid=kwargs.get('masterid'), paused=paused, graceful=graceful) return [self.db2data(w) for w in workers_dicts] class Worker(base.ResourceType): name = "worker" plural = "workers" endpoints = [WorkerEndpoint, WorkersEndpoint] keyFields = ['workerid'] eventPathPatterns = """ /workers/:workerid """ class EntityType(types.Entity): workerid = types.Integer() name = types.String() connected_to = types.List(of=types.Dict( masterid=types.Integer())) configured_on = types.List(of=types.Dict( masterid=types.Integer(), builderid=types.Integer())) workerinfo = types.JsonObject() paused = types.Boolean() graceful = types.Boolean() entityType = EntityType(name) @base.updateMethod # returns a Deferred that returns None def workerConfigured(self, workerid, masterid, builderids): return self.master.db.workers.workerConfigured( workerid=workerid, masterid=masterid, builderids=builderids) @base.updateMethod def findWorkerId(self, name): if not identifiers.isIdentifier(50, name): raise ValueError( "Worker name %r is not a 50-character identifier" % (name,)) return self.master.db.workers.findWorkerId(name) @base.updateMethod @defer.inlineCallbacks def workerConnected(self, workerid, masterid, workerinfo): yield self.master.db.workers.workerConnected( workerid=workerid, masterid=masterid, workerinfo=workerinfo) bs = 
yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'connected') @base.updateMethod @defer.inlineCallbacks def workerDisconnected(self, workerid, masterid): yield self.master.db.workers.workerDisconnected( workerid=workerid, masterid=masterid) bs = yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'disconnected') @base.updateMethod @defer.inlineCallbacks def workerMissing(self, workerid, masterid, last_connection, notify): bs = yield self.master.data.get(('workers', workerid)) bs['last_connection'] = last_connection bs['notify'] = notify self.produceEvent(bs, 'missing') @base.updateMethod @defer.inlineCallbacks def setWorkerState(self, workerid, paused, graceful): yield self.master.db.workers.setWorkerState( workerid=workerid, paused=paused, graceful=graceful) bs = yield self.master.data.get(('workers', workerid)) self.produceEvent(bs, 'state_updated') @base.updateMethod def deconfigureAllWorkersForMaster(self, masterid): # unconfigure all workers for this master return self.master.db.workers.deconfigureAllWorkersForMaster( masterid=masterid) def _masterDeactivated(self, masterid): return self.deconfigureAllWorkersForMaster(masterid) buildbot-2.6.0/master/buildbot/db/000077500000000000000000000000001361162603000170055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/__init__.py000066400000000000000000000014671361162603000211260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # a NULL constant to use in sqlalchemy whereclauses e.g. (tbl.c.results == NULL) # so that pep8 is happy NULL = None buildbot-2.6.0/master/buildbot/db/base.py000066400000000000000000000130251361162603000202720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import hashlib import itertools import sqlalchemy as sa from buildbot.util import unicode2bytes class DBConnectorComponent: # A fixed component of the DBConnector, handling one particular aspect of # the database. Instances of subclasses are assigned to attributes of the # DBConnector object, so that they are available at e.g., # C{master.db.model} or C{master.db.changes}. This parent class takes care # of the necessary backlinks and other housekeeping. 
connector = None data2db = {} def __init__(self, connector): self.db = connector # set up caches for method in dir(self.__class__): o = getattr(self, method) if isinstance(o, CachedMethod): setattr(self, method, o.get_cached_method(self)) @property def master(self): return self.db.master _isCheckLengthNecessary = None def checkLength(self, col, value): if not self._isCheckLengthNecessary: if self.db.pool.engine.dialect.name == 'mysql': self._isCheckLengthNecessary = True else: # not necessary, so just stub out the method self.checkLength = lambda col, value: None return assert col.type.length, "column %s does not have a length" % (col,) if value and len(value) > col.type.length: raise RuntimeError( "value for column %s is greater than max of %d characters: %s" % (col, col.type.length, value)) def ensureLength(self, col, value): assert col.type.length, "column %s does not have a length" % (col,) if value and len(value) > col.type.length: value = value[:col.type.length // 2] + hashlib.sha1(unicode2bytes(value)).hexdigest()[:col.type.length // 2] return value # returns a Deferred that returns a value def findSomethingId(self, tbl, whereclause, insert_values, _race_hook=None, autoCreate=True): d = self.findOrCreateSomethingId(tbl, whereclause, insert_values, _race_hook, autoCreate) d.addCallback(lambda pair: pair[0]) return d def findOrCreateSomethingId(self, tbl, whereclause, insert_values, _race_hook=None, autoCreate=True): """ Find a matching row and if one cannot be found optionally create it. Returns a deferred which resolves to the pair (id, found) where id is the primary key of the matching row and `found` is True if a match was found. `found` will be false if a new row was created. """ def thd(conn, no_recurse=False): # try to find the master q = sa.select([tbl.c.id], whereclause=whereclause) r = conn.execute(q) row = r.fetchone() r.close() # found it! if row: return row.id, True if not autoCreate: return None, False _race_hook and _race_hook(conn) try: r = conn.execute(tbl.insert(), [insert_values]) return r.inserted_primary_key[0], False except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # try it all over again, in case there was an overlapping, # identical call, but only retry once. if no_recurse: raise return thd(conn, no_recurse=True) return self.db.pool.do(thd) def hashColumns(self, *args): def encode(x): if x is None: return b'\xf5' elif isinstance(x, str): return x.encode('utf-8') return str(x).encode('utf-8') return hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest() def doBatch(self, batch, batch_n=500): iterator = iter(batch) while True: batch = list(itertools.islice(iterator, batch_n)) if not batch: break yield batch class CachedMethod: def __init__(self, cache_name, method): self.cache_name = cache_name self.method = method def get_cached_method(self, component): meth = self.method meth_name = meth.__name__ cache = component.db.master.caches.get_cache(self.cache_name, lambda key: meth(component, key)) def wrap(key, no_cache=0): if no_cache: return meth(component, key) return cache.get(key) wrap.__name__ = meth_name + " (wrapped)" wrap.__module__ = meth.__module__ wrap.__doc__ = meth.__doc__ wrap.cache = cache return wrap def cached(cache_name): return lambda method: CachedMethod(cache_name, method) buildbot-2.6.0/master/buildbot/db/builders.py000066400000000000000000000135311361162603000211730ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import defaultdict import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base class BuildersConnectorComponent(base.DBConnectorComponent): def findBuilderId(self, name, autoCreate=True): tbl = self.db.model.builders name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, ), autoCreate=autoCreate) @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): # convert to tag IDs first, as necessary def toTagid(tag): if isinstance(tag, type(1)): return defer.succeed(tag) ssConnector = self.master.db.tags return ssConnector.findTagId(tag) tagsids = [r[1] for r in (yield defer.DeferredList( [toTagid(tag) for tag in tags], fireOnOneErrback=True, consumeErrors=True))] def thd(conn): builders_tbl = self.db.model.builders builders_tags_tbl = self.db.model.builders_tags transaction = conn.begin() q = builders_tbl.update( whereclause=(builders_tbl.c.id == builderid)) conn.execute(q, description=description).close() # remove previous builders_tags conn.execute(builders_tags_tbl.delete( whereclause=((builders_tags_tbl.c.builderid == builderid)))).close() # add tag ids if tagsids: conn.execute(builders_tags_tbl.insert(), [dict(builderid=builderid, tagid=tagid) for tagid in tagsids]).close() transaction.commit() return (yield self.db.pool.do(thd)) def getBuilder(self, builderid): d = self.getBuilders(_builderid=builderid) @d.addCallback def first(bldrs): if bldrs: return bldrs[0] return None return d # returns a Deferred that returns None def addBuilderMaster(self, builderid=None, masterid=None): def thd(conn, no_recurse=False): try: tbl = self.db.model.builder_masters q = tbl.insert() conn.execute(q, builderid=builderid, masterid=masterid) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): pass return self.db.pool.do(thd) # returns a Deferred that returns None def removeBuilderMaster(self, builderid=None, masterid=None): def thd(conn, no_recurse=False): tbl = self.db.model.builder_masters conn.execute(tbl.delete( whereclause=((tbl.c.builderid == builderid) & (tbl.c.masterid == masterid)))) return self.db.pool.do(thd) def getBuilders(self, masterid=None, _builderid=None): def thd(conn): bldr_tbl = self.db.model.builders bm_tbl = self.db.model.builder_masters builders_tags_tbl = self.db.model.builders_tags tags_tbl = self.db.model.tags j = bldr_tbl.outerjoin(bm_tbl) # if we want to filter by masterid, we must join to builder_masters # again, so we can still get the full set of masters for each # builder if masterid is not None: limiting_bm_tbl = bm_tbl.alias('limiting_bm') j = j.join(limiting_bm_tbl, onclause=(bldr_tbl.c.id == limiting_bm_tbl.c.builderid)) q = sa.select( [bldr_tbl.c.id, bldr_tbl.c.name, bldr_tbl.c.description, bm_tbl.c.masterid], from_obj=[j], 
order_by=[bldr_tbl.c.id, bm_tbl.c.masterid]) if masterid is not None: # filter the masterid from the limiting table q = q.where(limiting_bm_tbl.c.masterid == masterid) if _builderid is not None: q = q.where(bldr_tbl.c.id == _builderid) # build up a intermediate builder id -> tag names map (fixes performance issue #3396) bldr_id_to_tags = defaultdict(list) bldr_q = sa.select([builders_tags_tbl.c.builderid, tags_tbl.c.name]) bldr_q = bldr_q.select_from(tags_tbl.join(builders_tags_tbl)) for bldr_id, tag in conn.execute(bldr_q).fetchall(): bldr_id_to_tags[bldr_id].append(tag) # now group those by builderid, aggregating by masterid rv = [] last = None for row in conn.execute(q).fetchall(): # pylint: disable=unsubscriptable-object if not last or row['id'] != last['id']: last = dict(id=row.id, name=row.name, masterids=[], description=row.description, tags=bldr_id_to_tags[row.id]) rv.append(last) if row['masterid']: last['masterids'].append(row['masterid']) return rv return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/db/buildrequests.py000066400000000000000000000225221361162603000222550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
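# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources): a
# simplified, pure-Python version of the aggregation done at the end of
# BuildersConnectorComponent.getBuilders() above, where one row per
# (builder, master) pair is folded into one dict per builder carrying a list
# of masterids.  The sample rows are invented.
def _sketch_fold_builder_rows(rows):
    rv, last = [], None
    for row in rows:
        if last is None or row['id'] != last['id']:
            last = dict(id=row['id'], name=row['name'], masterids=[])
            rv.append(last)
        if row['masterid']:
            last['masterids'].append(row['masterid'])
    return rv

assert _sketch_fold_builder_rows([
    {'id': 1, 'name': 'linux', 'masterid': 10},
    {'id': 1, 'name': 'linux', 'masterid': 11},
    {'id': 2, 'name': 'win', 'masterid': None},
]) == [{'id': 1, 'name': 'linux', 'masterids': [10, 11]},
       {'id': 2, 'name': 'win', 'masterids': []}]
# ---------------------------------------------------------------------------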
# # Copyright Buildbot Team Members import itertools import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import NULL from buildbot.db import base from buildbot.process.results import RETRY from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class AlreadyClaimedError(Exception): pass class NotClaimedError(Exception): pass class BrDict(dict): pass class BuildRequestsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def _saSelectQuery(self): reqs_tbl = self.db.model.buildrequests claims_tbl = self.db.model.buildrequest_claims bsets_tbl = self.db.model.buildsets builder_tbl = self.db.model.builders bsss_tbl = self.db.model.buildset_sourcestamps sstamps_tbl = self.db.model.sourcestamps from_clause = reqs_tbl.outerjoin(claims_tbl, reqs_tbl.c.id == claims_tbl.c.brid) from_clause = from_clause.join(bsets_tbl, reqs_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(bsss_tbl, bsets_tbl.c.id == bsss_tbl.c.buildsetid) from_clause = from_clause.join(sstamps_tbl, bsss_tbl.c.sourcestampid == sstamps_tbl.c.id) from_clause = from_clause.join(builder_tbl, reqs_tbl.c.builderid == builder_tbl.c.id) return sa.select([reqs_tbl, claims_tbl, sstamps_tbl.c.branch, sstamps_tbl.c.repository, sstamps_tbl.c.codebase, builder_tbl.c.name.label('buildername') ]).select_from(from_clause) # returns a Deferred that returns a value def getBuildRequest(self, brid): def thd(conn): reqs_tbl = self.db.model.buildrequests q = self._saSelectQuery() q = q.where(reqs_tbl.c.id == brid) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._brdictFromRow(row, self.db.master.masterid) res.close() return rv return self.db.pool.do(thd) @defer.inlineCallbacks def getBuildRequests(self, builderid=None, complete=None, claimed=None, bsid=None, branch=None, repository=None, resultSpec=None): def deduplicateBrdict(brdicts): return list(({b['buildrequestid']: b for b in brdicts}).values()) def thd(conn): reqs_tbl = self.db.model.buildrequests claims_tbl = self.db.model.buildrequest_claims sstamps_tbl = self.db.model.sourcestamps q = self._saSelectQuery() if claimed is not None: if isinstance(claimed, bool): if not claimed: q = q.where( (claims_tbl.c.claimed_at == NULL) & (reqs_tbl.c.complete == 0)) else: q = q.where( (claims_tbl.c.claimed_at != NULL)) else: q = q.where( (claims_tbl.c.masterid == claimed)) if builderid is not None: q = q.where(reqs_tbl.c.builderid == builderid) if complete is not None: if complete: q = q.where(reqs_tbl.c.complete != 0) else: q = q.where(reqs_tbl.c.complete == 0) if bsid is not None: q = q.where(reqs_tbl.c.buildsetid == bsid) if branch is not None: q = q.where(sstamps_tbl.c.branch == branch) if repository is not None: q = q.where(sstamps_tbl.c.repository == repository) if resultSpec is not None: return deduplicateBrdict(resultSpec.thd_execute( conn, q, lambda r: self._brdictFromRow(r, self.db.master.masterid))) res = conn.execute(q) return deduplicateBrdict([self._brdictFromRow(row, self.db.master.masterid) for row in res.fetchall()]) res = yield self.db.pool.do(thd) return res @defer.inlineCallbacks def claimBuildRequests(self, brids, claimed_at=None): if claimed_at is not None: claimed_at = datetime2epoch(claimed_at) else: claimed_at = int(self.master.reactor.seconds()) def thd(conn): transaction = conn.begin() tbl = self.db.model.buildrequest_claims try: q = tbl.insert() conn.execute(q, [ dict(brid=id, masterid=self.db.master.masterid, claimed_at=claimed_at) 
for id in brids]) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): transaction.rollback() raise AlreadyClaimedError() transaction.commit() yield self.db.pool.do(thd) # returns a Deferred that returns None def unclaimBuildRequests(self, brids): def thd(conn): transaction = conn.begin() claims_tbl = self.db.model.buildrequest_claims # we'll need to batch the brids into groups of 100, so that the # parameter lists supported by the DBAPI aren't exhausted iterator = iter(brids) while True: batch = list(itertools.islice(iterator, 100)) if not batch: break # success! try: q = claims_tbl.delete( (claims_tbl.c.brid.in_(batch)) & (claims_tbl.c.masterid == self.db.master.masterid)) conn.execute(q) except Exception: transaction.rollback() raise transaction.commit() return self.db.pool.do(thd) @defer.inlineCallbacks def completeBuildRequests(self, brids, results, complete_at=None): assert results != RETRY, "a buildrequest cannot be completed with a retry status!" if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.master.reactor.seconds()) def thd(conn): transaction = conn.begin() # the update here is simple, but a number of conditions are # attached to ensure that we do not update a row inappropriately, # Note that checking that the request is mine would require a # subquery, so for efficiency that is not checked. reqs_tbl = self.db.model.buildrequests # we'll need to batch the brids into groups of 100, so that the # parameter lists supported by the DBAPI aren't exhausted for batch in self.doBatch(brids, 100): q = reqs_tbl.update() q = q.where(reqs_tbl.c.id.in_(batch)) q = q.where(reqs_tbl.c.complete != 1) res = conn.execute(q, complete=1, results=results, complete_at=complete_at) # if an incorrect number of rows were updated, then we failed. if res.rowcount != len(batch): log.msg("tried to complete %d buildrequests, " "but only completed %d" % (len(batch), res.rowcount)) transaction.rollback() raise NotClaimedError transaction.commit() yield self.db.pool.do(thd) @staticmethod def _brdictFromRow(row, master_masterid): claimed = False claimed_by_masterid = None claimed_at = None if row.claimed_at is not None: claimed_at = row.claimed_at claimed = True claimed_by_masterid = row.masterid submitted_at = epoch2datetime(row.submitted_at) complete_at = epoch2datetime(row.complete_at) claimed_at = epoch2datetime(claimed_at) return BrDict(buildrequestid=row.id, buildsetid=row.buildsetid, builderid=row.builderid, buildername=row.buildername, priority=row.priority, claimed=claimed, claimed_at=claimed_at, claimed_by_masterid=claimed_by_masterid, complete=bool(row.complete), results=row.results, submitted_at=submitted_at, complete_at=complete_at, waited_for=bool(row.waited_for)) buildbot-2.6.0/master/buildbot/db/builds.py000066400000000000000000000243441361162603000206500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
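# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources): the
# claim/unclaim/complete methods above slice build request ids into batches
# of at most 100 so the DBAPI parameter-list limit is not exceeded.  This is
# a standalone version of that batching, run on invented data.
import itertools as _sketch_itertools

def _sketch_batches(ids, batch_n=100):
    iterator = iter(ids)
    while True:
        batch = list(_sketch_itertools.islice(iterator, batch_n))
        if not batch:
            break
        yield batch

assert [len(b) for b in _sketch_batches(range(250))] == [100, 100, 50]
# ---------------------------------------------------------------------------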
# # Copyright Buildbot Team Members import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base from buildbot.util import epoch2datetime class BuildsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def _getBuild(self, whereclause): def thd(conn): q = self.db.model.builds.select(whereclause=whereclause) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._builddictFromRow(row) res.close() return rv return self.db.pool.do(thd) def getBuild(self, buildid): return self._getBuild(self.db.model.builds.c.id == buildid) def getBuildByNumber(self, builderid, number): return self._getBuild( (self.db.model.builds.c.builderid == builderid) & (self.db.model.builds.c.number == number)) # returns a Deferred that returns a value def _getRecentBuilds(self, whereclause, offset=0, limit=1): def thd(conn): tbl = self.db.model.builds q = tbl.select(whereclause=whereclause, order_by=[sa.desc(tbl.c.complete_at)], offset=offset, limit=limit) res = conn.execute(q) return list([self._builddictFromRow(row) for row in res.fetchall()]) return self.db.pool.do(thd) @defer.inlineCallbacks def getPrevSuccessfulBuild(self, builderid, number, ssBuild): gssfb = self.master.db.sourcestamps.getSourceStampsForBuild rv = None tbl = self.db.model.builds offset = 0 increment = 1000 matchssBuild = {(ss['repository'], ss['branch'], ss['codebase']) for ss in ssBuild} while rv is None: # Get some recent successful builds on the same builder prevBuilds = yield self._getRecentBuilds(whereclause=((tbl.c.builderid == builderid) & (tbl.c.number < number) & (tbl.c.results == 0)), offset=offset, limit=increment) if not prevBuilds: break for prevBuild in prevBuilds: prevssBuild = {(ss['repository'], ss['branch'], ss['codebase']) for ss in (yield gssfb(prevBuild['id']))} if prevssBuild == matchssBuild: # A successful build with the same # repository/branch/codebase was found ! 
rv = prevBuild break offset += increment return rv def getBuildsForChange(self, changeid): assert changeid > 0 def thd(conn): # Get builds for the change changes_tbl = self.db.model.changes bsets_tbl = self.db.model.buildsets bsss_tbl = self.db.model.buildset_sourcestamps reqs_tbl = self.db.model.buildrequests builds_tbl = self.db.model.builds from_clause = changes_tbl.join(bsss_tbl, changes_tbl.c.sourcestampid == bsss_tbl.c.sourcestampid) from_clause = from_clause.join(bsets_tbl, bsss_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(reqs_tbl, bsets_tbl.c.id == reqs_tbl.c.buildsetid) from_clause = from_clause.join(builds_tbl, reqs_tbl.c.id == builds_tbl.c.buildrequestid) q = sa.select([builds_tbl]).select_from( from_clause).where(changes_tbl.c.changeid == changeid) res = conn.execute(q) return [self._builddictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): def thd(conn): tbl = self.db.model.builds q = tbl.select() if builderid is not None: q = q.where(tbl.c.builderid == builderid) if buildrequestid is not None: q = q.where(tbl.c.buildrequestid == buildrequestid) if workerid is not None: q = q.where(tbl.c.workerid == workerid) if complete is not None: if complete: q = q.where(tbl.c.complete_at != NULL) else: q = q.where(tbl.c.complete_at == NULL) if resultSpec is not None: return resultSpec.thd_execute(conn, q, self._builddictFromRow) res = conn.execute(q) return [self._builddictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def addBuild(self, builderid, buildrequestid, workerid, masterid, state_string, _race_hook=None): started_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.builds # get the highest current number r = conn.execute(sa.select([sa.func.max(tbl.c.number)], whereclause=(tbl.c.builderid == builderid))) number = r.scalar() new_number = 1 if number is None else number + 1 # insert until we are successful.. 
while True: if _race_hook: _race_hook(conn) try: r = conn.execute(self.db.model.builds.insert(), dict(number=new_number, builderid=builderid, buildrequestid=buildrequestid, workerid=workerid, masterid=masterid, started_at=started_at, complete_at=None, state_string=state_string)) except (sa.exc.IntegrityError, sa.exc.ProgrammingError) as e: # pg 9.5 gives this error which makes it pass some build # numbers if 'duplicate key value violates unique constraint "builds_pkey"' not in str(e): new_number += 1 continue return r.inserted_primary_key[0], new_number return self.db.pool.do(thd) # returns a Deferred that returns None def setBuildStateString(self, buildid, state_string): def thd(conn): tbl = self.db.model.builds q = tbl.update(whereclause=(tbl.c.id == buildid)) conn.execute(q, state_string=state_string) return self.db.pool.do(thd) # returns a Deferred that returns None def finishBuild(self, buildid, results): def thd(conn): tbl = self.db.model.builds q = tbl.update(whereclause=(tbl.c.id == buildid)) conn.execute(q, complete_at=self.master.reactor.seconds(), results=results) return self.db.pool.do(thd) # returns a Deferred that returns a value def getBuildProperties(self, bid): def thd(conn): bp_tbl = self.db.model.build_properties q = sa.select( [bp_tbl.c.name, bp_tbl.c.value, bp_tbl.c.source], whereclause=(bp_tbl.c.buildid == bid)) props = [] for row in conn.execute(q): prop = (json.loads(row.value), row.source) props.append((row.name, prop)) return dict(props) return self.db.pool.do(thd) @defer.inlineCallbacks def setBuildProperty(self, bid, name, value, source): """ A kind of create_or_update, that's between one or two queries per call """ def thd(conn): bp_tbl = self.db.model.build_properties self.checkLength(bp_tbl.c.name, name) self.checkLength(bp_tbl.c.source, source) whereclause = sa.and_(bp_tbl.c.buildid == bid, bp_tbl.c.name == name) q = sa.select( [bp_tbl.c.value, bp_tbl.c.source], whereclause=whereclause) prop = conn.execute(q).fetchone() value_js = json.dumps(value) if prop is None: conn.execute(bp_tbl.insert(), dict(buildid=bid, name=name, value=value_js, source=source)) elif (prop.value != value_js) or (prop.source != source): conn.execute(bp_tbl.update(whereclause=whereclause), dict(value=value_js, source=source)) yield self.db.pool.do(thd) def _builddictFromRow(self, row): return dict( id=row.id, number=row.number, builderid=row.builderid, buildrequestid=row.buildrequestid, workerid=row.workerid, masterid=row.masterid, started_at=epoch2datetime(row.started_at), complete_at=epoch2datetime(row.complete_at), state_string=row.state_string, results=row.results) buildbot-2.6.0/master/buildbot/db/buildsets.py000066400000000000000000000226331361162603000213630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
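# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources):
# BuildsConnectorComponent.getPrevSuccessfulBuild() above matches candidate
# builds by comparing the set of (repository, branch, codebase) triples of
# their source stamps.  A standalone version of that comparison, with
# invented source stamps:
def _sketch_same_sourcestamps(ss_a, ss_b):
    def key(ss):
        return (ss['repository'], ss['branch'], ss['codebase'])
    return {key(ss) for ss in ss_a} == {key(ss) for ss in ss_b}

assert _sketch_same_sourcestamps(
    [{'repository': 'https://example.org/repo', 'branch': 'master', 'codebase': ''}],
    [{'repository': 'https://example.org/repo', 'branch': 'master', 'codebase': ''}])
# ---------------------------------------------------------------------------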
# # Copyright Buildbot Team Members """ Support for buildsets in the database """ import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class BsDict(dict): pass class BsProps(dict): pass class AlreadyCompleteError(RuntimeError): pass class BuildsetsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst @defer.inlineCallbacks def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): if submitted_at is not None: submitted_at = datetime2epoch(submitted_at) else: submitted_at = int(self.master.reactor.seconds()) # convert to sourcestamp IDs first, as necessary def toSsid(sourcestamp): if isinstance(sourcestamp, int): return defer.succeed(sourcestamp) ssConnector = self.master.db.sourcestamps return ssConnector.findSourceStampId(**sourcestamp) sourcestamps = yield defer.DeferredList( [toSsid(ss) for ss in sourcestamps], fireOnOneErrback=True, consumeErrors=True) sourcestampids = [r[1] for r in sourcestamps] def thd(conn): buildsets_tbl = self.db.model.buildsets self.checkLength(buildsets_tbl.c.reason, reason) self.checkLength(buildsets_tbl.c.external_idstring, external_idstring) transaction = conn.begin() # insert the buildset itself r = conn.execute(buildsets_tbl.insert(), dict( submitted_at=submitted_at, reason=reason, complete=0, complete_at=None, results=-1, external_idstring=external_idstring, parent_buildid=parent_buildid, parent_relationship=parent_relationship)) bsid = r.inserted_primary_key[0] # add any properties if properties: bs_props_tbl = self.db.model.buildset_properties inserts = [ dict(buildsetid=bsid, property_name=k, property_value=json.dumps([v, s])) for k, (v, s) in properties.items()] for i in inserts: self.checkLength(bs_props_tbl.c.property_name, i['property_name']) conn.execute(bs_props_tbl.insert(), inserts) # add sourcestamp ids r = conn.execute(self.db.model.buildset_sourcestamps.insert(), [dict(buildsetid=bsid, sourcestampid=ssid) for ssid in sourcestampids]) # and finish with a build request for each builder. Note that # sqlalchemy and the Python DBAPI do not provide a way to recover # inserted IDs from a multi-row insert, so this is done one row at # a time. brids = {} br_tbl = self.db.model.buildrequests ins = br_tbl.insert() for builderid in builderids: r = conn.execute(ins, dict(buildsetid=bsid, builderid=builderid, priority=0, claimed_at=0, claimed_by_name=None, claimed_by_incarnation=None, complete=0, results=-1, submitted_at=submitted_at, complete_at=None, waited_for=1 if waited_for else 0)) brids[builderid] = r.inserted_primary_key[0] transaction.commit() return (bsid, brids) bsid, brids = yield self.db.pool.do(thd) # Seed the buildset property cache. 
self.getBuildsetProperties.cache.put(bsid, BsProps(properties)) return (bsid, brids) @defer.inlineCallbacks def completeBuildset(self, bsid, results, complete_at=None): if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.buildsets q = tbl.update(whereclause=( (tbl.c.id == bsid) & ((tbl.c.complete == NULL) | (tbl.c.complete != 1)))) res = conn.execute(q, complete=1, results=results, complete_at=complete_at) if res.rowcount != 1: # happens when two buildrequests finish at the same time raise AlreadyCompleteError() yield self.db.pool.do(thd) # returns a Deferred that returns a value def getBuildset(self, bsid): def thd(conn): bs_tbl = self.db.model.buildsets q = bs_tbl.select(whereclause=(bs_tbl.c.id == bsid)) res = conn.execute(q) row = res.fetchone() if not row: return None return self._thd_row2dict(conn, row) return self.db.pool.do(thd) @defer.inlineCallbacks def getBuildsets(self, complete=None, resultSpec=None): def thd(conn): bs_tbl = self.db.model.buildsets q = bs_tbl.select() if complete is not None: if complete: q = q.where(bs_tbl.c.complete != 0) else: q = q.where((bs_tbl.c.complete == 0) | (bs_tbl.c.complete == NULL)) if resultSpec is not None: return resultSpec.thd_execute(conn, q, lambda x: self._thd_row2dict(conn, x)) res = conn.execute(q) return [self._thd_row2dict(conn, row) for row in res.fetchall()] res = yield self.db.pool.do(thd) return res # returns a Deferred that returns a value def getRecentBuildsets(self, count=None, branch=None, repository=None, complete=None): def thd(conn): bs_tbl = self.db.model.buildsets ss_tbl = self.db.model.sourcestamps j = self.db.model.buildsets j = j.join(self.db.model.buildset_sourcestamps) j = j.join(self.db.model.sourcestamps) q = sa.select(columns=[bs_tbl], from_obj=[j], distinct=True) q = q.order_by(sa.desc(bs_tbl.c.submitted_at)) q = q.limit(count) if complete is not None: if complete: q = q.where(bs_tbl.c.complete != 0) else: q = q.where((bs_tbl.c.complete == 0) | (bs_tbl.c.complete == NULL)) if branch: q = q.where(ss_tbl.c.branch == branch) if repository: q = q.where(ss_tbl.c.repository == repository) res = conn.execute(q) return list(reversed([self._thd_row2dict(conn, row) for row in res.fetchall()])) return self.db.pool.do(thd) # returns a Deferred that returns a value @base.cached("BuildsetProperties") def getBuildsetProperties(self, bsid): def thd(conn): bsp_tbl = self.db.model.buildset_properties q = sa.select( [bsp_tbl.c.property_name, bsp_tbl.c.property_value], whereclause=(bsp_tbl.c.buildsetid == bsid)) ret = [] for row in conn.execute(q): try: properties = json.loads(row.property_value) ret.append((row.property_name, tuple(properties))) except ValueError: pass return BsProps(ret) return self.db.pool.do(thd) def _thd_row2dict(self, conn, row): # get sourcestamps tbl = self.db.model.buildset_sourcestamps sourcestamps = [r.sourcestampid for r in conn.execute(sa.select([tbl.c.sourcestampid], (tbl.c.buildsetid == row.id))).fetchall()] return BsDict(external_idstring=row.external_idstring, reason=row.reason, submitted_at=epoch2datetime(row.submitted_at), complete=bool(row.complete), complete_at=epoch2datetime(row.complete_at), results=row.results, bsid=row.id, sourcestamps=sourcestamps, parent_buildid=row.parent_buildid, parent_relationship=row.parent_relationship) buildbot-2.6.0/master/buildbot/db/changes.py000066400000000000000000000372001361162603000207710ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Support for changes in the database """ import json import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class ChDict(dict): pass class ChangesConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def getParentChangeIds(self, branch, repository, project, codebase): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid], whereclause=((changes_tbl.c.branch == branch) & (changes_tbl.c.repository == repository) & (changes_tbl.c.project == project) & (changes_tbl.c.codebase == codebase)), order_by=sa.desc(changes_tbl.c.changeid), limit=1) parent_id = conn.scalar(q) return [parent_id] if parent_id else [] return self.db.pool.do(thd) @defer.inlineCallbacks def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): assert project is not None, "project must be a string, not None" assert repository is not None, "repository must be a string, not None" if is_dir is not None: log.msg("WARNING: change source is providing deprecated " "value is_dir (ignored)") if when_timestamp is None: when_timestamp = epoch2datetime(self.master.reactor.seconds()) if properties is None: properties = {} # verify that source is 'Change' for each property for pv in properties.values(): assert pv[1] == 'Change', ("properties must be qualified with" "source 'Change'") ch_tbl = self.db.model.changes self.checkLength(ch_tbl.c.author, author) self.checkLength(ch_tbl.c.committer, committer) self.checkLength(ch_tbl.c.branch, branch) self.checkLength(ch_tbl.c.revision, revision) self.checkLength(ch_tbl.c.revlink, revlink) self.checkLength(ch_tbl.c.category, category) self.checkLength(ch_tbl.c.repository, repository) self.checkLength(ch_tbl.c.project, project) # calculate the sourcestamp first, before adding it ssid = yield self.db.sourcestamps.findSourceStampId( revision=revision, branch=branch, repository=repository, codebase=codebase, project=project) parent_changeids = yield self.getParentChangeIds(branch, repository, project, codebase) # Someday, changes will have multiple parents. # But for the moment, a Change can only have 1 parent parent_changeid = parent_changeids[0] if parent_changeids else None def thd(conn): # note that in a read-uncommitted database like SQLite this # transaction does not buy atomicity - other database users may # still come across a change without its files, properties, # etc. That's OK, since we don't announce the change until it's # all in the database, but beware. 
transaction = conn.begin() r = conn.execute(ch_tbl.insert(), dict( author=author, committer=committer, comments=comments, branch=branch, revision=revision, revlink=revlink, when_timestamp=datetime2epoch(when_timestamp), category=category, repository=repository, codebase=codebase, project=project, sourcestampid=ssid, parent_changeids=parent_changeid)) changeid = r.inserted_primary_key[0] if files: tbl = self.db.model.change_files for f in files: self.checkLength(tbl.c.filename, f) conn.execute(tbl.insert(), [ dict(changeid=changeid, filename=f) for f in files ]) if properties: tbl = self.db.model.change_properties inserts = [ dict(changeid=changeid, property_name=k, property_value=json.dumps(v)) for k, v in properties.items() ] for i in inserts: self.checkLength(tbl.c.property_name, i['property_name']) conn.execute(tbl.insert(), inserts) if uid: ins = self.db.model.change_users.insert() conn.execute(ins, dict(changeid=changeid, uid=uid)) transaction.commit() return changeid return (yield self.db.pool.do(thd)) # returns a Deferred that returns a value @base.cached("chdicts") def getChange(self, changeid): assert changeid >= 0 def thd(conn): # get the row from the 'changes' table changes_tbl = self.db.model.changes q = changes_tbl.select( whereclause=(changes_tbl.c.changeid == changeid)) rp = conn.execute(q) row = rp.fetchone() if not row: return None # and fetch the ancillary data (files, properties) return self._chdict_from_change_row_thd(conn, row) return self.db.pool.do(thd) @defer.inlineCallbacks def getChangesForBuild(self, buildid): assert buildid > 0 gssfb = self.master.db.sourcestamps.getSourceStampsForBuild changes = list() currentBuild = yield self.master.db.builds.getBuild(buildid) fromChanges, toChanges = dict(), dict() ssBuild = yield gssfb(buildid) for ss in ssBuild: fromChanges[ss['codebase']] = yield self.getChangeFromSSid(ss['ssid']) # Get the last successful build on the same builder previousBuild = yield self.master.db.builds.getPrevSuccessfulBuild(currentBuild['builderid'], currentBuild[ 'number'], ssBuild) if previousBuild: for ss in (yield gssfb(previousBuild['id'])): toChanges[ss['codebase']] = yield self.getChangeFromSSid(ss['ssid']) else: # If no successful previous build, then we need to catch all # changes for cb in fromChanges: toChanges[cb] = {'changeid': None} # For each codebase, append changes until we match the parent for cb, change in fromChanges.items(): # Careful; toChanges[cb] may be None from getChangeFromSSid toCbChange = toChanges.get(cb) or {} if change and change['changeid'] != toCbChange.get('changeid'): changes.append(change) while ((toCbChange.get('changeid') not in change['parent_changeids']) and change['parent_changeids']): # For the moment, a Change only have 1 parent. 
change = yield self.master.db.changes.getChange(change['parent_changeids'][0]) # http://trac.buildbot.net/ticket/3461 sometimes, # parent_changeids could be corrupted if change is None: break changes.append(change) return changes # returns a Deferred that returns a value def getChangeFromSSid(self, sourcestampid): assert sourcestampid >= 0 def thd(conn): # get the row from the 'changes' table changes_tbl = self.db.model.changes q = changes_tbl.select( whereclause=(changes_tbl.c.sourcestampid == sourcestampid)) # if there are multiple changes for this ssid, get the most recent one q = q.order_by(changes_tbl.c.changeid.desc()) q = q.limit(1) rp = conn.execute(q) row = rp.fetchone() if not row: return None # and fetch the ancillary data (files, properties) return self._chdict_from_change_row_thd(conn, row) return self.db.pool.do(thd) # returns a Deferred that returns a value def getChangeUids(self, changeid): assert changeid >= 0 def thd(conn): cu_tbl = self.db.model.change_users q = cu_tbl.select(whereclause=(cu_tbl.c.changeid == changeid)) res = conn.execute(q) rows = res.fetchall() row_uids = [row.uid for row in rows] return row_uids return self.db.pool.do(thd) def getRecentChanges(self, count): def thd(conn): # get the changeids from the 'changes' table changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid], order_by=[sa.desc(changes_tbl.c.changeid)], limit=count) rp = conn.execute(q) changeids = [row.changeid for row in rp] rp.close() return list(reversed(changeids)) d = self.db.pool.do(thd) # then turn those into changes, using the cache @d.addCallback def get_changes(changeids): return defer.gatherResults([self.getChange(changeid) for changeid in changeids]) return d def getChanges(self): def thd(conn): # get the changeids from the 'changes' table changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid]) rp = conn.execute(q) changeids = [row.changeid for row in rp] rp.close() return list(changeids) d = self.db.pool.do(thd) # then turn those into changes, using the cache @d.addCallback def get_changes(changeids): return defer.gatherResults([self.getChange(changeid) for changeid in changeids]) return d # returns a Deferred that returns a value def getChangesCount(self): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([sa.func.count()]).select_from(changes_tbl) rp = conn.execute(q) r = 0 for row in rp: r = row[0] rp.close() return int(r) return self.db.pool.do(thd) # returns a Deferred that returns a value def getLatestChangeid(self): def thd(conn): changes_tbl = self.db.model.changes q = sa.select([changes_tbl.c.changeid], order_by=sa.desc(changes_tbl.c.changeid), limit=1) return conn.scalar(q) return self.db.pool.do(thd) # utility methods @defer.inlineCallbacks def pruneChanges(self, changeHorizon): """ Called periodically by DBConnector, this method deletes changes older than C{changeHorizon}. """ if not changeHorizon: return None def thd(conn): changes_tbl = self.db.model.changes # First, get the list of changes to delete. This could be written # as a subquery but then that subquery would be run for every # table, which is very inefficient; also, MySQL's subquery support # leaves much to be desired, and doesn't support this particular # form. 
q = sa.select([changes_tbl.c.changeid], order_by=[sa.desc(changes_tbl.c.changeid)], offset=changeHorizon) res = conn.execute(q) ids_to_delete = [r.changeid for r in res] # and delete from all relevant tables, in dependency order for table_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes', 'change_users'): remaining = ids_to_delete[:] while remaining: batch, remaining = remaining[:100], remaining[100:] table = self.db.model.metadata.tables[table_name] conn.execute( table.delete(table.c.changeid.in_(batch))) yield self.db.pool.do(thd) def _chdict_from_change_row_thd(self, conn, ch_row): # This method must be run in a db.pool thread, and returns a chdict # given a row from the 'changes' table change_files_tbl = self.db.model.change_files change_properties_tbl = self.db.model.change_properties if ch_row.parent_changeids: parent_changeids = [ch_row.parent_changeids] else: parent_changeids = [] chdict = ChDict( changeid=ch_row.changeid, parent_changeids=parent_changeids, author=ch_row.author, committer=ch_row.committer, files=[], # see below comments=ch_row.comments, revision=ch_row.revision, when_timestamp=epoch2datetime(ch_row.when_timestamp), branch=ch_row.branch, category=ch_row.category, revlink=ch_row.revlink, properties={}, # see below repository=ch_row.repository, codebase=ch_row.codebase, project=ch_row.project, sourcestampid=int(ch_row.sourcestampid)) query = change_files_tbl.select( whereclause=(change_files_tbl.c.changeid == ch_row.changeid)) rows = conn.execute(query) for r in rows: chdict['files'].append(r.filename) # and properties must be given without a source, so strip that, but # be flexible in case users have used a development version where the # change properties were recorded incorrectly def split_vs(vs): try: v, s = vs if s != "Change": v, s = vs, "Change" except (ValueError, TypeError): v, s = vs, "Change" return v, s query = change_properties_tbl.select( whereclause=(change_properties_tbl.c.changeid == ch_row.changeid)) rows = conn.execute(query) for r in rows: try: v, s = split_vs(json.loads(r.property_value)) chdict['properties'][r.property_name] = (v, s) except ValueError: pass return chdict buildbot-2.6.0/master/buildbot/db/changesources.py000066400000000000000000000074741361162603000222240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
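# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources):
# _chdict_from_change_row_thd() above tolerates change properties recorded
# either as a [value, "Change"] pair or as a bare value.  This is the same
# normalisation performed by its inner split_vs() helper, shown standalone
# with invented inputs.
def _sketch_split_vs(vs):
    try:
        v, s = vs
        if s != "Change":
            v, s = vs, "Change"
    except (ValueError, TypeError):
        v, s = vs, "Change"
    return v, s

assert _sketch_split_vs(["1.2.3", "Change"]) == ("1.2.3", "Change")
assert _sketch_split_vs(42) == (42, "Change")
# ---------------------------------------------------------------------------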
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base class ChangeSourceAlreadyClaimedError(Exception): pass class ChangeSourcesConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def findChangeSourceId(self, name): tbl = self.db.model.changesources name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, )) # returns a Deferred that returns None def setChangeSourceMaster(self, changesourceid, masterid): def thd(conn): cs_mst_tbl = self.db.model.changesource_masters # handle the masterid=None case to get it out of the way if masterid is None: q = cs_mst_tbl.delete( whereclause=(cs_mst_tbl.c.changesourceid == changesourceid)) conn.execute(q) return # try a blind insert.. try: q = cs_mst_tbl.insert() conn.execute(q, dict(changesourceid=changesourceid, masterid=masterid)) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # someone already owns this changesource. raise ChangeSourceAlreadyClaimedError return self.db.pool.do(thd) @defer.inlineCallbacks def getChangeSource(self, changesourceid): cs = yield self.getChangeSources(_changesourceid=changesourceid) if cs: return cs[0] # returns a Deferred that returns a value def getChangeSources(self, active=None, masterid=None, _changesourceid=None): def thd(conn): cs_tbl = self.db.model.changesources cs_mst_tbl = self.db.model.changesource_masters # handle the trivial case of masterid=xx and active=False if masterid is not None and active is not None and not active: return [] join = cs_tbl.outerjoin(cs_mst_tbl, (cs_tbl.c.id == cs_mst_tbl.c.changesourceid)) # if we're given a _changesourceid, select only that row wc = None if _changesourceid: wc = (cs_tbl.c.id == _changesourceid) else: # otherwise, filter with active, if necessary if masterid is not None: wc = (cs_mst_tbl.c.masterid == masterid) elif active: wc = (cs_mst_tbl.c.masterid != NULL) elif active is not None: wc = (cs_mst_tbl.c.masterid == NULL) q = sa.select([cs_tbl.c.id, cs_tbl.c.name, cs_mst_tbl.c.masterid], from_obj=join, whereclause=wc) return [dict(id=row.id, name=row.name, masterid=row.masterid) for row in conn.execute(q).fetchall()] return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/db/connector.py000066400000000000000000000137371361162603000213640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
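# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical, not part of the Buildbot sources):
# findChangeSourceId() above, like findBuilderId(), looks rows up by a
# name_hash computed with DBConnectorComponent.hashColumns().  This shows the
# same hashing standalone; the change source name is invented.
import hashlib as _sketch_hashlib

def _sketch_hash_columns(*args):
    def encode(x):
        if x is None:
            return b'\xf5'
        if isinstance(x, str):
            return x.encode('utf-8')
        return str(x).encode('utf-8')
    return _sketch_hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest()

# a 40-character hex digest, stable across masters for the same name
assert len(_sketch_hash_columns('my-gitpoller')) == 40
# ---------------------------------------------------------------------------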
# # Copyright Buildbot Team Members import textwrap from twisted.application import internet from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.db import builders from buildbot.db import buildrequests from buildbot.db import builds from buildbot.db import buildsets from buildbot.db import changes from buildbot.db import changesources from buildbot.db import enginestrategy from buildbot.db import exceptions from buildbot.db import logs from buildbot.db import masters from buildbot.db import model from buildbot.db import pool from buildbot.db import schedulers from buildbot.db import sourcestamps from buildbot.db import state from buildbot.db import steps from buildbot.db import tags from buildbot.db import users from buildbot.db import workers from buildbot.util import service upgrade_message = textwrap.dedent("""\ The Buildmaster database needs to be upgraded before this version of buildbot can run. Use the following command-line buildbot upgrade-master {basedir} to upgrade the database, and try starting the buildmaster again. You may want to make a backup of your buildmaster before doing so. """).strip() class DBConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): # The connection between Buildbot and its backend database. This is # generally accessible as master.db, but is also used during upgrades. # # Most of the interesting operations available via the connector are # implemented in connector components, available as attributes of this # object, and listed below. # Period, in seconds, of the cleanup task. This master will perform # periodic cleanup actions on this schedule. CLEANUP_PERIOD = 3600 def __init__(self, basedir): super().__init__() self.setName('db') self.basedir = basedir # not configured yet - we don't build an engine until the first # reconfig self.configured_url = None # set up components self._engine = None # set up in reconfigService self.pool = None # set up in reconfigService @defer.inlineCallbacks def setServiceParent(self, p): yield super().setServiceParent(p) self.model = model.Model(self) self.changes = changes.ChangesConnectorComponent(self) self.changesources = changesources.ChangeSourcesConnectorComponent( self) self.schedulers = schedulers.SchedulersConnectorComponent(self) self.sourcestamps = sourcestamps.SourceStampsConnectorComponent(self) self.buildsets = buildsets.BuildsetsConnectorComponent(self) self.buildrequests = buildrequests.BuildRequestsConnectorComponent( self) self.state = state.StateConnectorComponent(self) self.builds = builds.BuildsConnectorComponent(self) self.workers = workers.WorkersConnectorComponent(self) self.users = users.UsersConnectorComponent(self) self.masters = masters.MastersConnectorComponent(self) self.builders = builders.BuildersConnectorComponent(self) self.steps = steps.StepsConnectorComponent(self) self.tags = tags.TagsConnectorComponent(self) self.logs = logs.LogsConnectorComponent(self) self.cleanup_timer = internet.TimerService(self.CLEANUP_PERIOD, self._doCleanup) self.cleanup_timer.clock = self.master.reactor yield self.cleanup_timer.setServiceParent(self) @defer.inlineCallbacks def setup(self, check_version=True, verbose=True): db_url = self.configured_url = self.master.config.db['db_url'] log.msg("Setting up database with URL %r" % util.stripUrlPassword(db_url)) # set up the engine and pool self._engine = enginestrategy.create_engine(db_url, basedir=self.basedir) self.pool = pool.DBThreadPool( self._engine, reactor=self.master.reactor, 
verbose=verbose) # make sure the db is up to date, unless specifically asked not to if check_version: if db_url == 'sqlite://': # Using in-memory database. Since it is reset after each process # restart, `buildbot upgrade-master` cannot be used (data is not # persistent). Upgrade model here to allow startup to continue. self.model.upgrade() current = yield self.model.is_current() if not current: for l in upgrade_message.format(basedir=self.master.basedir).split('\n'): log.msg(l) raise exceptions.DatabaseNotReadyError() def reconfigServiceWithBuildbotConfig(self, new_config): # double-check -- the master ensures this in config checks assert self.configured_url == new_config.db['db_url'] return super().reconfigServiceWithBuildbotConfig(new_config) def _doCleanup(self): """ Perform any periodic database cleanup tasks. @returns: Deferred """ # pass on this if we're not configured yet if not self.configured_url: return d = self.changes.pruneChanges(self.master.config.changeHorizon) d.addErrback(log.err, 'while pruning changes') return d buildbot-2.6.0/master/buildbot/db/dbconfig.py000066400000000000000000000056511361162603000211410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from sqlalchemy.exc import OperationalError from sqlalchemy.exc import ProgrammingError from buildbot.config import MasterConfig from buildbot.db import enginestrategy from buildbot.db import model from buildbot.db import state class FakeDBConnector: pass class FakeCacheManager: def get_cache(self, cache_name, miss_fn): return None class FakeMaster: pass class FakePool: pass class DbConfig: def __init__(self, BuildmasterConfig, basedir, name="config"): self.db_url = MasterConfig.getDbUrlFromConfig( BuildmasterConfig, throwErrors=False) self.basedir = basedir self.name = name def getDb(self): try: db_engine = enginestrategy.create_engine(self.db_url, basedir=self.basedir) except Exception: # db_url is probably trash. 
Just ignore, config.py db part will # create proper message return None db = FakeDBConnector() db.master = FakeMaster() db.pool = FakePool() db.pool.engine = db_engine db.master.caches = FakeCacheManager() db.model = model.Model(db) db.state = state.StateConnectorComponent(db) try: self.objectid = db.state.thdGetObjectId( db_engine, self.name, "DbConfig")['id'] except (ProgrammingError, OperationalError): # ProgrammingError: mysql&pg, OperationalError: sqlite # assume db is not initialized db.pool.engine.dispose() return None return db def get(self, name, default=state.StateConnectorComponent.Thunk): db = self.getDb() if db is not None: ret = db.state.thdGetState( db.pool.engine, self.objectid, name, default=default) db.pool.engine.dispose() else: if default is not state.StateConnectorComponent.Thunk: return default raise KeyError("Db not yet initialized") return ret def set(self, name, value): db = self.getDb() if db is not None: db.state.thdSetState(db.pool.engine, self.objectid, name, value) db.pool.engine.dispose() buildbot-2.6.0/master/buildbot/db/enginestrategy.py000066400000000000000000000253471361162603000224220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ A wrapper around `sqlalchemy.create_engine` that handles all of the special cases that Buildbot needs. 
Those include: - pool_recycle for MySQL - %(basedir) substitution - optimal thread pool size calculation """ import os import re import migrate import sqlalchemy as sa from sqlalchemy.engine import strategies from sqlalchemy.engine import url from sqlalchemy.pool import NullPool from twisted.python import log from buildbot.util import sautils # from http://www.mail-archive.com/sqlalchemy@googlegroups.com/msg15079.html class ReconnectingListener: def __init__(self): self.retried = False class Strategy: def set_up(self, u, engine): pass def should_retry(self, operational_error): try: text = operational_error.args[0] return 'Lost connection' in text or 'database is locked' in text except Exception: return False class SqlLiteStrategy(Strategy): def set_up(self, u, engine): """Special setup for sqlite engines""" def connect_listener_enable_fk(connection, record): # fk must be enabled for all connections if not getattr(engine, "fk_disabled", False): return # http://trac.buildbot.net/ticket/3490#ticket # connection.execute('pragma foreign_keys=ON') sa.event.listen(engine.pool, 'connect', connect_listener_enable_fk) # try to enable WAL logging if u.database: def connect_listener(connection, record): connection.execute("pragma checkpoint_fullfsync = off") sa.event.listen(engine.pool, 'connect', connect_listener) log.msg("setting database journal mode to 'wal'") try: engine.execute("pragma journal_mode = wal") except Exception: log.msg("failed to set journal mode - database may fail") class MySQLStrategy(Strategy): disconnect_error_codes = (2006, 2013, 2014, 2045, 2055) deadlock_error_codes = (1213,) def in_error_codes(self, args, error_codes): if args: return args[0] in error_codes return False def is_disconnect(self, args): return self.in_error_codes(args, self.disconnect_error_codes) def is_deadlock(self, args): return self.in_error_codes(args, self.deadlock_error_codes) def set_up(self, u, engine): """Special setup for mysql engines""" # add the reconnecting PoolListener that will detect a # disconnected connection and automatically start a new # one. 
This provides a measure of additional safety over # the pool_recycle parameter, and is useful when e.g., the # mysql server goes away def checkout_listener(dbapi_con, con_record, con_proxy): try: cursor = dbapi_con.cursor() cursor.execute("SELECT 1") except dbapi_con.OperationalError as ex: if self.is_disconnect(ex.args): # sqlalchemy will re-create the connection log.msg('connection will be removed') raise sa.exc.DisconnectionError() log.msg('exception happened {}'.format(ex)) raise # older versions of sqlalchemy require the listener to be specified # in the kwargs, in a class instance if sautils.sa_version() < (0, 7, 0): class ReconnectingListener: pass rcl = ReconnectingListener() rcl.checkout = checkout_listener engine.pool.add_listener(rcl) else: sa.event.listen(engine.pool, 'checkout', checkout_listener) def should_retry(self, ex): return any([self.is_disconnect(ex.orig.args), self.is_deadlock(ex.orig.args), super(MySQLStrategy, self).should_retry(ex)]) def get_sqlalchemy_migrate_version(): # sqlalchemy-migrate started including a version number in 0.7 # Borrowed from model.py version = getattr(migrate, '__version__', 'old') if version == 'old': try: from migrate.versioning import schemadiff if hasattr(schemadiff, 'ColDiff'): version = "0.6.1" else: version = "0.6" except Exception: version = "0.0" return tuple(map(int, version.split('.'))) class BuildbotEngineStrategy(strategies.PlainEngineStrategy): # A subclass of the PlainEngineStrategy that can effectively interact # with Buildbot. # # This adjusts the passed-in parameters to ensure that we get the behaviors # Buildbot wants from particular drivers, and wraps the outgoing Engine # object so that its methods run in threads and return deferreds. name = 'buildbot' def special_case_sqlite(self, u, kwargs): """For sqlite, percent-substitute %(basedir)s and use a full path to the basedir. If using a memory database, force the pool size to be 1.""" max_conns = 1 # when given a database path, stick the basedir in there if u.database: # Use NullPool instead of the sqlalchemy-0.6.8-default # SingletonThreadPool for sqlite to suppress the error in # http://groups.google.com/group/sqlalchemy/msg/f8482e4721a89589, # which also explains that NullPool is the new default in # sqlalchemy 0.7 for non-memory SQLite databases. kwargs.setdefault('poolclass', NullPool) u.database = u.database % dict(basedir=kwargs['basedir']) if not os.path.isabs(u.database[0]): u.database = os.path.join(kwargs['basedir'], u.database) else: # For in-memory database SQLAlchemy will use SingletonThreadPool # and we will run connection creation and all queries in the single # thread. # However connection destruction will be run from the main # thread, which is safe in our case, but not safe in general, # so SQLite will emit warning about it. # Silence that warning. kwargs.setdefault('connect_args', {})['check_same_thread'] = False # ignore serializing access to the db if 'serialize_access' in u.query: u.query.pop('serialize_access') return u, kwargs, max_conns def special_case_mysql(self, u, kwargs): """For mysql, take max_idle out of the query arguments, and use its value for pool_recycle. 
Also, force use_unicode and charset to be True and 'utf8', failing if they were set to anything else.""" kwargs['pool_recycle'] = int(u.query.pop('max_idle', 3600)) # default to the MyISAM storage engine storage_engine = u.query.pop('storage_engine', 'MyISAM') kwargs['connect_args'] = { 'init_command': 'SET default_storage_engine=%s' % storage_engine, } if 'use_unicode' in u.query: if u.query['use_unicode'] != "True": raise TypeError("Buildbot requires use_unicode=True " + "(and adds it automatically)") else: u.query['use_unicode'] = True if 'charset' in u.query: if u.query['charset'] != "utf8": raise TypeError("Buildbot requires charset=utf8 " + "(and adds it automatically)") else: u.query['charset'] = 'utf8' return u, kwargs, None def check_sqlalchemy_version(self): version = getattr(sa, '__version__', '0') try: version_digits = re.sub('[^0-9.]', '', version) version_tup = tuple(map(int, version_digits.split('.'))) except TypeError: return # unparseable -- oh well if version_tup < (0, 6): raise RuntimeError("SQLAlchemy version %s is too old" % (version,)) if version_tup > (0, 7, 10): mvt = get_sqlalchemy_migrate_version() if mvt < (0, 8, 0): raise RuntimeError("SQLAlchemy version %s is not supported by " "SQLAlchemy-Migrate version %d.%d.%d" % (version, mvt[0], mvt[1], mvt[2])) def get_drivers_strategy(self, drivername): if drivername.startswith('sqlite'): return SqlLiteStrategy() elif drivername.startswith('mysql'): return MySQLStrategy() return Strategy() def create(self, name_or_url, **kwargs): if 'basedir' not in kwargs: raise TypeError('no basedir supplied to create_engine') self.check_sqlalchemy_version() max_conns = None # apply special cases u = url.make_url(name_or_url) if u.drivername.startswith('sqlite'): u, kwargs, max_conns = self.special_case_sqlite(u, kwargs) elif u.drivername.startswith('mysql'): u, kwargs, max_conns = self.special_case_mysql(u, kwargs) # remove the basedir as it may confuse sqlalchemy basedir = kwargs.pop('basedir') # calculate the maximum number of connections from the pool parameters, # if it hasn't already been specified if max_conns is None: max_conns = kwargs.get( 'pool_size', 5) + kwargs.get('max_overflow', 10) strategy = self.get_drivers_strategy(u.drivername) engine = super().create(u, **kwargs) strategy.set_up(u, engine) engine.should_retry = strategy.should_retry # annotate the engine with the optimal thread pool size; this is used # by DBConnector to configure the surrounding thread pool engine.optimal_thread_pool_size = max_conns # keep the basedir engine.buildbot_basedir = basedir return engine BuildbotEngineStrategy() # this module is really imported for the side-effects, but pyflakes will like # us to use something from the module -- so offer a copy of create_engine, # which explicitly adds the strategy argument def create_engine(*args, **kwargs): kwargs['strategy'] = 'buildbot' return sa.create_engine(*args, **kwargs) buildbot-2.6.0/master/buildbot/db/exceptions.py000066400000000000000000000013641361162603000215440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
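# Illustrative sketch, not from the upstream buildbot source: what
# special_case_mysql() above does with the extra query arguments of a
# mysql:// URL.  The plain dict below stands in for the parsed url.query
# mapping; the real code mutates a sqlalchemy.engine.url.URL object.
def _mysql_engine_kwargs(query):
    kwargs = {}
    # max_idle (seconds) becomes SQLAlchemy's pool_recycle, one hour by default
    kwargs['pool_recycle'] = int(query.pop('max_idle', 3600))
    # storage_engine (MyISAM by default) is applied to every new connection
    storage_engine = query.pop('storage_engine', 'MyISAM')
    kwargs['connect_args'] = {
        'init_command': 'SET default_storage_engine=%s' % storage_engine,
    }
    return kwargs

kwargs = _mysql_engine_kwargs({'max_idle': '300', 'storage_engine': 'InnoDB'})
assert kwargs['pool_recycle'] == 300
assert 'InnoDB' in kwargs['connect_args']['init_command']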
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class DatabaseNotReadyError(Exception): pass buildbot-2.6.0/master/buildbot/db/logs.py000066400000000000000000000407631361162603000203350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import bz2 import zlib import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base try: # lz4 > 0.9.0 from lz4.block import compress as dumps_lz4 from lz4.block import decompress as read_lz4 except ImportError: try: # lz4 < 0.9.0 from lz4 import dumps as dumps_lz4 from lz4 import loads as read_lz4 except ImportError: # pragma: no cover # config.py actually forbid this code path def dumps_lz4(data): return data def read_lz4(data): return data def dumps_gzip(data): return zlib.compress(data, 9) def read_gzip(data): return zlib.decompress(data) def dumps_bz2(data): return bz2.compress(data, 9) def read_bz2(data): return bz2.decompress(data) class LogsConnectorComponent(base.DBConnectorComponent): # Postgres and MySQL will both allow bigger sizes than this. The limit # for MySQL appears to be max_packet_size (default 1M). 
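# Illustrative sketch, not from the upstream buildbot source: a round trip
# through the compression helpers defined above (zlib and bz2 only; lz4 is
# optional upstream), together with the keep-only-if-smaller rule that
# thdCompressChunk() applies further below.  Mode ids 0/1/2 match
# COMPRESSION_MODE; the sample payload is made up.
import bz2
import zlib

MODES = {
    "raw": {"id": 0, "dumps": lambda x: x, "read": lambda x: x},
    "gz": {"id": 1, "dumps": lambda x: zlib.compress(x, 9), "read": zlib.decompress},
    "bz2": {"id": 2, "dumps": lambda x: bz2.compress(x, 9), "read": bz2.decompress},
}
BYID = dict((m["id"], m) for m in MODES.values())

def compress_chunk(chunk, method="gz"):
    compressed_id = MODES["raw"]["id"]
    if method != "raw":
        candidate = MODES[method]["dumps"](chunk)
        if len(chunk) > len(candidate):   # keep the compressed form only if it helps
            chunk, compressed_id = candidate, MODES[method]["id"]
    return chunk, compressed_id

stored, cid = compress_chunk(b"spam " * 1000, "gz")
assert BYID[cid]["read"](stored) == b"spam " * 1000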
# note that MAX_CHUNK_SIZE is equal to BUFFER_SIZE in buildbot_worker.runprocess MAX_CHUNK_SIZE = 65536 # a chunk may not be bigger than this MAX_CHUNK_LINES = 1000 # a chunk may not have more lines than this COMPRESSION_MODE = {"raw": {"id": 0, "dumps": lambda x: x, "read": lambda x: x}, "gz": {"id": 1, "dumps": dumps_gzip, "read": read_gzip}, "bz2": {"id": 2, "dumps": dumps_bz2, "read": read_bz2}, "lz4": {"id": 3, "dumps": dumps_lz4, "read": read_lz4}} COMPRESSION_BYID = dict((x["id"], x) for x in COMPRESSION_MODE.values()) total_raw_bytes = 0 total_compressed_bytes = 0 # returns a Deferred that returns a value def _getLog(self, whereclause): def thd_getLog(conn): q = self.db.model.logs.select(whereclause=whereclause) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._logdictFromRow(row) res.close() return rv return self.db.pool.do(thd_getLog) def getLog(self, logid): return self._getLog(self.db.model.logs.c.id == logid) def getLogBySlug(self, stepid, slug): tbl = self.db.model.logs return self._getLog((tbl.c.slug == slug) & (tbl.c.stepid == stepid)) # returns a Deferred that returns a value def getLogs(self, stepid=None): def thdGetLogs(conn): tbl = self.db.model.logs q = tbl.select() if stepid is not None: q = q.where(tbl.c.stepid == stepid) q = q.order_by(tbl.c.id) res = conn.execute(q) return [self._logdictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thdGetLogs) # returns a Deferred that returns a value def getLogLines(self, logid, first_line, last_line): def thdGetLogLines(conn): # get a set of chunks that completely cover the requested range tbl = self.db.model.logchunks q = sa.select([tbl.c.first_line, tbl.c.last_line, tbl.c.content, tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.where(tbl.c.first_line <= last_line) q = q.where(tbl.c.last_line >= first_line) q = q.order_by(tbl.c.first_line) rv = [] for row in conn.execute(q): # Retrieve associated "reader" and extract the data # Note that row.content is stored as bytes, and our caller expects unicode data = self.COMPRESSION_BYID[ row.compressed]["read"](row.content) content = data.decode('utf-8') if row.first_line < first_line: idx = -1 count = first_line - row.first_line for _ in range(count): idx = content.index('\n', idx + 1) content = content[idx + 1:] if row.last_line > last_line: idx = len(content) + 1 count = row.last_line - last_line for _ in range(count): idx = content.rindex('\n', 0, idx) content = content[:idx] rv.append(content) return '\n'.join(rv) + '\n' if rv else '' return self.db.pool.do(thdGetLogLines) # returns a Deferred that returns a value def addLog(self, stepid, name, slug, type): assert type in 'tsh', "Log type must be one of t, s, or h" def thdAddLog(conn): try: r = conn.execute(self.db.model.logs.insert(), dict(name=name, slug=slug, stepid=stepid, complete=0, num_lines=0, type=type)) return r.inserted_primary_key[0] except (sa.exc.IntegrityError, sa.exc.ProgrammingError): raise KeyError( "log with slug '%r' already exists in this step" % (slug,)) return self.db.pool.do(thdAddLog) def thdCompressChunk(self, chunk): # Set the default compressed mode to "raw" id compressed_id = self.COMPRESSION_MODE["raw"]["id"] self.total_raw_bytes += len(chunk) # Do we have to compress the chunk? if self.master.config.logCompressionMethod != "raw": compressed_mode = self.COMPRESSION_MODE[ self.master.config.logCompressionMethod] compressed_chunk = compressed_mode["dumps"](chunk) # Is it useful to compress the chunk? 
if len(chunk) > len(compressed_chunk): compressed_id = compressed_mode["id"] chunk = compressed_chunk self.total_compressed_bytes += len(chunk) return chunk, compressed_id def thdSplitAndAppendChunk(self, conn, logid, content, first_line): # Break the content up into chunks. This takes advantage of the # fact that no character but u'\n' maps to b'\n' in UTF-8. remaining = content chunk_first_line = last_line = first_line while remaining: chunk, remaining = self._splitBigChunk(remaining, logid) last_line = chunk_first_line + chunk.count(b'\n') chunk, compressed_id = self.thdCompressChunk(chunk) conn.execute(self.db.model.logchunks.insert(), dict(logid=logid, first_line=chunk_first_line, last_line=last_line, content=chunk, compressed=compressed_id)).close() chunk_first_line = last_line + 1 conn.execute(self.db.model.logs.update(whereclause=(self.db.model.logs.c.id == logid)), num_lines=last_line + 1).close() return first_line, last_line def thdAppendLog(self, conn, logid, content): # check for trailing newline and strip it for storage -- chunks omit # the trailing newline assert content[-1] == '\n' # Note that row.content is stored as bytes, and our caller is sending unicode content = content[:-1].encode('utf-8') q = sa.select([self.db.model.logs.c.num_lines]) q = q.where(self.db.model.logs.c.id == logid) res = conn.execute(q) num_lines = res.fetchone() res.close() if not num_lines: return # ignore a missing log return self.thdSplitAndAppendChunk(conn=conn, logid=logid, content=content, first_line=num_lines[0]) # returns a Deferred that returns a value def appendLog(self, logid, content): def thdappendLog(conn): return self.thdAppendLog(conn, logid, content) return self.db.pool.do(thdappendLog) def _splitBigChunk(self, content, logid): """ Split CONTENT on a line boundary into a prefix smaller than 64k and a suffix containing the remainder, omitting the splitting newline. """ # if it's small enough, just return it if len(content) < self.MAX_CHUNK_SIZE: return content, None # find the last newline before the limit i = content.rfind(b'\n', 0, self.MAX_CHUNK_SIZE) if i != -1: return content[:i], content[i + 1:] log.msg('truncating long line for log %d' % logid) # first, truncate this down to something that decodes correctly truncline = content[:self.MAX_CHUNK_SIZE] while truncline: try: truncline.decode('utf-8') break except UnicodeDecodeError: truncline = truncline[:-1] # then find the beginning of the next line i = content.find(b'\n', self.MAX_CHUNK_SIZE) if i == -1: return truncline, None return truncline, content[i + 1:] # returns a Deferred that returns None def finishLog(self, logid): def thdfinishLog(conn): tbl = self.db.model.logs q = tbl.update(whereclause=(tbl.c.id == logid)) conn.execute(q, complete=1) return self.db.pool.do(thdfinishLog) @defer.inlineCallbacks def compressLog(self, logid, force=False): def thdcompressLog(conn): tbl = self.db.model.logchunks q = sa.select([tbl.c.first_line, tbl.c.last_line, sa.func.length(tbl.c.content), tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.order_by(tbl.c.first_line) rows = conn.execute(q) todo_gather_list = [] numchunks = 0 totlength = 0 todo_numchunks = 0 todo_first_line = 0 todo_last_line = 0 todo_length = 0 # first pass, we fetch the full list of chunks (without content) and find out # the chunk groups which could use some gathering. 
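# Illustrative sketch, not from the upstream buildbot source: the UTF-8-safe
# truncation used by _splitBigChunk() above when a single log line exceeds
# the chunk limit -- bytes are dropped from the end until the prefix decodes
# cleanly, so a multi-byte character is never cut in half.  The sample line
# and limit are made up.
def truncate_utf8(data, limit):
    trunc = data[:limit]
    while trunc:
        try:
            trunc.decode('utf-8')
            break
        except UnicodeDecodeError:
            trunc = trunc[:-1]
    return trunc

line = ('x' * 10 + '\N{SNOWMAN}').encode('utf-8')   # the snowman is 3 bytes
assert truncate_utf8(line, 12).decode('utf-8') == 'x' * 10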
for row in rows: if (todo_length + row.length_1 > self.MAX_CHUNK_SIZE or (row.last_line - todo_first_line) > self.MAX_CHUNK_LINES): if todo_numchunks > 1 or (force and todo_numchunks): # this group is worth re-compressing todo_gather_list.append((todo_first_line, todo_last_line)) todo_first_line = row.first_line todo_length = 0 todo_numchunks = 0 todo_last_line = row.last_line # note that we count the compressed size for efficiency reason # unlike to the on-the-flow chunk splitter todo_length += row.length_1 totlength += row.length_1 todo_numchunks += 1 numchunks += 1 rows.close() if totlength == 0: # empty log return 0 if todo_numchunks > 1 or (force and todo_numchunks): # last chunk group todo_gather_list.append((todo_first_line, todo_last_line)) for todo_first_line, todo_last_line in todo_gather_list: # decompress this group of chunks. Note that the content is binary bytes. # no need to decode anything as we are going to put in back stored as bytes anyway q = sa.select( [tbl.c.first_line, tbl.c.last_line, tbl.c.content, tbl.c.compressed]) q = q.where(tbl.c.logid == logid) q = q.where(tbl.c.first_line >= todo_first_line) q = q.where(tbl.c.last_line <= todo_last_line) q = q.order_by(tbl.c.first_line) rows = conn.execute(q) chunk = b"" for row in rows: if chunk: chunk += b"\n" chunk += self.COMPRESSION_BYID[row.compressed][ "read"](row.content) rows.close() # Transaction is necessary so that readers don't see disappeared chunks transaction = conn.begin() # we remove the chunks that we are compressing d = tbl.delete() d = d.where(tbl.c.logid == logid) d = d.where(tbl.c.first_line >= todo_first_line) d = d.where(tbl.c.last_line <= todo_last_line) conn.execute(d).close() # and we recompress them in one big chunk chunk, compressed_id = self.thdCompressChunk(chunk) conn.execute(tbl.insert(), dict(logid=logid, first_line=todo_first_line, last_line=todo_last_line, content=chunk, compressed=compressed_id)).close() transaction.commit() # calculate how many bytes we saved q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))]) q = q.where(tbl.c.logid == logid) newsize = conn.execute(q).fetchone()[0] return totlength - newsize saved = yield self.db.pool.do(thdcompressLog) return saved # returns a Deferred that returns a value def deleteOldLogChunks(self, older_than_timestamp): def thddeleteOldLogs(conn): model = self.db.model res = conn.execute(sa.select([sa.func.count(model.logchunks.c.logid)])) count1 = res.fetchone()[0] res.close() # update log types older than timestamps # we do it first to avoid having UI discrepancy # N.B.: we utilize the fact that steps.id is auto-increment, thus steps.started_at # times are effectively sorted and we only need to find the steps.id at the upper # bound of steps to update. # SELECT steps.id from steps WHERE steps.started_at < older_than_timestamp ORDER BY steps.id DESC LIMIT 1; res = conn.execute( sa.select([model.steps.c.id]) .where(model.steps.c.started_at < older_than_timestamp) .order_by(model.steps.c.id.desc()) .limit(1) ) res_list = res.fetchone() stepid_max = None if res_list: stepid_max = res_list[0] res.close() # UPDATE logs SET logs.type = 'd' WHERE logs.stepid <= stepid_max AND type != 'd'; if stepid_max: res = conn.execute( model.logs.update() .where(sa.and_(model.logs.c.stepid <= stepid_max, model.logs.c.type != 'd')) .values(type='d') ) res.close() # query all logs with type 'd' and delete their chunks. 
if self.db._engine.dialect.name == 'sqlite': # sqlite does not support delete with a join, so for this case we use a subquery, # which is much slower q = sa.select([model.logs.c.id]) q = q.select_from(model.logs) q = q.where(model.logs.c.type == 'd') # delete their logchunks q = model.logchunks.delete().where(model.logchunks.c.logid.in_(q)) else: q = model.logchunks.delete() q = q.where(model.logs.c.id == model.logchunks.c.logid) q = q.where(model.logs.c.type == 'd') res = conn.execute(q) res.close() res = conn.execute(sa.select([sa.func.count(model.logchunks.c.logid)])) count2 = res.fetchone()[0] res.close() return count1 - count2 return self.db.pool.do(thddeleteOldLogs) def _logdictFromRow(self, row): rv = dict(row) rv['complete'] = bool(rv['complete']) return rv buildbot-2.6.0/master/buildbot/db/masters.py000066400000000000000000000075141361162603000210440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.db import base from buildbot.util import epoch2datetime class MasterDict(dict): pass class MastersConnectorComponent(base.DBConnectorComponent): data2db = {"masterid": "id", "link": "id"} def findMasterId(self, name): tbl = self.db.model.masters return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, name_hash=self.hashColumns(name), active=0, # initially inactive last_active=self.master.reactor.seconds() )) # returns a Deferred that returns a value def setMasterState(self, masterid, active): def thd(conn): tbl = self.db.model.masters whereclause = (tbl.c.id == masterid) # get the old state r = conn.execute(sa.select([tbl.c.active], whereclause=whereclause)) rows = r.fetchall() r.close() if not rows: return False # can't change a row that doesn't exist.. 
was_active = bool(rows[0].active) if not active: # if we're marking inactive, then delete any links to this # master sch_mst_tbl = self.db.model.scheduler_masters q = sch_mst_tbl.delete( whereclause=(sch_mst_tbl.c.masterid == masterid)) conn.execute(q) # set the state (unconditionally, just to be safe) q = tbl.update(whereclause=whereclause) q = q.values(active=1 if active else 0) if active: q = q.values(last_active=self.master.reactor.seconds()) conn.execute(q) # return True if there was a change in state return was_active != bool(active) return self.db.pool.do(thd) # returns a Deferred that returns a value def getMaster(self, masterid): def thd(conn): tbl = self.db.model.masters res = conn.execute(tbl.select( whereclause=(tbl.c.id == masterid))) row = res.fetchone() rv = None if row: rv = self._masterdictFromRow(row) res.close() return rv return self.db.pool.do(thd) # returns a Deferred that returns a value def getMasters(self): def thd(conn): tbl = self.db.model.masters return [ self._masterdictFromRow(row) for row in conn.execute(tbl.select()).fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns None def setAllMastersActiveLongTimeAgo(self): def thd(conn): tbl = self.db.model.masters q = tbl.update().values(active=1, last_active=0) conn.execute(q) return self.db.pool.do(thd) def _masterdictFromRow(self, row): return MasterDict(id=row.id, name=row.name, active=bool(row.active), last_active=epoch2datetime(row.last_active)) buildbot-2.6.0/master/buildbot/db/migrate/000077500000000000000000000000001361162603000204355ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/migrate/README000066400000000000000000000001531361162603000213140ustar00rootroot00000000000000This is a database migration repository. More information at http://code.google.com/p/sqlalchemy-migrate/ buildbot-2.6.0/master/buildbot/db/migrate/migrate.cfg000066400000000000000000000017301361162603000225470ustar00rootroot00000000000000[db_settings] # Used to identify which repository this database is versioned under. # You can use the name of your project. repository_id=Buildbot # The name of the database table used to track the schema version. # This name shouldn't already be used by your project. # If this is changed once a database is under version control, you'll need to # change the table name in each database too. version_table=migrate_version # When committing a change script, Migrate will attempt to generate the # sql for all supported databases; normally, if one of them fails - probably # because you don't have that database installed - it is ignored and the # commit continues, perhaps ending successfully. # Databases in this list MUST compile successfully during a commit, or the # entire commit will fail. List the databases your application will actually # be using to ensure your updates to that database work properly. # This must be a list; example: ['postgres','sqlite'] required_dbs=[] buildbot-2.6.0/master/buildbot/db/migrate/versions/000077500000000000000000000000001361162603000223055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/migrate/versions/040_add_builder_tags.py000066400000000000000000000017701361162603000265230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
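# Illustrative sketch, not from the upstream buildbot source: the return
# value contract of setMasterState() above -- the Deferred fires with True
# only when the stored "active" flag actually flips.  The in-memory mirror
# below ignores the missing-row case (upstream returns False for it); the
# masterid used is made up.
def set_state(states, masterid, active):
    was_active = states.get(masterid, False)
    states[masterid] = bool(active)
    return was_active != bool(active)

states = {1: False}                            # master row already exists
assert set_state(states, 1, True) is True      # inactive -> active
assert set_state(states, 1, True) is False     # no change to report
assert set_state(states, 1, False) is True     # active -> inactive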
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builders_table = sautils.Table('builders', metadata, autoload=True) tags = sa.Column('tags', sa.Text, nullable=True) tags.create(builders_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/041_add_N_N_tagsbuilders.py000066400000000000000000000041431361162603000272770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builders = sautils.Table('builders', metadata, autoload=True) # drop the tags column builders.c.tags.drop() tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), # tag's name sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # a many-to-may relationship between builders and tags builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id'), nullable=False), ) # create the new tables tags.create() builders_tags.create() # and the indices idx = sa.Index('builders_tags_builderid', builders_tags.c.builderid) idx.create() idx = sa.Index('builders_tags_unique', builders_tags.c.builderid, builders_tags.c.tagid, unique=True) idx.create() idx = sa.Index('tag_name_hash', tags.c.name_hash, unique=True) idx.create() buildbot-2.6.0/master/buildbot/db/migrate/versions/042_add_build_properties_table.py000066400000000000000000000031011361162603000305710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
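# Illustrative sketch, not from the upstream buildbot source: one way the
# many-to-many schema created by migration 041 above can be queried -- join
# builders_tags to tags to list a builder's tag names.  A throwaway
# in-memory SQLite database, made-up rows, and the same SQLAlchemy 1.x
# sa.select([...]) list style as the surrounding code.
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
tags = sa.Table('tags', meta,
                sa.Column('id', sa.Integer, primary_key=True),
                sa.Column('name', sa.Text, nullable=False))
builders_tags = sa.Table('builders_tags', meta,
                         sa.Column('id', sa.Integer, primary_key=True),
                         sa.Column('builderid', sa.Integer, nullable=False),
                         sa.Column('tagid', sa.Integer,
                                   sa.ForeignKey('tags.id'), nullable=False))
meta.create_all(engine)

with engine.connect() as conn:
    conn.execute(tags.insert(), [dict(id=1, name='linux'), dict(id=2, name='nightly')])
    conn.execute(builders_tags.insert(),
                 [dict(builderid=7, tagid=1), dict(builderid=7, tagid=2)])
    q = sa.select([tags.c.name]).select_from(
        builders_tags.join(tags, builders_tags.c.tagid == tags.c.id)
    ).where(builders_tags.c.builderid == 7)
    assert sorted(r.name for r in conn.execute(q)) == ['linux', 'nightly']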
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine sautils.Table('builds', metadata, sa.Column('id', sa.Integer, primary_key=True), # .. ) # This table contains input properties for builds build_properties = sautils.Table( 'build_properties', metadata, sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id'), nullable=False), sa.Column('name', sa.String(256), nullable=False), # JSON-encoded value sa.Column('value', sa.Text, nullable=False), sa.Column('source', sa.Text, nullable=False), ) # create the new table build_properties.create() # and an Index on it. idx = sa.Index('build_properties_buildid', build_properties.c.buildid) idx.create() buildbot-2.6.0/master/buildbot/db/migrate/versions/043_changes_parent.py000066400000000000000000000021031361162603000262220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine changes_table = sautils.Table('changes', metadata, autoload=True) parent_changeids = sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid'), nullable=True) parent_changeids.create(changes_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/044_add_step_hidden.py000066400000000000000000000020221361162603000263400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
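# Illustrative sketch, not from the upstream buildbot source: the
# build_properties table created by migration 042 above keeps the property
# value JSON-encoded in a Text column, so writers dump and readers load.
# The buildid, property name, value and source below are invented examples.
import json

row = dict(buildid=12, name='got_revision',
           value=json.dumps('deadbeef'), source='Git')
assert json.loads(row['value']) == 'deadbeef'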
# # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine steps_table = sautils.Table('steps', metadata, autoload=True) hidden = sa.Column('hidden', sa.SmallInteger, nullable=False, server_default='0') hidden.create(steps_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/045_worker_transition.py000066400000000000000000000242321361162603000270350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.python import log from buildbot.db.types.json import JsonObject from buildbot.util import sautils def _create_configured_workers_table(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine sautils.Table('builder_masters', metadata, autoload=True) sautils.Table('workers', metadata, autoload=True) # Create 'configured_workers' table. configured_workers = sautils.Table( 'configured_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('buildermasterid', sa.Integer, sa.ForeignKey('builder_masters.id'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id'), nullable=False), ) configured_workers.create() # Create indexes. idx = sa.Index('configured_workers_buildmasterid', configured_workers.c.buildermasterid) idx.create() idx = sa.Index('configured_workers_workers', configured_workers.c.workerid) idx.create() idx = sa.Index('configured_workers_identity', configured_workers.c.buildermasterid, configured_workers.c.workerid, unique=True) idx.create() def _create_connected_workers_table(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine sautils.Table('masters', metadata, autoload=True) sautils.Table('workers', metadata, autoload=True) # Create 'connected_workers' table. connected_workers = sautils.Table( 'connected_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id'), nullable=False), ) connected_workers.create() # Create indexes. idx = sa.Index('connected_workers_masterid', connected_workers.c.masterid) idx.create() idx = sa.Index('connected_workers_workers', connected_workers.c.workerid) idx.create() idx = sa.Index('connected_workers_identity', connected_workers.c.masterid, connected_workers.c.workerid, unique=True) idx.create() def _create_workers_table(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine # Create 'workers' table. workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("name", sa.String(50), nullable=False), sa.Column("info", JsonObject, nullable=False), ) workers.create() # Create indexes. 
idx = sa.Index('workers_name', workers.c.name, unique=True) idx.create() def _add_workerid_fk_to_builds_table(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine sautils.Table('builders', metadata, autoload=True) sautils.Table('buildrequests', metadata, autoload=True) sautils.Table('workers', metadata, autoload=True) sautils.Table('masters', metadata, autoload=True) builds = sautils.Table('builds', metadata, autoload=True) workerid = sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id')) workerid.create(builds) # Create indexes. idx = sa.Index('builds_workerid', builds.c.workerid) idx.create() def _migrate_workers_table_data(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine buildslaves = sautils.Table('buildslaves', metadata, autoload=True) workers = sautils.Table('workers', metadata, autoload=True) c = buildslaves.c q = sa.select([c.id, c.name, c.info]) migrate_engine.execute( str(sautils.InsertFromSelect(workers, q))) def _migrate_configured_workers_table_data(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine configured_buildslaves = sautils.Table( 'configured_buildslaves', metadata, autoload=True) configured_workers = sautils.Table( 'configured_workers', metadata, autoload=True) c = configured_buildslaves.c q = sa.select([c.id, c.buildermasterid, c.buildslaveid.label('workerid')]) migrate_engine.execute( str(sautils.InsertFromSelect(configured_workers, q))) def _migrate_connected_workers_table_data(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine connected_buildslaves = sautils.Table( 'connected_buildslaves', metadata, autoload=True) connected_workers = sautils.Table( 'connected_workers', metadata, autoload=True) c = connected_buildslaves.c q = sa.select([c.id, c.masterid, c.buildslaveid.label('workerid')]) migrate_engine.execute( str(sautils.InsertFromSelect(connected_workers, q))) def _migrate_builds_table_data(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builds = sautils.Table('builds', metadata, autoload=True) s = builds.update().values(workerid=builds.c.buildslaveid) migrate_engine.execute(s) def _drop_configured_buildslaves(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine configured_buildslaves = sautils.Table( 'configured_buildslaves', metadata, autoload=True) configured_buildslaves.drop() def _drop_connected_buildslaves(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine connected_buildslaves = sautils.Table( 'connected_buildslaves', metadata, autoload=True) connected_buildslaves.drop() def _drop_buildslaveid_column_in_builds(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builds = sautils.Table('builds', metadata, autoload=True) builds.c.buildslaveid.drop() def _drop_buildslaves(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine buildslaves_old = sautils.Table('buildslaves', metadata, autoload=True) buildslaves_old.drop() def _remove_invalid_references_in_builds(migrate_engine): # 'buildslaveid' column of 'builds' table don't have Foreign Key # constraint on 'id' column of 'buildslaves' table, so it is # possible that that reference is invalid. # Remove such invalid references for easier resolve of #3088 later. 
metadata = sa.MetaData() metadata.bind = migrate_engine builds = sautils.Table('builds', metadata, autoload=True) buildslaves = sautils.Table('buildslaves', metadata, autoload=True) q = sa.select( [builds.c.id, builds.c.buildslaveid, buildslaves.c.id] ).select_from( builds.outerjoin( buildslaves, builds.c.buildslaveid == buildslaves.c.id) ).where( (buildslaves.c.id == None) & (builds.c.buildslaveid != None) ) invalid_references = q.execute().fetchall() if invalid_references: # Report invalid references. def format(res): return ("builds.id={id} builds.buildslaveid={buildslaveid} " "(not present in 'buildslaves' table)").format( id=res[0], buildslaveid=res[1]) log.msg( "'builds' table has invalid references on 'buildslaves' table:\n" "{0}".format("\n".join(map(format, invalid_references)))) # Remove invalid references. for build_id, buildslave_id, none in invalid_references: assert none is None q = sa.update(builds).where(builds.c.id == build_id).values( buildslaveid=None) q.execute() def upgrade(migrate_engine): # DB schema in version 044: # # buildslaves: # ... # # builds: # buildslaveid: Integer # ... # # configured_buildslaves: # buildslaveid: Integer, ForeignKey('buildslaves.id') # ... # # connected_buildslaves: # buildslaveid: Integer, ForeignKey('buildslaves.id') # ... # # Desired DB schema in version 045: # # workers: # ... # # builds: # workerid: Integer, ForeignKey('workers.id') # ... # # configured_workers: # workerid: Integer, ForeignKey('workers.id') # ... # # connected_workers: # workerid: Integer, ForeignKey('workers.id') # ... # # So we need to rename three tables, references to them, and add new # foreign key (issue #3088). # Plus indexes must be renamed/recreated. # # There is no external references on tables that being renamed # (i.e. on 'buildslaves', 'configured_buildslaves', # 'connected_buildslaves'), so we can safely recreate them without worrying # that ForeignKey constraints will be violated. metadata = sa.MetaData() metadata.bind = migrate_engine # Remove invalid references in builds -> buildslaves relation. _remove_invalid_references_in_builds(migrate_engine) _create_workers_table(migrate_engine) _create_configured_workers_table(migrate_engine) _create_connected_workers_table(migrate_engine) _add_workerid_fk_to_builds_table(migrate_engine) _migrate_workers_table_data(migrate_engine) _migrate_configured_workers_table_data(migrate_engine) _migrate_connected_workers_table_data(migrate_engine) _migrate_builds_table_data(migrate_engine) _drop_buildslaveid_column_in_builds(migrate_engine) _drop_connected_buildslaves(migrate_engine) _drop_configured_buildslaves(migrate_engine) _drop_buildslaves(migrate_engine) buildbot-2.6.0/master/buildbot/db/migrate/versions/046_mysql_innodb_compatibility.py000066400000000000000000000114611361162603000307020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
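# Illustrative sketch, not from the upstream buildbot source: the orphan-row
# detection used by _remove_invalid_references_in_builds() above -- an outer
# join whose right-hand id comes back NULL flags rows pointing at a missing
# worker, and those references are then cleared.  Simplified two-column
# tables, made-up rows, and the surrounding SQLAlchemy 1.x select style.
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
workers = sa.Table('w', meta, sa.Column('id', sa.Integer, primary_key=True))
builds = sa.Table('b', meta,
                  sa.Column('id', sa.Integer, primary_key=True),
                  sa.Column('workerid', sa.Integer))  # deliberately no FK, as in the old schema
meta.create_all(engine)

with engine.connect() as conn:
    conn.execute(workers.insert(), dict(id=1))
    conn.execute(builds.insert(), [dict(id=1, workerid=1), dict(id=2, workerid=99)])
    q = sa.select([builds.c.id]).select_from(
        builds.outerjoin(workers, builds.c.workerid == workers.c.id)
    ).where((workers.c.id == None) & (builds.c.workerid != None))
    assert [r.id for r in conn.execute(q)] == [2]
    conn.execute(builds.update().where(builds.c.id == 2).values(workerid=None))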
# # Copyright Buildbot Team Members import sqlalchemy as sa from migrate import changeset from sqlalchemy.sql import func from sqlalchemy.sql import or_ from buildbot.util import sautils def _incompatible_changes(metadata, migrate_engine): changes = sautils.Table('changes', metadata, autoload=True) c = changes.c q = sa.select([c.changeid]).where(or_(func.length(c.author) > 255, func.length(c.branch) > 255, func.length(c.revision) > 255, func.length(c.category) > 255)) invalid_changes = q.execute().fetchall() errors = [] if invalid_changes: def format(res): return (" changes.change={id} " "has author, branch, revision or category " "longer than 255".format(id=res[0])) errors = ["- 'changes' table has invalid data:\n" "{0}".format("\n".join(map(format, invalid_changes)))] return errors def _incompatible_object_state(metadata, migrate_engine): object_state = sautils.Table('object_state', metadata, autoload=True) c = object_state.c q = sa.select([c.objectid]).where(func.length(c.name) > 255) invalid_object_states = q.execute().fetchall() errors = [] if invalid_object_states: def format(res): return (" object_state.objectid={id}" " has name longer than 255".format(id=res[0])) errors = ["- 'object_state' table has invalid data:\n" "{0}".format("\n".join(map(format, invalid_object_states)))] return errors def _incompatible_users(metadata, migrate_engine): users = sautils.Table('users', metadata, autoload=True) c = users.c q = sa.select([c.uid]).where(func.length(c.identifier) > 255) invalid_users = q.execute().fetchall() errors = [] if invalid_users: def format(res): return (" users.uid={id} " "has identifier longer than 255".format(id=res[0])) errors = ["- 'users_state' table has invalid data:\n" "{0}".format("\n".join(map(format, invalid_users)))] return errors def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine errors = sum([_incompatible_changes(metadata, migrate_engine), _incompatible_object_state(metadata, migrate_engine), _incompatible_users(metadata, migrate_engine)], []) if errors: raise ValueError("\n".join([""] + errors)) if migrate_engine.dialect.name == 'postgresql': # Sql alchemy migrate does not apply changes on postgresql def reduce_table_column_length(table, column): return 'ALTER TABLE {0} ALTER COLUMN {1} TYPE character varying(255)'.format(table, column) for table, columns in {'changes': ['author', 'branch', 'revision', 'category'], 'object_state': ['name'], 'users': ['identifier']}.items(): for column in columns: migrate_engine.execute( reduce_table_column_length(table, column)) return changeset.alter_column( sa.Column('author', sa.String(255), nullable=False), table='changes', metadata=metadata, engine=migrate_engine) changeset.alter_column( sa.Column('branch', sa.String(255)), table='changes', metadata=metadata, engine=migrate_engine) changeset.alter_column( sa.Column('revision', sa.String(255)), table='changes', metadata=metadata, engine=migrate_engine) changeset.alter_column( sa.Column('category', sa.String(255)), table='changes', metadata=metadata, engine=migrate_engine) changeset.alter_column( sa.Column('name', sa.String(255), nullable=False), table='object_state', metadata=metadata, engine=migrate_engine) changeset.alter_column( sa.Column('identifier', sa.String(255), nullable=False), table='users', metadata=metadata, engine=migrate_engine) buildbot-2.6.0/master/buildbot/db/migrate/versions/047_cascading_deletes.py000066400000000000000000000050371361162603000266770ustar00rootroot00000000000000# This file is part of Buildbot. 
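# Illustrative sketch, not from the upstream buildbot source: the pre-flight
# check pattern used by migration 046 above -- build a SELECT returning the
# ids of rows that would no longer fit once a column is narrowed to 255
# characters, and refuse to migrate if any come back.  Compiled against a
# throwaway table just to show the generated SQL; no engine is needed.
import sqlalchemy as sa
from sqlalchemy.sql import func
from sqlalchemy.sql import or_

meta = sa.MetaData()
changes = sa.Table('changes', meta,
                   sa.Column('changeid', sa.Integer, primary_key=True),
                   sa.Column('author', sa.Text),
                   sa.Column('branch', sa.Text))

q = sa.select([changes.c.changeid]).where(
    or_(func.length(changes.c.author) > 255,
        func.length(changes.c.branch) > 255))
print(str(q))   # SELECT changes.changeid FROM changes WHERE length(...) > ... OR ...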
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from migrate.changeset.constraint import ForeignKeyConstraint from migrate.exceptions import NotSupportedError from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine builders = sautils.Table('builders', metadata, autoload=True) masters = sautils.Table('masters', metadata, autoload=True) workers = sautils.Table('workers', metadata, autoload=True) builder_masters = sautils.Table('builder_masters', metadata, autoload=True) configured_workers = sautils.Table('configured_workers', metadata, autoload=True) fks_to_change = [] # we need to parse the reflected model in order to find the automatic fk name that was put # mysql and pgsql have different naming convention so this is not very easy to have generic code working. for table, keys in [(builder_masters, (builders.c.id, masters.c.id)), (configured_workers, (builder_masters.c.id, workers.c.id))]: for fk in table.constraints: if not isinstance(fk, sa.ForeignKeyConstraint): continue for c in fk.elements: if c.column in keys: # migrate.xx.ForeignKeyConstraint is changing the model so initializing here # would break the iteration (Set changed size during iteration) fks_to_change.append(( table, (fk.columns, [c.column]), dict(name=fk.name, ondelete='CASCADE'))) for table, args, kwargs in fks_to_change: fk = ForeignKeyConstraint(*args, **kwargs) table.append_constraint(fk) try: fk.drop() except NotSupportedError: pass # some versions of sqlite do not support drop, but will still update the fk fk.create() buildbot-2.6.0/master/buildbot/db/migrate/versions/048_change_properties_to_text.py000066400000000000000000000026031361162603000305220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from migrate import changeset def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine if migrate_engine.dialect.name == "postgresql": # changeset.alter_column has no effect on postgres, so we do this with # raw sql migrate_engine.execute( "alter table change_properties alter column property_value type text") else: # Commit messages can get too big for the normal 1024 String limit. 
changeset.alter_column( sa.Column('property_value', sa.Text, nullable=False), table='change_properties', metadata=metadata, engine=migrate_engine) buildbot-2.6.0/master/buildbot/db/migrate/versions/049_add_schedulers_enabled.py000066400000000000000000000020741361162603000277010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine schedulers_table = sautils.Table('schedulers', metadata, autoload=True) enabled = sa.Column('enabled', sa.SmallInteger, nullable=False, server_default="1") enabled.create(schedulers_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/050_cascading_deletes_all.py000066400000000000000000000066051361162603000275230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from migrate.changeset.constraint import ForeignKeyConstraint from migrate.exceptions import NotSupportedError from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine tables = {} for t in TABLES_FKEYS: tables[t] = sautils.Table(t, metadata, autoload=True) fks_to_change = [] # We need to parse the reflected model in order to find the automatic # fk name that was put. # Mysql and postgres have different naming convention so this is not very # easy to have generic code working. 
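    # The loop below inspects every reflected ForeignKeyConstraint, and any
    # constraint that references one of the keys listed in TABLES_FKEYS is
    # recorded so that the second loop can drop it and re-create it with
    # ondelete='CASCADE'.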
for t, keys in TABLES_FKEYS.items(): table = tables[t] for fk in table.constraints: if not isinstance(fk, sa.ForeignKeyConstraint): continue for c in fk.elements: if str(c.column) in keys: # migrate.xx.ForeignKeyConstraint is changing the model # so initializing here would break the iteration # (Set changed size during iteration) fks_to_change.append(( table, (fk.columns, [c.column]), dict(name=fk.name, ondelete='CASCADE'))) for table, args, kwargs in fks_to_change: fk = ForeignKeyConstraint(*args, **kwargs) table.append_constraint(fk) try: fk.drop() except NotSupportedError: # some versions of sqlite do not support drop, # but will still update the fk pass fk.create() TABLES_FKEYS = { 'buildrequests': ['buildsets.id', 'builders.id'], 'buildrequest_claims': ['buildrequests.id', 'masters.id'], 'build_properties': ['builds.id'], 'builds': ['builders.id', 'buildrequests.id', 'workers.id', 'masters.id'], 'steps': ['builds.id'], 'logs': ['steps.id'], 'logchunks': ['logs.id'], 'buildset_properties': ['buildsets.id'], 'buildsets': ['builds.id'], 'changesource_masters': ['changesources.id', 'masters.id'], # 'configured_workers': ['builder_masters.id', 'workers.id'], 'connected_workers': ['masters.id', 'workers.id'], 'changes': ['sourcestamps.id', 'changes.changeid'], 'change_files': ['changes.changeid'], 'change_properties': ['changes.changeid'], 'change_users': ['changes.changeid', 'users.uid'], 'buildset_sourcestamps': ['buildsets.id', 'sourcestamps.id'], 'scheduler_masters': ['schedulers.id', 'masters.id'], 'scheduler_changes': ['schedulers.id', 'changes.changeid'], # 'builder_masters': ['builders.id', 'masters.id'], 'builders_tags': ['builders.id', 'tags.id'], 'object_state': ['objects.id'], 'users_info': ['users.uid'], } buildbot-2.6.0/master/buildbot/db/migrate/versions/051_add_worker_status.py000066400000000000000000000023021361162603000267650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine workers_table = sautils.Table('workers', metadata, autoload=True) paused = sa.Column('paused', sa.SmallInteger, nullable=False, server_default="0") graceful = sa.Column('graceful', sa.SmallInteger, nullable=False, server_default="0") paused.create(workers_table) graceful.create(workers_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/052_cascading_set_null.py000066400000000000000000000073261361162603000270760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings import sqlalchemy as sa from migrate.changeset.constraint import ForeignKeyConstraint from migrate.exceptions import NotSupportedError from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine table_names = set(TABLES_FKEYS_SET_NULL.keys()) table_names.update(TABLES_COLUMNS_NOT_NULL.keys()) tables = {} for t in table_names: tables[t] = sautils.Table(t, metadata, autoload=True) fks_to_change = [] # We need to parse the reflected model in order to find the automatic # fk name that was put. # Mysql and postgres have different naming convention so this is not very # easy to have generic code working. for t, keys in TABLES_FKEYS_SET_NULL.items(): table = tables[t] for fk in table.constraints: if not isinstance(fk, sa.ForeignKeyConstraint): continue for c in fk.elements: if str(c.column) in keys: # migrate.xx.ForeignKeyConstraint is changing the model # so initializing here would break the iteration # (Set changed size during iteration) fks_to_change.append(( table, (fk.columns, [c.column]), dict(name=fk.name, ondelete='SET NULL'))) for table, args, kwargs in fks_to_change: fk = ForeignKeyConstraint(*args, **kwargs) table.append_constraint(fk) try: fk.drop() except NotSupportedError: # some versions of sqlite do not support drop, # but will still update the fk pass fk.create() for t, cols in TABLES_COLUMNS_NOT_NULL.items(): table = tables[t] if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB': migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 0;') try: col_objs = [] where = sa.false() for c in table.columns: if c.name in cols: col_objs.append(c) where |= c == None res = migrate_engine.execute(sa.select(col_objs).where(where)) if res.first(): warnings.warn( 'Inconsistent data found in DB: table %r, deleting invalid rows' % t) migrate_engine.execute(table.delete(where)) for c in col_objs: c.alter(nullable=False) finally: if table.dialect_options.get('mysql', {}).get('engine') == 'InnoDB': migrate_engine.execute('SET FOREIGN_KEY_CHECKS = 1;') TABLES_FKEYS_SET_NULL = { 'builds': ['workers.id'], 'buildsets': ['parent_buildid'], 'changes': ['changes.changeid'], } TABLES_COLUMNS_NOT_NULL = { 'buildrequest_claims': ['masterid'], 'builds': ['builderid'], 'changes': ['sourcestampid'], 'logchunks': ['logid'], 'logs': ['stepid'], 'scheduler_changes': ['schedulerid', 'changeid'], 'steps': ['buildid'], } buildbot-2.6.0/master/buildbot/db/migrate/versions/053_add_changes_committer.py000066400000000000000000000020401361162603000275450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine changes_table = sautils.Table('changes', metadata, autoload=True) committer = sa.Column('committer', sa.String(255), nullable=True, server_default=None) committer.create(changes_table) buildbot-2.6.0/master/buildbot/db/migrate/versions/054_add_index_to_steps.py000066400000000000000000000017701361162603000271130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine steps = sautils.Table('steps', metadata, autoload=True) idx = sa.Index('steps_started_at', steps.c.started_at) idx.create() buildbot-2.6.0/master/buildbot/db/migrate/versions/055_fix_changes_committer.py000066400000000000000000000020071361162603000276100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def upgrade(migrate_engine): metadata = sa.MetaData() metadata.bind = migrate_engine changes_table = sautils.Table('changes', metadata, autoload=True) if not changes_table.c.committer.nullable: changes_table.c.committer.alter(nullable=True) buildbot-2.6.0/master/buildbot/db/migrate/versions/__init__.py000066400000000000000000000000001361162603000244040ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/migrate_utils.py000066400000000000000000000031741361162603000222340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from buildbot.util import sautils def test_unicode(migrate_engine): """Test that the database can handle inserting and selecting Unicode""" # set up a subsidiary MetaData object to hold this temporary table submeta = sa.MetaData() submeta.bind = migrate_engine test_unicode = sautils.Table( 'test_unicode', submeta, sa.Column('u', sa.Unicode(length=100)), sa.Column('b', sa.LargeBinary), ) test_unicode.create() # insert a unicode value in there u = "Frosty the \N{SNOWMAN}" b = b'\xff\xff\x00' ins = test_unicode.insert().values(u=u, b=b) migrate_engine.execute(ins) # see if the data is intact row = migrate_engine.execute(sa.select([test_unicode])).fetchall()[0] assert isinstance(row['u'], str) assert row['u'] == u assert isinstance(row['b'], bytes) assert row['b'] == b # drop the test table test_unicode.drop() buildbot-2.6.0/master/buildbot/db/model.py000066400000000000000000001140471361162603000204660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import migrate import migrate.versioning.repository import sqlalchemy as sa from migrate import exceptions # pylint: disable=ungrouped-imports from twisted.internet import defer from twisted.python import log from twisted.python import util from buildbot.db import base from buildbot.db.migrate_utils import test_unicode from buildbot.db.types.json import JsonObject from buildbot.util import sautils try: from migrate.versioning.schema import ControlledSchema # pylint: disable=ungrouped-imports except ImportError: ControlledSchema = None class EightUpgradeError(Exception): def __init__(self): message = """You are trying to upgrade a buildbot 0.8.x master to buildbot 0.9.x This is not supported. Please start from a clean database http://docs.buildbot.net/latest/manual/installation/nine-upgrade.html""" # Call the base class constructor with the parameters it needs super(EightUpgradeError, self).__init__(message) class Model(base.DBConnectorComponent): # # schema # metadata = sa.MetaData() # NOTES # * server_defaults here are included to match those added by the migration # scripts, but they should not be depended on - all code accessing these # tables should supply default values as necessary. The defaults are # required during migration when adding non-nullable columns to existing # tables. # # * dates are stored as unix timestamps (UTC-ish epoch time) # # * sqlalchemy does not handle sa.Boolean very well on MySQL or Postgres; # use sa.SmallInteger instead # build requests # A BuildRequest is a request for a particular build to be performed. 
Each # BuildRequest is a part of a Buildset. BuildRequests are claimed by # masters, to avoid multiple masters running the same build. buildrequests = sautils.Table( 'buildrequests', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('priority', sa.Integer, nullable=False, server_default=sa.DefaultClause("0")), # if this is zero, then the build is still pending sa.Column('complete', sa.Integer, server_default=sa.DefaultClause("0")), # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS, # etc - see master/buildbot/status/builder.py sa.Column('results', sa.SmallInteger), # time the buildrequest was created sa.Column('submitted_at', sa.Integer, nullable=False), # time the buildrequest was completed, or NULL sa.Column('complete_at', sa.Integer), # boolean indicating whether there is a step blocking, waiting for this # request to complete sa.Column('waited_for', sa.SmallInteger, server_default=sa.DefaultClause("0")), ) # Each row in this table represents a claimed build request, where the # claim is made by the master referenced by masterid. buildrequest_claims = sautils.Table( 'buildrequest_claims', metadata, sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id', ondelete='CASCADE'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), index=True, nullable=False), sa.Column('claimed_at', sa.Integer, nullable=False), ) # builds # This table contains the build properties build_properties = sautils.Table( 'build_properties', metadata, sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('name', sa.String(256), nullable=False), # JSON encoded value sa.Column('value', sa.Text, nullable=False), sa.Column('source', sa.String(256), nullable=False), ) # This table contains basic information about each build. builds = sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), # note that there is 1:N relationship here. # In case of worker loss, build has results RETRY # and buildrequest is unclaimed. # We use use_alter to prevent circular reference # (buildrequests -> buildsets -> builds). 
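        # (use_alter asks SQLAlchemy to add this foreign key in a separate
        # ALTER TABLE statement once both tables exist, which is what breaks
        # the circular dependency at table-creation time.)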
sa.Column('buildrequestid', sa.Integer, sa.ForeignKey( 'buildrequests.id', use_alter=True, name='buildrequestid', ondelete='CASCADE'), nullable=False), # worker which performed this build # keep nullable to support worker-free builds sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='SET NULL'), nullable=True), # master which controlled this build sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), # start/complete times sa.Column('started_at', sa.Integer, nullable=False), sa.Column('complete_at', sa.Integer), sa.Column('state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), ) # steps steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('name', sa.String(50), nullable=False), sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('started_at', sa.Integer), sa.Column('complete_at', sa.Integer), sa.Column('state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), sa.Column('urls_json', sa.Text, nullable=False), sa.Column( 'hidden', sa.SmallInteger, nullable=False, server_default='0'), ) # logs logs = sautils.Table( 'logs', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('name', sa.Text, nullable=False), sa.Column('slug', sa.String(50), nullable=False), sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id', ondelete='CASCADE'), nullable=False), sa.Column('complete', sa.SmallInteger, nullable=False), sa.Column('num_lines', sa.Integer, nullable=False), # 's' = stdio, 't' = text, 'h' = html, 'd' = deleted sa.Column('type', sa.String(1), nullable=False), ) logchunks = sautils.Table( 'logchunks', metadata, sa.Column('logid', sa.Integer, sa.ForeignKey('logs.id', ondelete='CASCADE'), nullable=False), # 0-based line number range in this chunk (inclusive); note that for # HTML logs, this counts lines of HTML, not lines of rendered output sa.Column('first_line', sa.Integer, nullable=False), sa.Column('last_line', sa.Integer, nullable=False), # log contents, including a terminating newline, encoded in utf-8 or, # if 'compressed' is not 0, compressed with gzip, bzip2 or lz4 sa.Column('content', sa.LargeBinary(65536)), sa.Column('compressed', sa.SmallInteger, nullable=False), ) # buildsets # This table contains input properties for buildsets buildset_properties = sautils.Table( 'buildset_properties', metadata, sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), # JSON-encoded tuple of (value, source) sa.Column('property_value', sa.Text, nullable=False), ) # This table represents Buildsets - sets of BuildRequests that share the # same original cause and source information. 
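    # A buildset typically fans out into one buildrequest per targeted
    # builder, and its 'complete'/'results' columns are meant to summarize
    # the outcome of those requests.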
buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, primary_key=True), # a simple external identifier to track down this buildset later, e.g., # for try requests sa.Column('external_idstring', sa.String(256)), # a short string giving the reason the buildset was created sa.Column('reason', sa.String(256)), sa.Column('submitted_at', sa.Integer, nullable=False), # if this is zero, then the build set is still pending sa.Column('complete', sa.SmallInteger, nullable=False, server_default=sa.DefaultClause("0")), sa.Column('complete_at', sa.Integer), # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS, # etc - see master/buildbot/status/builder.py sa.Column('results', sa.SmallInteger), # optional parent build, we use use_alter to prevent circular reference # http://docs.sqlalchemy.org/en/latest/orm/relationships.html#rows-that-point-to-themselves-mutually-dependent-rows sa.Column('parent_buildid', sa.Integer, sa.ForeignKey('builds.id', use_alter=True, name='parent_buildid', ondelete='SET NULL'), nullable=True), # text describing what is the relationship with the build # could be 'triggered from', 'rebuilt from', 'inherited from' sa.Column('parent_relationship', sa.Text), ) # changesources # The changesources table gives a unique identifier to each ChangeSource. It # also links to other tables used to ensure only one master runs each # changesource changesources = sautils.Table( 'changesources', metadata, sa.Column("id", sa.Integer, primary_key=True), # name for this changesource, as given in the configuration, plus a hash # of that name used for a unique index sa.Column('name', sa.Text, nullable=False), sa.Column('name_hash', sa.String(40), nullable=False), ) # This links changesources to the master where they are running. A changesource # linked to a master that is inactive can be unlinked by any master. This # is a separate table so that we can "claim" changesources on a master by # inserting; this has better support in database servers for ensuring that # exactly one claim succeeds. 
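    # (The same claim-by-insert pattern is used for schedulers; see
    # scheduler_masters further down.)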
changesource_masters = sautils.Table( 'changesource_masters', metadata, sa.Column('changesourceid', sa.Integer, sa.ForeignKey('changesources.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # workers workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("name", sa.String(50), nullable=False), sa.Column("info", JsonObject, nullable=False), sa.Column("paused", sa.SmallInteger, nullable=False, server_default="0"), sa.Column("graceful", sa.SmallInteger, nullable=False, server_default="0"), ) # link workers to all builder/master pairs for which they are # configured configured_workers = sautils.Table( 'configured_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('buildermasterid', sa.Integer, sa.ForeignKey('builder_masters.id', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE'), nullable=False), ) # link workers to the masters they are currently connected to connected_workers = sautils.Table( 'connected_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE'), nullable=False), ) # changes # Files touched in changes change_files = sautils.Table( 'change_files', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('filename', sa.String(1024), nullable=False), ) # Properties for changes change_properties = sautils.Table( 'change_properties', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), # JSON-encoded tuple of (value, source) sa.Column('property_value', sa.Text, nullable=False), ) # users associated with this change; this allows multiple users for # situations where a version-control system can represent both an author # and committer, for example. change_users = sautils.Table( "change_users", metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), # uid for the author of the change with the given changeid sa.Column('uid', sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), ) # Changes to the source code, produced by ChangeSources changes = sautils.Table( 'changes', metadata, # changeid also serves as 'change number' sa.Column('changeid', sa.Integer, primary_key=True), # author's name (usually an email address) sa.Column('author', sa.String(255), nullable=False), # committer's name sa.Column('committer', sa.String(255), nullable=True), # commit comment sa.Column('comments', sa.Text, nullable=False), # The branch where this change occurred. When branch is NULL, that # means the main branch (trunk, master, etc.) sa.Column('branch', sa.String(255)), # revision identifier for this change sa.Column('revision', sa.String(255)), # CVS uses NULL sa.Column('revlink', sa.String(256)), # this is the timestamp of the change - it is usually copied from the # version-control system, and may be long in the past or even in the # future! 
sa.Column('when_timestamp', sa.Integer, nullable=False), # an arbitrary string used for filtering changes sa.Column('category', sa.String(255)), # repository specifies, along with revision and branch, the # source tree in which this change was detected. sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), # codebase is a logical name to specify what is in the repository sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), # project names the project this source code represents. It is used # later to filter changes sa.Column('project', sa.String(length=512), nullable=False, server_default=''), # the sourcestamp this change brought the codebase to sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), # The parent of the change # Even if for the moment there's only 1 parent for a change, we use plural here because # somedays a change will have multiple parent. This way we don't need # to change the API sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='SET NULL'), nullable=True), ) # sourcestamps # Patches for SourceStamps that were generated through the try mechanism patches = sautils.Table( 'patches', metadata, sa.Column('id', sa.Integer, primary_key=True), # number of directory levels to strip off (patch -pN) sa.Column('patchlevel', sa.Integer, nullable=False), # base64-encoded version of the patch file sa.Column('patch_base64', sa.Text, nullable=False), # patch author, if known sa.Column('patch_author', sa.Text, nullable=False), # patch comment sa.Column('patch_comment', sa.Text, nullable=False), # subdirectory in which the patch should be applied; NULL for top-level sa.Column('subdir', sa.Text), ) # A sourcestamp identifies a particular instance of the source code. # Ideally, this would always be absolute, but in practice source stamps can # also mean "latest" (when revision is NULL), which is of course a # time-dependent definition. sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), # hash of the branch, revision, patchid, repository, codebase, and # project, using hashColumns. sa.Column('ss_hash', sa.String(40), nullable=False), # the branch to check out. When branch is NULL, that means # the main branch (trunk, master, etc.) 
sa.Column('branch', sa.String(256)), # the revision to check out, or the latest if NULL sa.Column('revision', sa.String(256)), # the patch to apply to generate this source code sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id', ondelete='CASCADE'), nullable=True), # the repository from which this source should be checked out sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), # codebase is a logical name to specify what is in the repository sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), # the project this source code represents sa.Column('project', sa.String(length=512), nullable=False, server_default=''), # the time this sourcetamp was first seen (the first time it was added) sa.Column('created_at', sa.Integer, nullable=False), ) # a many-to-may relationship between buildsets and sourcestamps buildset_sourcestamps = sautils.Table( 'buildset_sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), ) # schedulers # The schedulers table gives a unique identifier to each scheduler. It # also links to other tables used to ensure only one master runs each # scheduler, and to track changes that a scheduler may trigger a build for # later. schedulers = sautils.Table( 'schedulers', metadata, sa.Column("id", sa.Integer, primary_key=True), # name for this scheduler, as given in the configuration, plus a hash # of that name used for a unique index sa.Column('name', sa.Text, nullable=False), sa.Column('name_hash', sa.String(40), nullable=False), sa.Column('enabled', sa.SmallInteger, server_default=sa.DefaultClause("1")), ) # This links schedulers to the master where they are running. A scheduler # linked to a master that is inactive can be unlinked by any master. This # is a separate table so that we can "claim" schedulers on a master by # inserting; this has better support in database servers for ensuring that # exactly one claim succeeds. The ID column is present for external users; # see bug #1053. scheduler_masters = sautils.Table( 'scheduler_masters', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # This table references "classified" changes that have not yet been # "processed". That is, the scheduler has looked at these changes and # determined that something should be done, but that hasn't happened yet. # Rows are deleted from this table as soon as the scheduler is done with # the change. 
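    # Rows are written by SchedulersConnectorComponent.classifyChanges and
    # removed again by flushChangeClassifications (see db/schedulers.py).
    # A purely illustrative call, with made-up change ids, would look like:
    #   yield master.db.schedulers.classifyChanges(
    #       schedulerid, {1234: True, 1235: False})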
scheduler_changes = sautils.Table( 'scheduler_changes', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE'), nullable=False), sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), # true (nonzero) if this change is important to this scheduler sa.Column('important', sa.Integer), ) # builders builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), # builder's name sa.Column('name', sa.Text, nullable=False), # builder's description sa.Column('description', sa.Text, nullable=True), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # This links builders to the master where they are running. A builder # linked to a master that is inactive can be unlinked by any master. Note # that builders can run on multiple masters at the same time. builder_masters = sautils.Table( 'builder_masters', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) # tags tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), # tag's name sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) # a many-to-may relationship between builders and tags builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id', ondelete='CASCADE'), nullable=False), ) # objects # This table uniquely identifies objects that need to maintain state across # invocations. objects = sautils.Table( "objects", metadata, # unique ID for this object sa.Column("id", sa.Integer, primary_key=True), # object's user-given name sa.Column('name', sa.String(128), nullable=False), # object's class name, basically representing a "type" for the state sa.Column('class_name', sa.String(128), nullable=False), ) # This table stores key/value pairs for objects, where the key is a string # and the value is a JSON string. object_state = sautils.Table( "object_state", metadata, # object for which this value is set sa.Column('objectid', sa.Integer, sa.ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), # name for this value (local to the object) sa.Column("name", sa.String(length=255), nullable=False), # value, as a JSON string sa.Column("value_json", sa.Text, nullable=False), ) # users # This table identifies individual users, and contains buildbot-specific # information about those users. users = sautils.Table( "users", metadata, # unique user id number sa.Column("uid", sa.Integer, primary_key=True), # identifier (nickname) for this user; used for display sa.Column("identifier", sa.String(255), nullable=False), # username portion of user credentials for authentication sa.Column("bb_username", sa.String(128)), # password portion of user credentials for authentication sa.Column("bb_password", sa.String(128)), ) # This table stores information identifying a user that's related to a # particular interface - a version-control system, status plugin, etc. 
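    # For example, a row may map attr_type='git' to the author string found
    # in a git commit, so that the same user can be looked up again by the
    # (attr_type, attr_data) pair (note the unique 'users_info_attrs' index
    # below).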
users_info = sautils.Table( "users_info", metadata, # unique user id number sa.Column('uid', sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), # type of user attribute, such as 'git' sa.Column("attr_type", sa.String(128), nullable=False), # data for given user attribute, such as a commit string or password sa.Column("attr_data", sa.String(128), nullable=False), ) # masters masters = sautils.Table( "masters", metadata, # unique id per master sa.Column('id', sa.Integer, primary_key=True), # master's name (generally in the form hostname:basedir) sa.Column('name', sa.Text, nullable=False), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), # true if this master is running sa.Column('active', sa.Integer, nullable=False), # updated periodically by a running master, so silently failed masters # can be detected by other masters sa.Column('last_active', sa.Integer, nullable=False), ) # indexes sa.Index('buildrequests_buildsetid', buildrequests.c.buildsetid) sa.Index('buildrequests_builderid', buildrequests.c.builderid) sa.Index('buildrequests_complete', buildrequests.c.complete) sa.Index('build_properties_buildid', build_properties.c.buildid) sa.Index('builds_buildrequestid', builds.c.buildrequestid) sa.Index('buildsets_complete', buildsets.c.complete) sa.Index('buildsets_submitted_at', buildsets.c.submitted_at) sa.Index('buildset_properties_buildsetid', buildset_properties.c.buildsetid) sa.Index('workers_name', workers.c.name, unique=True) sa.Index('changes_branch', changes.c.branch) sa.Index('changes_revision', changes.c.revision) sa.Index('changes_author', changes.c.author) sa.Index('changes_category', changes.c.category) sa.Index('changes_when_timestamp', changes.c.when_timestamp) sa.Index('change_files_changeid', change_files.c.changeid) sa.Index('change_properties_changeid', change_properties.c.changeid) sa.Index('changes_sourcestampid', changes.c.sourcestampid) sa.Index('changesource_name_hash', changesources.c.name_hash, unique=True) sa.Index('scheduler_name_hash', schedulers.c.name_hash, unique=True) sa.Index('scheduler_changes_schedulerid', scheduler_changes.c.schedulerid) sa.Index('scheduler_changes_changeid', scheduler_changes.c.changeid) sa.Index('scheduler_changes_unique', scheduler_changes.c.schedulerid, scheduler_changes.c.changeid, unique=True) sa.Index('builder_name_hash', builders.c.name_hash, unique=True) sa.Index('builder_masters_builderid', builder_masters.c.builderid) sa.Index('builder_masters_masterid', builder_masters.c.masterid) sa.Index('builder_masters_identity', builder_masters.c.builderid, builder_masters.c.masterid, unique=True) sa.Index('tag_name_hash', tags.c.name_hash, unique=True) sa.Index('builders_tags_builderid', builders_tags.c.builderid) sa.Index('builders_tags_unique', builders_tags.c.builderid, builders_tags.c.tagid, unique=True) sa.Index('configured_workers_buildmasterid', configured_workers.c.buildermasterid) sa.Index('configured_workers_workers', configured_workers.c.workerid) sa.Index('configured_workers_identity', configured_workers.c.buildermasterid, configured_workers.c.workerid, unique=True) sa.Index('connected_workers_masterid', connected_workers.c.masterid) sa.Index('connected_workers_workers', connected_workers.c.workerid) sa.Index('connected_workers_identity', connected_workers.c.masterid, connected_workers.c.workerid, unique=True) sa.Index('users_identifier', users.c.identifier, unique=True) sa.Index('users_info_uid', users_info.c.uid) sa.Index('users_info_uid_attr_type', 
users_info.c.uid, users_info.c.attr_type, unique=True) sa.Index('users_info_attrs', users_info.c.attr_type, users_info.c.attr_data, unique=True) sa.Index('change_users_changeid', change_users.c.changeid) sa.Index('users_bb_user', users.c.bb_username, unique=True) sa.Index('object_identity', objects.c.name, objects.c.class_name, unique=True) sa.Index('name_per_object', object_state.c.objectid, object_state.c.name, unique=True) sa.Index('master_name_hashes', masters.c.name_hash, unique=True) sa.Index('buildrequest_claims_brids', buildrequest_claims.c.brid, unique=True) sa.Index('sourcestamps_ss_hash_key', sourcestamps.c.ss_hash, unique=True) sa.Index('buildset_sourcestamps_buildsetid', buildset_sourcestamps.c.buildsetid) sa.Index('buildset_sourcestamps_unique', buildset_sourcestamps.c.buildsetid, buildset_sourcestamps.c.sourcestampid, unique=True) sa.Index('builds_number', builds.c.builderid, builds.c.number, unique=True) sa.Index('builds_workerid', builds.c.workerid) sa.Index('builds_masterid', builds.c.masterid) sa.Index('steps_number', steps.c.buildid, steps.c.number, unique=True) sa.Index('steps_name', steps.c.buildid, steps.c.name, unique=True) sa.Index('steps_started_at', steps.c.started_at) sa.Index('logs_slug', logs.c.stepid, logs.c.slug, unique=True) sa.Index('logchunks_firstline', logchunks.c.logid, logchunks.c.first_line) sa.Index('logchunks_lastline', logchunks.c.logid, logchunks.c.last_line) # MySQL creates indexes for foreign keys, and these appear in the # reflection. This is a list of (table, index) names that should be # expected on this platform implied_indexes = [ ('change_users', dict(unique=False, column_names=['uid'], name='uid')), ('sourcestamps', dict(unique=False, column_names=['patchid'], name='patchid')), ('scheduler_masters', dict(unique=False, column_names=['masterid'], name='masterid')), ('changesource_masters', dict(unique=False, column_names=['masterid'], name='masterid')), ('buildset_sourcestamps', dict(unique=False, column_names=['sourcestampid'], name='sourcestampid')), ('buildsets', dict(unique=False, column_names=['parent_buildid'], name='parent_buildid')), ('builders_tags', dict(unique=False, column_names=['tagid'], name='tagid')), ('changes', dict(unique=False, column_names=['parent_changeids'], name='parent_changeids')), ] # # migration support # # this is a bit more complicated than might be expected because the first # seven database versions were once implemented using a homespun migration # system, and we need to support upgrading masters from that system. The # old system used a 'version' table, where SQLAlchemy-Migrate uses # 'migrate_version' repo_path = util.sibpath(__file__, "migrate") @defer.inlineCallbacks def is_current(self): if ControlledSchema is None: # this should have been caught earlier by enginestrategy.py with a # nicer error message raise ImportError("SQLAlchemy/SQLAlchemy-Migrate version conflict") def thd(engine): # we don't even have to look at the old version table - if there's # no migrate_version, then we're not up to date. 
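            # In other words: compare the newest script shipped in the
            # migrate repository against the version sqlalchemy-migrate has
            # recorded for this database, if any.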
repo = migrate.versioning.repository.Repository(self.repo_path) repo_version = repo.latest try: # migrate.api doesn't let us hand in an engine schema = ControlledSchema(engine, self.repo_path) db_version = schema.version except exceptions.DatabaseNotControlledError: return False return db_version == repo_version ret = yield self.db.pool.do_with_engine(thd) return ret # returns a Deferred that returns None def create(self): # this is nice and simple, but used only for tests def thd(engine): self.metadata.create_all(bind=engine) return self.db.pool.do_with_engine(thd) @defer.inlineCallbacks def upgrade(self): # here, things are a little tricky. If we have a 'version' table, then # we need to version_control the database with the proper version # number, drop 'version', and then upgrade. If we have no 'version' # table and no 'migrate_version' table, then we need to version_control # the database. Otherwise, we just need to upgrade it. def table_exists(engine, tbl): try: r = engine.execute("select * from %s limit 1" % tbl) r.close() return True except Exception: return False # http://code.google.com/p/sqlalchemy-migrate/issues/detail?id=100 # means we cannot use the migrate.versioning.api module. So these # methods perform similar wrapping functions to what is done by the API # functions, but without disposing of the engine. def upgrade(engine): schema = ControlledSchema(engine, self.repo_path) changeset = schema.changeset(None) with sautils.withoutSqliteForeignKeys(engine): for version, change in changeset: log.msg('migrating schema version %s -> %d' % (version, version + 1)) schema.runchange(version, change, 1) def check_sqlalchemy_migrate_version(): # sqlalchemy-migrate started including a version number in 0.7; we # support back to 0.6.1, but not 0.6. We'll use some discovered # differences between 0.6.1 and 0.6 to get that resolution. version = getattr(migrate, '__version__', 'old') if version == 'old': try: from migrate.versioning import schemadiff if hasattr(schemadiff, 'ColDiff'): version = "0.6.1" else: version = "0.6" except Exception: version = "0.0" version_tup = tuple(map(int, version.split('-', 1)[0].split('.'))) log.msg("using SQLAlchemy-Migrate version %s" % (version,)) if version_tup < (0, 6, 1): raise RuntimeError("You are using SQLAlchemy-Migrate %s. " "The minimum version is 0.6.1." % (version,)) def version_control(engine, version=None): ControlledSchema.create(engine, self.repo_path, version) # the upgrade process must run in a db thread def thd(engine): # if the migrate_version table exists, we can just let migrate # take care of this process. if table_exists(engine, 'migrate_version'): r = engine.execute( "select version from migrate_version limit 1") old_version = r.scalar() if old_version < 40: raise EightUpgradeError() upgrade(engine) # if the version table exists, then we can version_control things # at that version, drop the version table, and let migrate take # care of the rest. 
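            # (In practice a bare 'version' table means a 0.8.x database,
            # which is no longer upgradable in place; see EightUpgradeError
            # above.)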
elif table_exists(engine, 'version'): raise EightUpgradeError() # otherwise, this db is new, so we don't bother using the migration engine # and just create the tables, and put the version directly to # latest else: # do some tests before getting started test_unicode(engine) log.msg("Initializing empty database") Model.metadata.create_all(engine) repo = migrate.versioning.repository.Repository(self.repo_path) version_control(engine, repo.latest) check_sqlalchemy_migrate_version() yield self.db.pool.do_with_engine(thd) buildbot-2.6.0/master/buildbot/db/pool.py000066400000000000000000000220131361162603000203260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect import sqlite3 import time import traceback import sqlalchemy as sa from twisted.internet import defer from twisted.internet import threads from twisted.python import log from twisted.python import threadpool from buildbot.db.buildrequests import AlreadyClaimedError from buildbot.db.buildsets import AlreadyCompleteError from buildbot.db.changesources import ChangeSourceAlreadyClaimedError from buildbot.db.schedulers import SchedulerAlreadyClaimedError from buildbot.process import metrics # set this to True for *very* verbose query debugging output; this can # be monkey-patched from master.cfg, too: # from buildbot.db import pool # pool.debug = True debug = False _debug_id = 1 def timed_do_fn(f): """Decorate a do function to log before, after, and elapsed time, with the name of the calling function. 
This is not speedy!""" def wrap(callable, *args, **kwargs): global _debug_id # get a description of the function that called us st = traceback.extract_stack(limit=2) file, line, name, _ = st[0] # and its locals frame = inspect.currentframe() locals = frame.f_locals # invent a unique ID for the description id, _debug_id = _debug_id, _debug_id + 1 descr = "%s-%08x" % (name, id) start_time = time.time() log.msg("%s - before ('%s' line %d)" % (descr, file, line)) for name in locals: if name in ('self', 'thd'): continue log.msg("%s - %s = %r" % (descr, name, locals[name])) # wrap the callable to log the begin and end of the actual thread # function def callable_wrap(*args, **kargs): log.msg("%s - thd start" % (descr,)) try: return callable(*args, **kwargs) finally: log.msg("%s - thd end" % (descr,)) d = f(callable_wrap, *args, **kwargs) @d.addBoth def after(x): end_time = time.time() elapsed = (end_time - start_time) * 1000 log.msg("%s - after (%0.2f ms elapsed)" % (descr, elapsed)) return x return d wrap.__name__ = f.__name__ wrap.__doc__ = f.__doc__ return wrap class DBThreadPool: running = False def __init__(self, engine, reactor, verbose=False): # verbose is used by upgrade scripts, and if it is set we should print # messages about versions and other warnings log_msg = log.msg if verbose: def _log_msg(m): print(m) log_msg = _log_msg self.reactor = reactor pool_size = 5 # If the engine has an C{optimal_thread_pool_size} attribute, then the # maxthreads of the thread pool will be set to that value. This is # most useful for SQLite in-memory connections, where exactly one # connection (and thus thread) should be used. if hasattr(engine, 'optimal_thread_pool_size'): pool_size = engine.optimal_thread_pool_size self._pool = threadpool.ThreadPool(minthreads=1, maxthreads=pool_size, name='DBThreadPool') self.engine = engine if engine.dialect.name == 'sqlite': vers = self.get_sqlite_version() if vers < (3, 7): log_msg("Using SQLite Version %s" % (vers,)) log_msg("NOTE: this old version of SQLite does not support " "WAL journal mode; a busy master may encounter " "'Database is locked' errors. Consider upgrading.") if vers < (3, 6, 19): log_msg("NOTE: this old version of SQLite is not " "supported.") raise RuntimeError("unsupported SQLite version") self._start_evt = self.reactor.callWhenRunning(self._start) # patch the do methods to do verbose logging if necessary if debug: self.do = timed_do_fn(self.do) self.do_with_engine = timed_do_fn(self.do_with_engine) def _start(self): self._start_evt = None if not self.running: self._pool.start() self._stop_evt = self.reactor.addSystemEventTrigger( 'during', 'shutdown', self._stop_nowait) self.running = True def _stop_nowait(self): self._stop_evt = None threads.deferToThreadPool(self.reactor, self._pool, self.engine.dispose) self._pool.stop() self.running = False @defer.inlineCallbacks def _stop(self): self._stop_evt = None yield threads.deferToThreadPool(self.reactor, self._pool, self.engine.dispose) self._pool.stop() self.running = False @defer.inlineCallbacks def shutdown(self): """Manually stop the pool. This is only necessary from tests, as the pool will stop itself when the reactor stops under normal circumstances.""" if not self._stop_evt: return # pool is already stopped self.reactor.removeSystemEventTrigger(self._stop_evt) yield self._stop() # Try about 170 times over the space of a day, with the last few tries # being about an hour apart. 
This is designed to span a reasonable amount # of time for repairing a broken database server, while still failing # actual problematic queries eventually BACKOFF_START = 1.0 BACKOFF_MULT = 1.05 MAX_OPERATIONALERROR_TIME = 3600 * 24 # one day def __thd(self, with_engine, callable, args, kwargs): # try to call callable(arg, *args, **kwargs) repeatedly until no # OperationalErrors occur, where arg is either the engine (with_engine) # or a connection (not with_engine) backoff = self.BACKOFF_START start = time.time() while True: if with_engine: arg = self.engine else: arg = self.engine.connect() try: try: rv = callable(arg, *args, **kwargs) assert not isinstance(rv, sa.engine.ResultProxy), \ "do not return ResultProxy objects!" except sa.exc.OperationalError as e: if not self.engine.should_retry(e): log.err(e, 'Got fatal OperationalError on DB') raise elapsed = time.time() - start if elapsed > self.MAX_OPERATIONALERROR_TIME: log.err(e, ('Raising due to {0} seconds delay on DB ' 'query retries'.format(self.MAX_OPERATIONALERROR_TIME))) raise metrics.MetricCountEvent.log( "DBThreadPool.retry-on-OperationalError") # sleep (remember, we're in a thread..) time.sleep(backoff) backoff *= self.BACKOFF_MULT # and re-try log.err(e, 'retrying {} after sql error {}'.format(callable, e)) continue except Exception as e: # AlreadyClaimedError are normal especially in a multimaster # configuration if not isinstance(e, (AlreadyClaimedError, ChangeSourceAlreadyClaimedError, SchedulerAlreadyClaimedError, AlreadyCompleteError)): log.err(e, 'Got fatal Exception on DB') raise finally: if not with_engine: arg.close() break return rv @defer.inlineCallbacks def do(self, callable, *args, **kwargs): ret = yield threads.deferToThreadPool(self.reactor, self._pool, self.__thd, False, callable, args, kwargs) return ret @defer.inlineCallbacks def do_with_engine(self, callable, *args, **kwargs): ret = yield threads.deferToThreadPool(self.reactor, self._pool, self.__thd, True, callable, args, kwargs) return ret def get_sqlite_version(self): return sqlite3.sqlite_version_info buildbot-2.6.0/master/buildbot/db/schedulers.py000066400000000000000000000200251361162603000215170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa import sqlalchemy.exc from twisted.internet import defer from buildbot.db import NULL from buildbot.db import base class SchedulerAlreadyClaimedError(Exception): pass class SchedulersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns None def enable(self, schedulerid, v): def thd(conn): tbl = self.db.model.schedulers q = tbl.update(whereclause=(tbl.c.id == schedulerid)) conn.execute(q, enabled=int(v)) return self.db.pool.do(thd) # returns a Deferred that returns None def classifyChanges(self, schedulerid, classifications): def thd(conn): tbl = self.db.model.scheduler_changes ins_q = tbl.insert() upd_q = tbl.update( ((tbl.c.schedulerid == schedulerid) & (tbl.c.changeid == sa.bindparam('wc_changeid')))) for changeid, important in classifications.items(): transaction = conn.begin() # convert the 'important' value into an integer, since that # is the column type imp_int = int(bool(important)) try: conn.execute(ins_q, schedulerid=schedulerid, changeid=changeid, important=imp_int).close() except (sqlalchemy.exc.ProgrammingError, sqlalchemy.exc.IntegrityError): transaction.rollback() transaction = conn.begin() # insert failed, so try an update conn.execute(upd_q, wc_changeid=changeid, important=imp_int).close() transaction.commit() return self.db.pool.do(thd) # returns a Deferred that returns None def flushChangeClassifications(self, schedulerid, less_than=None): def thd(conn): sch_ch_tbl = self.db.model.scheduler_changes wc = (sch_ch_tbl.c.schedulerid == schedulerid) if less_than is not None: wc = wc & (sch_ch_tbl.c.changeid < less_than) q = sch_ch_tbl.delete(whereclause=wc) conn.execute(q).close() return self.db.pool.do(thd) # returns a Deferred that returns a value def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): # -1 here stands for "argument not given", since None has meaning # as a branch def thd(conn): sch_ch_tbl = self.db.model.scheduler_changes ch_tbl = self.db.model.changes wc = (sch_ch_tbl.c.schedulerid == schedulerid) # may need to filter further based on branch, etc extra_wheres = [] if branch != -1: extra_wheres.append(ch_tbl.c.branch == branch) if repository != -1: extra_wheres.append(ch_tbl.c.repository == repository) if project != -1: extra_wheres.append(ch_tbl.c.project == project) if codebase != -1: extra_wheres.append(ch_tbl.c.codebase == codebase) # if we need to filter further append those, as well as a join # on changeid (but just once for that one) if extra_wheres: wc &= (sch_ch_tbl.c.changeid == ch_tbl.c.changeid) for w in extra_wheres: wc &= w q = sa.select( [sch_ch_tbl.c.changeid, sch_ch_tbl.c.important], whereclause=wc) return {r.changeid: [False, True][r.important] for r in conn.execute(q)} return self.db.pool.do(thd) def findSchedulerId(self, name): tbl = self.db.model.schedulers name_hash = self.hashColumns(name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name_hash == name_hash), insert_values=dict( name=name, name_hash=name_hash, )) # returns a Deferred that returns None def setSchedulerMaster(self, schedulerid, masterid): def thd(conn): sch_mst_tbl = self.db.model.scheduler_masters # handle the masterid=None case to get it out of the way if masterid is None: q = sch_mst_tbl.delete( whereclause=(sch_mst_tbl.c.schedulerid == schedulerid)) conn.execute(q).close() return # try a blind insert.. 
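            # if the insert violates the schedulerid primary key, some master
            # has already claimed this scheduler; the except branch below then
            # looks up which master that is (and quietly succeeds if it turns
            # out to be us).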
try: q = sch_mst_tbl.insert() conn.execute(q, dict(schedulerid=schedulerid, masterid=masterid)).close() except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # someone already owns this scheduler, but who? join = self.db.model.masters.outerjoin( sch_mst_tbl, (self.db.model.masters.c.id == sch_mst_tbl.c.masterid)) q = sa.select([self.db.model.masters.c.name, sch_mst_tbl.c.masterid], from_obj=join, whereclause=( sch_mst_tbl.c.schedulerid == schedulerid)) row = conn.execute(q).fetchone() # ok, that was us, so we just do nothing if row['masterid'] == masterid: return raise SchedulerAlreadyClaimedError( "already claimed by {}".format(row['name'])) return self.db.pool.do(thd) @defer.inlineCallbacks def getScheduler(self, schedulerid): sch = yield self.getSchedulers(_schedulerid=schedulerid) if sch: return sch[0] # returns a Deferred that returns a value def getSchedulers(self, active=None, masterid=None, _schedulerid=None): def thd(conn): sch_tbl = self.db.model.schedulers sch_mst_tbl = self.db.model.scheduler_masters # handle the trivial case of masterid=xx and active=False if masterid is not None and active is not None and not active: return [] join = sch_tbl.outerjoin(sch_mst_tbl, (sch_tbl.c.id == sch_mst_tbl.c.schedulerid)) # if we're given a _schedulerid, select only that row wc = None if _schedulerid: wc = (sch_tbl.c.id == _schedulerid) else: # otherwise, filter with active, if necessary if masterid is not None: wc = (sch_mst_tbl.c.masterid == masterid) elif active: wc = (sch_mst_tbl.c.masterid != NULL) elif active is not None: wc = (sch_mst_tbl.c.masterid == NULL) q = sa.select([sch_tbl.c.id, sch_tbl.c.name, sch_tbl.c.enabled, sch_mst_tbl.c.masterid], from_obj=join, whereclause=wc) return [dict(id=row.id, name=row.name, enabled=bool(row.enabled), masterid=row.masterid) for row in conn.execute(q).fetchall()] return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/db/sourcestamps.py000066400000000000000000000161611361162603000221140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
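# setSchedulerMaster() above relies on a "blind insert": a unique constraint on
# schedulerid arbitrates between competing masters, and only on conflict do we
# look up who actually owns the row.  A standalone sketch with sqlite3 and a
# hypothetical scheduler_masters table whose schedulerid column is UNIQUE
# (illustration only, not the Buildbot schema or API):
import sqlite3


def claim_scheduler(conn, schedulerid, masterid):
    try:
        with conn:
            conn.execute(
                "INSERT INTO scheduler_masters (schedulerid, masterid) "
                "VALUES (?, ?)", (schedulerid, masterid))
    except sqlite3.IntegrityError:
        row = conn.execute(
            "SELECT masterid FROM scheduler_masters WHERE schedulerid = ?",
            (schedulerid,)).fetchone()
        if row[0] == masterid:
            return  # that was us already -- nothing to do
        raise RuntimeError("already claimed by master %r" % (row[0],))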
# # Copyright Buildbot Team Members import base64 import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import base from buildbot.util import bytes2unicode from buildbot.util import epoch2datetime from buildbot.util import unicode2bytes class SsDict(dict): pass class SsList(list): pass class SourceStampsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/database.rst @defer.inlineCallbacks def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): sourcestampid, _ = yield self.findOrCreateId( branch, revision, repository, project, codebase, patch_body, patch_level, patch_author, patch_comment, patch_subdir) return sourcestampid @defer.inlineCallbacks def findOrCreateId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): tbl = self.db.model.sourcestamps assert codebase is not None, "codebase cannot be None" assert project is not None, "project cannot be None" assert repository is not None, "repository cannot be None" self.checkLength(tbl.c.branch, branch) self.checkLength(tbl.c.revision, revision) self.checkLength(tbl.c.repository, repository) self.checkLength(tbl.c.project, project) # get a patchid, if we have a patch def thd(conn): patchid = None if patch_body: patch_body_bytes = unicode2bytes(patch_body) patch_base64_bytes = base64.b64encode(patch_body_bytes) ins = self.db.model.patches.insert() r = conn.execute(ins, dict( patchlevel=patch_level, patch_base64=bytes2unicode(patch_base64_bytes), patch_author=patch_author, patch_comment=patch_comment, subdir=patch_subdir)) patchid = r.inserted_primary_key[0] return patchid patchid = yield self.db.pool.do(thd) ss_hash = self.hashColumns(branch, revision, repository, project, codebase, patchid) sourcestampid, found = yield self.findOrCreateSomethingId( tbl=tbl, whereclause=tbl.c.ss_hash == ss_hash, insert_values={ 'branch': branch, 'revision': revision, 'repository': repository, 'codebase': codebase, 'project': project, 'patchid': patchid, 'ss_hash': ss_hash, 'created_at': self.master.reactor.seconds(), }) return sourcestampid, found # returns a Deferred that returns a value @base.cached("ssdicts") def getSourceStamp(self, ssid): def thd(conn): tbl = self.db.model.sourcestamps q = tbl.select(whereclause=(tbl.c.id == ssid)) res = conn.execute(q) row = res.fetchone() if not row: return None ssdict = self._rowToSsdict_thd(conn, row) res.close() return ssdict return self.db.pool.do(thd) # returns a Deferred that returns a value def getSourceStampsForBuild(self, buildid): assert buildid > 0 def thd(conn): # Get SourceStamps for the build builds_tbl = self.db.model.builds reqs_tbl = self.db.model.buildrequests bsets_tbl = self.db.model.buildsets bsss_tbl = self.db.model.buildset_sourcestamps sstamps_tbl = self.db.model.sourcestamps from_clause = builds_tbl.join(reqs_tbl, builds_tbl.c.buildrequestid == reqs_tbl.c.id) from_clause = from_clause.join(bsets_tbl, reqs_tbl.c.buildsetid == bsets_tbl.c.id) from_clause = from_clause.join(bsss_tbl, bsets_tbl.c.id == bsss_tbl.c.buildsetid) from_clause = from_clause.join(sstamps_tbl, bsss_tbl.c.sourcestampid == sstamps_tbl.c.id) q = sa.select([sstamps_tbl]).select_from( from_clause).where(builds_tbl.c.id == buildid) res = conn.execute(q) return [self._rowToSsdict_thd(conn, row) 
for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def getSourceStamps(self): def thd(conn): tbl = self.db.model.sourcestamps q = tbl.select() res = conn.execute(q) return [self._rowToSsdict_thd(conn, row) for row in res.fetchall()] return self.db.pool.do(thd) def _rowToSsdict_thd(self, conn, row): ssid = row.id ssdict = SsDict(ssid=ssid, branch=row.branch, revision=row.revision, patchid=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None, repository=row.repository, codebase=row.codebase, project=row.project, created_at=epoch2datetime(row.created_at)) patchid = row.patchid # fetch the patch, if necessary if patchid is not None: tbl = self.db.model.patches q = tbl.select(whereclause=(tbl.c.id == patchid)) res = conn.execute(q) row = res.fetchone() if row: # note the subtle renaming here ssdict['patchid'] = patchid ssdict['patch_level'] = row.patchlevel ssdict['patch_subdir'] = row.subdir ssdict['patch_author'] = row.patch_author ssdict['patch_comment'] = row.patch_comment ssdict['patch_body'] = base64.b64decode(row.patch_base64) else: log.msg('patchid %d, referenced from ssid %d, not found' % (patchid, ssid)) res.close() return ssdict buildbot-2.6.0/master/buildbot/db/state.py000066400000000000000000000152251361162603000205040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sqlalchemy as sa import sqlalchemy.exc from buildbot.db import base class _IdNotFoundError(Exception): pass # used internally class ObjDict(dict): pass class StateConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst def getObjectId(self, name, class_name): # defer to a cached method that only takes one parameter (a tuple) d = self._getObjectId((name, class_name)) d.addCallback(lambda objdict: objdict['id']) return d # returns a Deferred that returns a value @base.cached('objectids') def _getObjectId(self, name_class_name_tuple): name, class_name = name_class_name_tuple def thd(conn): return self.thdGetObjectId(conn, name, class_name) return self.db.pool.do(thd) def thdGetObjectId(self, conn, name, class_name): objects_tbl = self.db.model.objects name = self.ensureLength(objects_tbl.c.name, name) self.checkLength(objects_tbl.c.class_name, class_name) def select(): q = sa.select([objects_tbl.c.id], whereclause=((objects_tbl.c.name == name) & (objects_tbl.c.class_name == class_name))) res = conn.execute(q) row = res.fetchone() res.close() if not row: raise _IdNotFoundError return row.id def insert(): res = conn.execute(objects_tbl.insert(), name=name, class_name=class_name) return res.inserted_primary_key[0] # we want to try selecting, then inserting, but if the insert fails # then try selecting again. 
We include an invocation of a hook # method to allow tests to exercise this particular behavior try: return ObjDict(id=select()) except _IdNotFoundError: pass self._test_timing_hook(conn) try: return ObjDict(id=insert()) except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): pass return ObjDict(id=select()) class Thunk: pass # returns a Deferred that returns a value def getState(self, objectid, name, default=Thunk): def thd(conn): return self.thdGetState(conn, objectid, name, default=default) return self.db.pool.do(thd) def thdGetState(self, conn, objectid, name, default=Thunk): object_state_tbl = self.db.model.object_state q = sa.select([object_state_tbl.c.value_json], whereclause=((object_state_tbl.c.objectid == objectid) & (object_state_tbl.c.name == name))) res = conn.execute(q) row = res.fetchone() res.close() if not row: if default is self.Thunk: raise KeyError("no such state value '%s' for object %d" % (name, objectid)) return default try: return json.loads(row.value_json) except ValueError: raise TypeError("JSON error loading state value '%s' for %d" % (name, objectid)) # returns a Deferred that returns a value def setState(self, objectid, name, value): def thd(conn): return self.thdSetState(conn, objectid, name, value) return self.db.pool.do(thd) def thdSetState(self, conn, objectid, name, value): object_state_tbl = self.db.model.object_state try: value_json = json.dumps(value) except (TypeError, ValueError): raise TypeError("Error encoding JSON for %r" % (value,)) name = self.ensureLength(object_state_tbl.c.name, name) def update(): q = object_state_tbl.update( whereclause=((object_state_tbl.c.objectid == objectid) & (object_state_tbl.c.name == name))) res = conn.execute(q, value_json=value_json) # check whether that worked return res.rowcount > 0 def insert(): conn.execute(object_state_tbl.insert(), objectid=objectid, name=name, value_json=value_json) # try updating; if that fails, try inserting; if that fails, then # we raced with another instance to insert, so let that instance # win. if update(): return self._test_timing_hook(conn) try: insert() except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): pass # someone beat us to it - oh well def _test_timing_hook(self, conn): # called so tests can simulate another process inserting a database row # at an inopportune moment pass # returns a Deferred that returns a value def atomicCreateState(self, objectid, name, thd_create_callback): def thd(conn): object_state_tbl = self.db.model.object_state res = self.thdGetState(conn, objectid, name, default=None) if res is None: res = thd_create_callback() try: value_json = json.dumps(res) except (TypeError, ValueError): raise TypeError("Error encoding JSON for %r" % (res,)) self._test_timing_hook(conn) try: conn.execute(object_state_tbl.insert(), objectid=objectid, name=name, value_json=value_json) except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError): # someone beat us to it - oh well return that value return self.thdGetState(conn, objectid, name) return res return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/db/steps.py000066400000000000000000000160631361162603000205230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
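# thdGetObjectId() above is a race-tolerant get-or-create: select, insert on a
# miss, and re-select if the insert collides with a concurrent writer.  A
# standalone sketch with sqlite3 and a hypothetical objects table that has a
# UNIQUE(name, class_name) constraint (illustration only, not the Buildbot
# schema or API):
import sqlite3


def get_or_create_object_id(conn, name, class_name):
    def select():
        row = conn.execute(
            "SELECT id FROM objects WHERE name = ? AND class_name = ?",
            (name, class_name)).fetchone()
        return row[0] if row else None

    objectid = select()
    if objectid is not None:
        return objectid
    try:
        with conn:
            conn.execute(
                "INSERT INTO objects (name, class_name) VALUES (?, ?)",
                (name, class_name))
    except sqlite3.IntegrityError:
        pass  # somebody beat us to the insert; the final select finds their row
    return select()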
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base from buildbot.util import epoch2datetime class StepsConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst url_lock = None @defer.inlineCallbacks def getStep(self, stepid=None, buildid=None, number=None, name=None): tbl = self.db.model.steps if stepid is not None: wc = (tbl.c.id == stepid) else: if buildid is None: raise RuntimeError('must supply either stepid or buildid') if number is not None: wc = (tbl.c.number == number) elif name is not None: wc = (tbl.c.name == name) else: raise RuntimeError('must supply either number or name') wc = wc & (tbl.c.buildid == buildid) def thd(conn): q = self.db.model.steps.select(whereclause=wc) res = conn.execute(q) row = res.fetchone() rv = None if row: rv = self._stepdictFromRow(row) res.close() return rv return (yield self.db.pool.do(thd)) # returns a Deferred that returns a value def getSteps(self, buildid): def thd(conn): tbl = self.db.model.steps q = tbl.select() q = q.where(tbl.c.buildid == buildid) q = q.order_by(tbl.c.number) res = conn.execute(q) return [self._stepdictFromRow(row) for row in res.fetchall()] return self.db.pool.do(thd) # returns a Deferred that returns a value def addStep(self, buildid, name, state_string): def thd(conn): tbl = self.db.model.steps # get the highest current number r = conn.execute(sa.select([sa.func.max(tbl.c.number)], whereclause=(tbl.c.buildid == buildid))) number = r.scalar() number = 0 if number is None else number + 1 # note that there is no chance for a race condition here, # since only one master is inserting steps. If there is a # conflict, then the name is likely already taken. insert_row = dict(buildid=buildid, number=number, started_at=None, complete_at=None, state_string=state_string, urls_json='[]', name=name) try: r = conn.execute(self.db.model.steps.insert(), insert_row) got_id = r.inserted_primary_key[0] except (sa.exc.IntegrityError, sa.exc.ProgrammingError): got_id = None if got_id: return (got_id, number, name) # we didn't get an id, so calculate a unique name and use that # instead. Because names are truncated at the right to fit in a # 50-character identifier, this isn't a simple query. 
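# The statements that follow implement the scheme just described: append "_1",
# "_2", ... and trim the base name from the right so the result still fits in
# the 50-character identifier.  A standalone sketch of that scheme
# (illustration only, not the Buildbot code):
def unique_step_name(name, existing_names, max_len=50):
    num = 1
    while True:
        suffix = '_%d' % num
        candidate = name[:max_len - len(suffix)] + suffix
        if candidate not in existing_names:
            return candidate
        num += 1

# e.g. unique_step_name('compile', {'compile', 'compile_1'}) == 'compile_2'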
res = conn.execute(sa.select([tbl.c.name], whereclause=((tbl.c.buildid == buildid)))) names = {row[0] for row in res} num = 1 while True: numstr = '_%d' % num newname = name[:50 - len(numstr)] + numstr if newname not in names: break num += 1 insert_row['name'] = newname r = conn.execute(self.db.model.steps.insert(), insert_row) got_id = r.inserted_primary_key[0] return (got_id, number, newname) return self.db.pool.do(thd) @defer.inlineCallbacks def startStep(self, stepid): started_at = int(self.master.reactor.seconds()) def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, started_at=started_at) yield self.db.pool.do(thd) # returns a Deferred that returns None def setStepStateString(self, stepid, state_string): def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, state_string=state_string) return self.db.pool.do(thd) def addURL(self, stepid, name, url, _racehook=None): # This methods adds an URL to the db # This is a read modify write and thus there is a possibility # that several urls are added at the same time (e.g with a deferredlist # at the end of a step) # this race condition is only inside the same master, as only one master # is supposed to add urls to a buildstep. # so threading.lock is used, as we are in the thread pool if self.url_lock is None: # this runs in reactor thread, so no race here.. self.url_lock = defer.DeferredLock() def thd(conn): tbl = self.db.model.steps wc = (tbl.c.id == stepid) q = sa.select([tbl.c.urls_json], whereclause=wc) res = conn.execute(q) row = res.fetchone() if _racehook is not None: _racehook() urls = json.loads(row.urls_json) url_item = dict(name=name, url=url) if url_item not in urls: urls.append(url_item) q = tbl.update(whereclause=wc) conn.execute(q, urls_json=json.dumps(urls)) return self.url_lock.run(lambda: self.db.pool.do(thd)) # returns a Deferred that returns None def finishStep(self, stepid, results, hidden): def thd(conn): tbl = self.db.model.steps q = tbl.update(whereclause=(tbl.c.id == stepid)) conn.execute(q, complete_at=self.master.reactor.seconds(), results=results, hidden=1 if hidden else 0) return self.db.pool.do(thd) def _stepdictFromRow(self, row): return dict( id=row.id, number=row.number, name=row.name, buildid=row.buildid, started_at=epoch2datetime(row.started_at), complete_at=epoch2datetime(row.complete_at), state_string=row.state_string, results=row.results, urls=json.loads(row.urls_json), hidden=bool(row.hidden)) buildbot-2.6.0/master/buildbot/db/tags.py000066400000000000000000000021001361162603000203060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
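# addURL() above is a read-modify-write of the urls_json column, serialized
# with a DeferredLock so that two concurrent calls on the same master cannot
# overwrite each other's additions.  A standalone sketch of the same idea using
# a threading.Lock around a hypothetical in-memory store (illustration only,
# not the Buildbot code):
import json
import threading

_urls_lock = threading.Lock()
_urls_json_by_stepid = {}  # hypothetical stand-in for the steps.urls_json column


def add_url(stepid, name, url):
    with _urls_lock:
        urls = json.loads(_urls_json_by_stepid.get(stepid, '[]'))
        item = {'name': name, 'url': url}
        if item not in urls:  # keep the list free of duplicates
            urls.append(item)
        _urls_json_by_stepid[stepid] = json.dumps(urls)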
# # Copyright Buildbot Team Members from buildbot.db import base class TagsConnectorComponent(base.DBConnectorComponent): def findTagId(self, name): tbl = self.db.model.tags return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, name_hash=self.hashColumns(name), )) buildbot-2.6.0/master/buildbot/db/types/000077500000000000000000000000001361162603000201515ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/types/__init__.py000066400000000000000000000000001361162603000222500ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/db/types/json.py000066400000000000000000000023111361162603000214710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from sqlalchemy.types import Text from sqlalchemy.types import TypeDecorator class JsonObject(TypeDecorator): """Represents an immutable json-encoded string.""" impl = Text def process_bind_param(self, value, dialect): if value is not None: value = json.dumps(value) return value def process_result_value(self, value, dialect): if value is not None: value = json.loads(value) else: value = {} return value buildbot-2.6.0/master/buildbot/db/users.py000066400000000000000000000220061361162603000205200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
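# JsonObject above (types/json.py) is a SQLAlchemy TypeDecorator: Python values
# are dumped to JSON text on the way into the database and loaded again on the
# way out (with NULL surfaced as {}).  A minimal sketch of attaching it to a
# column, with a hypothetical table name (illustration only, not a Buildbot
# table definition):
import sqlalchemy as sa

from buildbot.db.types.json import JsonObject

demo_metadata = sa.MetaData()
demo_tbl = sa.Table(
    'demo_objects', demo_metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('attrs', JsonObject),  # stored as TEXT, surfaced as dict/list
)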
# # Copyright Buildbot Team Members import sqlalchemy as sa from sqlalchemy.sql.expression import and_ from buildbot.db import base from buildbot.util import identifiers class UsDict(dict): pass class UsersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/db.rst # returns a Deferred that returns a value def findUserByAttr(self, identifier, attr_type, attr_data, _race_hook=None): # note that since this involves two tables, self.findSomethingId is not # helpful def thd(conn, no_recurse=False, identifier=identifier): tbl = self.db.model.users tbl_info = self.db.model.users_info self.checkLength(tbl.c.identifier, identifier) self.checkLength(tbl_info.c.attr_type, attr_type) self.checkLength(tbl_info.c.attr_data, attr_data) # try to find the user q = sa.select([tbl_info.c.uid], whereclause=and_(tbl_info.c.attr_type == attr_type, tbl_info.c.attr_data == attr_data)) rows = conn.execute(q).fetchall() if rows: return rows[0].uid _race_hook and _race_hook(conn) # try to do both of these inserts in a transaction, so that both # the new user and the corresponding attributes appear at the same # time from the perspective of other masters. transaction = conn.begin() inserted_user = False try: r = conn.execute(tbl.insert(), dict(identifier=identifier)) uid = r.inserted_primary_key[0] inserted_user = True conn.execute(tbl_info.insert(), dict(uid=uid, attr_type=attr_type, attr_data=attr_data)) transaction.commit() except (sa.exc.IntegrityError, sa.exc.ProgrammingError): transaction.rollback() # try it all over again, in case there was an overlapping, # identical call to findUserByAttr. If the identifier # collided, we'll try again indefinitely; otherwise, only once. if no_recurse: raise # if we failed to insert the user, then it's because the # identifier wasn't unique if not inserted_user: identifier = identifiers.incrementIdentifier( 256, identifier) else: no_recurse = True return thd(conn, no_recurse=no_recurse, identifier=identifier) return uid return self.db.pool.do(thd) # returns a Deferred that returns a value @base.cached("usdicts") def getUser(self, uid): def thd(conn): tbl = self.db.model.users tbl_info = self.db.model.users_info q = tbl.select(whereclause=(tbl.c.uid == uid)) users_row = conn.execute(q).fetchone() if not users_row: return None # gather all attr_type and attr_data entries from users_info table q = tbl_info.select(whereclause=(tbl_info.c.uid == uid)) rows = conn.execute(q).fetchall() return self.thd_createUsDict(users_row, rows) return self.db.pool.do(thd) def thd_createUsDict(self, users_row, rows): # make UsDict to return usdict = UsDict() for row in rows: usdict[row.attr_type] = row.attr_data # add the users_row data *after* the attributes in case attr_type # matches one of these keys. 
usdict['uid'] = users_row.uid usdict['identifier'] = users_row.identifier usdict['bb_username'] = users_row.bb_username usdict['bb_password'] = users_row.bb_password return usdict # returns a Deferred that returns a value def getUserByUsername(self, username): def thd(conn): tbl = self.db.model.users tbl_info = self.db.model.users_info q = tbl.select(whereclause=(tbl.c.bb_username == username)) users_row = conn.execute(q).fetchone() if not users_row: return None # gather all attr_type and attr_data entries from users_info table q = tbl_info.select(whereclause=(tbl_info.c.uid == users_row.uid)) rows = conn.execute(q).fetchall() return self.thd_createUsDict(users_row, rows) return self.db.pool.do(thd) # returns a Deferred that returns a value def getUsers(self): def thd(conn): tbl = self.db.model.users rows = conn.execute(tbl.select()).fetchall() dicts = [] if rows: for row in rows: ud = dict(uid=row.uid, identifier=row.identifier) dicts.append(ud) return dicts return self.db.pool.do(thd) # returns a Deferred that returns None def updateUser(self, uid=None, identifier=None, bb_username=None, bb_password=None, attr_type=None, attr_data=None, _race_hook=None): def thd(conn): transaction = conn.begin() tbl = self.db.model.users tbl_info = self.db.model.users_info update_dict = {} # first, add the identifier is it exists if identifier is not None: self.checkLength(tbl.c.identifier, identifier) update_dict['identifier'] = identifier # then, add the creds if they exist if bb_username is not None: assert bb_password is not None self.checkLength(tbl.c.bb_username, bb_username) self.checkLength(tbl.c.bb_password, bb_password) update_dict['bb_username'] = bb_username update_dict['bb_password'] = bb_password # update the users table if it needs to be updated if update_dict: q = tbl.update(whereclause=(tbl.c.uid == uid)) res = conn.execute(q, update_dict) # then, update the attributes, carefully handling the potential # update-or-insert race condition. if attr_type is not None: assert attr_data is not None self.checkLength(tbl_info.c.attr_type, attr_type) self.checkLength(tbl_info.c.attr_data, attr_data) # first update, then insert q = tbl_info.update( whereclause=(tbl_info.c.uid == uid) & (tbl_info.c.attr_type == attr_type)) res = conn.execute(q, attr_data=attr_data) if res.rowcount == 0: if _race_hook is not None: _race_hook(conn) # the update hit 0 rows, so try inserting a new one try: q = tbl_info.insert() res = conn.execute(q, uid=uid, attr_type=attr_type, attr_data=attr_data) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # someone else beat us to the punch inserting this row; # let them win. transaction.rollback() return transaction.commit() return self.db.pool.do(thd) # returns a Deferred that returns None def removeUser(self, uid): def thd(conn): # delete from dependent tables first, followed by 'users' for tbl in [ self.db.model.change_users, self.db.model.users_info, self.db.model.users, ]: conn.execute(tbl.delete(whereclause=(tbl.c.uid == uid))) return self.db.pool.do(thd) # returns a Deferred that returns a value def identifierToUid(self, identifier): def thd(conn): tbl = self.db.model.users q = tbl.select(whereclause=(tbl.c.identifier == identifier)) row = conn.execute(q).fetchone() if not row: return None return row.uid return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/db/workers.py000066400000000000000000000230531361162603000210560ustar00rootroot00000000000000# This file is part of Buildbot. 
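# updateUser() above handles the attribute rows with the opposite ordering to
# classifyChanges(): UPDATE first and, if rowcount shows no row was touched,
# INSERT, treating an IntegrityError as "another writer inserted it first".
# A standalone sketch with sqlite3 and a hypothetical users_info table that has
# a UNIQUE(uid, attr_type) constraint (illustration only, not the Buildbot
# schema or API):
import sqlite3


def set_user_attr(conn, uid, attr_type, attr_data):
    with conn:
        res = conn.execute(
            "UPDATE users_info SET attr_data = ? WHERE uid = ? AND attr_type = ?",
            (attr_data, uid, attr_type))
        if res.rowcount > 0:
            return
    try:
        with conn:
            conn.execute(
                "INSERT INTO users_info (uid, attr_type, attr_data) VALUES (?, ?, ?)",
                (uid, attr_type, attr_data))
    except sqlite3.IntegrityError:
        pass  # a concurrent writer inserted the row between our UPDATE and INSERT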
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from buildbot.db import base from buildbot.util import identifiers class WorkersConnectorComponent(base.DBConnectorComponent): # Documentation is in developer/database.rst def findWorkerId(self, name): tbl = self.db.model.workers # callers should verify this and give good user error messages assert identifiers.isIdentifier(50, name) return self.findSomethingId( tbl=tbl, whereclause=(tbl.c.name == name), insert_values=dict( name=name, info={}, paused=0, graceful=0, )) def _deleteFromConfiguredWorkers_thd(self, conn, buildermasterids, workerid=None): cfg_tbl = self.db.model.configured_workers # batch deletes to avoid using too many variables for batch in self.doBatch(buildermasterids, 100): q = cfg_tbl.delete() q = q.where(cfg_tbl.c.buildermasterid.in_(batch)) if workerid: q = q.where(cfg_tbl.c.workerid == workerid) conn.execute(q).close() # returns a Deferred which returns None def deconfigureAllWorkersForMaster(self, masterid): def thd(conn): # first remove the old configured buildermasterids for this master and worker # as sqlalchemy does not support delete with join, we need to do # that in 2 queries cfg_tbl = self.db.model.configured_workers bm_tbl = self.db.model.builder_masters j = cfg_tbl j = j.outerjoin(bm_tbl) q = sa.select( [cfg_tbl.c.buildermasterid], from_obj=[j], distinct=True) q = q.where(bm_tbl.c.masterid == masterid) res = conn.execute(q) buildermasterids = [row['buildermasterid'] for row in res] res.close() self._deleteFromConfiguredWorkers_thd(conn, buildermasterids) return self.db.pool.do(thd) # returns a Deferred that returns None def workerConfigured(self, workerid, masterid, builderids): def thd(conn): cfg_tbl = self.db.model.configured_workers bm_tbl = self.db.model.builder_masters # get the buildermasterids that are configured if builderids: q = sa.select([bm_tbl.c.id], from_obj=[bm_tbl]) q = q.where(bm_tbl.c.masterid == masterid) q = q.where(bm_tbl.c.builderid.in_(builderids)) res = conn.execute(q) buildermasterids = {row['id'] for row in res} res.close() else: buildermasterids = set([]) j = cfg_tbl j = j.outerjoin(bm_tbl) q = sa.select( [cfg_tbl.c.buildermasterid], from_obj=[j], distinct=True) q = q.where(bm_tbl.c.masterid == masterid) q = q.where(cfg_tbl.c.workerid == workerid) res = conn.execute(q) oldbuildermasterids = {row['buildermasterid'] for row in res} res.close() todeletebuildermasterids = oldbuildermasterids - buildermasterids toinsertbuildermasterids = buildermasterids - oldbuildermasterids transaction = conn.begin() self._deleteFromConfiguredWorkers_thd(conn, todeletebuildermasterids, workerid) # and insert the new ones if toinsertbuildermasterids: q = cfg_tbl.insert() conn.execute(q, [{'workerid': workerid, 'buildermasterid': buildermasterid} for buildermasterid in toinsertbuildermasterids]).close() transaction.commit() return 
self.db.pool.do(thd) @defer.inlineCallbacks def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): if workerid is None and name is None: return None workers = yield self.getWorkers(_workerid=workerid, _name=name, masterid=masterid, builderid=builderid) if workers: return workers[0] # returns a Deferred that returns a value def getWorkers(self, _workerid=None, _name=None, masterid=None, builderid=None, paused=None, graceful=None): def thd(conn): workers_tbl = self.db.model.workers conn_tbl = self.db.model.connected_workers cfg_tbl = self.db.model.configured_workers bm_tbl = self.db.model.builder_masters def selectWorker(q): return q # first, get the worker itself and the configured_on info j = workers_tbl j = j.outerjoin(cfg_tbl) j = j.outerjoin(bm_tbl) q = sa.select( [workers_tbl.c.id, workers_tbl.c.name, workers_tbl.c.info, workers_tbl.c.paused, workers_tbl.c.graceful, bm_tbl.c.builderid, bm_tbl.c.masterid], from_obj=[j], order_by=[workers_tbl.c.id]) if _workerid is not None: q = q.where(workers_tbl.c.id == _workerid) if _name is not None: q = q.where(workers_tbl.c.name == _name) if masterid is not None: q = q.where(bm_tbl.c.masterid == masterid) if builderid is not None: q = q.where(bm_tbl.c.builderid == builderid) if paused is not None: q = q.where(workers_tbl.c.paused == int(paused)) if graceful is not None: q = q.where(workers_tbl.c.graceful == int(graceful)) rv = {} res = None lastId = None cfgs = None for row in conn.execute(q): if row.id != lastId: lastId = row.id cfgs = [] res = { 'id': lastId, 'name': row.name, 'configured_on': cfgs, 'connected_to': [], 'workerinfo': row.info, 'paused': bool(row.paused), 'graceful': bool(row.graceful)} rv[lastId] = res if row.builderid and row.masterid: cfgs.append({'builderid': row.builderid, 'masterid': row.masterid}) # now go back and get the connection info for the same set of # workers j = conn_tbl if _name is not None: # note this is not an outer join; if there are unconnected # workers, they were captured in rv above j = j.join(workers_tbl) q = sa.select( [conn_tbl.c.workerid, conn_tbl.c.masterid], from_obj=[j], order_by=[conn_tbl.c.workerid]) if _workerid is not None: q = q.where(conn_tbl.c.workerid == _workerid) if _name is not None: q = q.where(workers_tbl.c.name == _name) if masterid is not None: q = q.where(conn_tbl.c.masterid == masterid) for row in conn.execute(q): id = row.workerid if id not in rv: continue rv[row.workerid]['connected_to'].append(row.masterid) return list(rv.values()) return self.db.pool.do(thd) # returns a Deferred that returns None def workerConnected(self, workerid, masterid, workerinfo): def thd(conn): conn_tbl = self.db.model.connected_workers q = conn_tbl.insert() try: conn.execute(q, {'workerid': workerid, 'masterid': masterid}) except (sa.exc.IntegrityError, sa.exc.ProgrammingError): # if the row is already present, silently fail.. 
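# workerConfigured() above reconciles the configured_workers rows by set
# difference: rows whose buildermasterid is no longer wanted are deleted and
# only the genuinely new ones are inserted.  A standalone sketch of that
# reconciliation step using plain sets (illustration only, not the Buildbot
# code):
def reconcile(old_ids, new_ids):
    """Return (to_delete, to_insert) given the ids currently stored and the
    ids that should be stored after reconfiguration."""
    old_ids = set(old_ids)
    new_ids = set(new_ids)
    return old_ids - new_ids, new_ids - old_ids

# e.g. reconcile({1, 2, 3}, {2, 3, 4}) == ({1}, {4})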
pass bs_tbl = self.db.model.workers q = bs_tbl.update(whereclause=(bs_tbl.c.id == workerid)) conn.execute(q, info=workerinfo) return self.db.pool.do(thd) # returns a Deferred that returns None def workerDisconnected(self, workerid, masterid): def thd(conn): tbl = self.db.model.connected_workers q = tbl.delete(whereclause=(tbl.c.workerid == workerid) & (tbl.c.masterid == masterid)) conn.execute(q) return self.db.pool.do(thd) # returns a Deferred that returns None def setWorkerState(self, workerid, paused, graceful): def thd(conn): tbl = self.db.model.workers q = tbl.update(whereclause=(tbl.c.id == workerid)) conn.execute(q, paused=int(paused), graceful=int(graceful)) return self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/errors.py000066400000000000000000000015361361162603000203130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Having them here prevents all kind of circular dependencies class PluginDBError(Exception): pass class CaptureCallbackError(Exception): pass buildbot-2.6.0/master/buildbot/interfaces.py000066400000000000000000001144101361162603000211160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Interface documentation. Define the interfaces that are implemented by various buildbot classes. """ # disable pylint warnings triggered by interface definitions # pylint: disable=no-self-argument # pylint: disable=no-method-argument # pylint: disable=inherit-non-class from zope.interface import Attribute from zope.interface import Interface # exceptions that can be raised while trying to start a build class BuilderInUseError(Exception): pass class WorkerTooOldError(Exception): pass class LatentWorkerFailedToSubstantiate(Exception): pass class LatentWorkerCannotSubstantiate(Exception): pass class LatentWorkerSubstantiatiationCancelled(Exception): pass class IPlugin(Interface): """ Base interface for all Buildbot plugins """ class IChangeSource(IPlugin): """ Service which feeds Change objects to the changemaster. When files or directories are changed in version control, this object should represent the changes as a change dictionary and call:: self.master.data.updates.addChange(who=.., rev=.., ..) 
See 'Writing Change Sources' in the manual for more information. """ master = Attribute('master', 'Pointer to BuildMaster, automatically set when started.') def describe(): """Return a string which briefly describes this source.""" class ISourceStamp(Interface): """ @cvar branch: branch from which source was drawn @type branch: string or None @cvar revision: revision of the source, or None to use CHANGES @type revision: varies depending on VC @cvar patch: patch applied to the source, or None if no patch @type patch: None or tuple (level diff) @cvar changes: the source step should check out the latest revision in the given changes @type changes: tuple of L{buildbot.changes.changes.Change} instances, all of which are on the same branch @cvar project: project this source code represents @type project: string @cvar repository: repository from which source was drawn @type repository: string """ def canBeMergedWith(self, other): """ Can this SourceStamp be merged with OTHER? """ def mergeWith(self, others): """Generate a SourceStamp for the merger of me and all the other SourceStamps. This is called by a Build when it starts, to figure out what its sourceStamp should be.""" def getAbsoluteSourceStamp(self, got_revision): """Get a new SourceStamp object reflecting the actual revision found by a Source step.""" def getText(self): """Returns a list of strings to describe the stamp. These are intended to be displayed in a narrow column. If more space is available, the caller should join them together with spaces before presenting them to the user.""" class IEmailSender(Interface): """I know how to send email, and can be used by other parts of the Buildbot to contact developers.""" class IEmailLookup(Interface): def getAddress(user): """Turn a User-name string into a valid email address. Either return a string (with an @ in it), None (to indicate that the user cannot be reached by email), or a Deferred which will fire with the same.""" class ILogObserver(Interface): """Objects which provide this interface can be used in a BuildStep to watch the output of a LogFile and parse it incrementally. """ # internal methods def setStep(step): pass def setLog(log): pass # methods called by the LogFile def logChunk(build, step, log, channel, text): pass class IWorker(IPlugin): # callback methods from the manager pass class ILatentWorker(IWorker): """A worker that is not always running, but can run when requested. """ substantiated = Attribute('Substantiated', 'Whether the latent worker is currently ' 'substantiated with a real instance.') def substantiate(): """Request that the worker substantiate with a real instance. Returns a deferred that will callback when a real instance has attached.""" # there is an insubstantiate too, but that is not used externally ATM. def buildStarted(wfb): """Inform the latent worker that a build has started. @param wfb: a L{LatentWorkerForBuilder}. The wfb is the one for whom the build finished. """ def buildFinished(wfb): """Inform the latent worker that a build has finished. @param wfb: a L{LatentWorkerForBuilder}. The wfb is the one for whom the build finished. """ class IMachine(Interface): pass class IMachineAction(Interface): def perform(self, manager): """ Perform an action on the machine managed by manager. Returns a deferred evaluating to True if it was possible to execute the action. """ class ILatentMachine(IMachine): """ A machine that is not always running, but can be started when requested. 
""" class IRenderable(Interface): """An object that can be interpolated with properties from a build. """ def getRenderingFor(iprops): """Return a deferred that fires with interpolation with the given properties @param iprops: the L{IProperties} provider supplying the properties. """ class IProperties(Interface): """ An object providing access to build properties """ def getProperty(name, default=None): """Get the named property, returning the default if the property does not exist. @param name: property name @type name: string @param default: default value (default: @code{None}) @returns: property value """ def hasProperty(name): """Return true if the named property exists. @param name: property name @type name: string @returns: boolean """ def has_key(name): """Deprecated name for L{hasProperty}.""" def setProperty(name, value, source, runtime=False): """Set the given property, overwriting any existing value. The source describes the source of the value for human interpretation. @param name: property name @type name: string @param value: property value @type value: JSON-able value @param source: property source @type source: string @param runtime: (optional) whether this property was set during the build's runtime: usually left at its default value @type runtime: boolean """ def getProperties(): """Get the L{buildbot.process.properties.Properties} instance storing these properties. Note that the interface for this class is not stable, so where possible the other methods of this interface should be used. @returns: L{buildbot.process.properties.Properties} instance """ def getBuild(): """Get the L{buildbot.process.build.Build} instance for the current build. Note that this object is not available after the build is complete, at which point this method will return None. Try to avoid using this method, as the API of L{Build} instances is not well-defined. @returns L{buildbot.process.build.Build} instance """ def render(value): """Render @code{value} as an L{IRenderable}. This essentially coerces @code{value} to an L{IRenderable} and calls its @L{getRenderingFor} method. @name value: value to render @returns: rendered value """ class IScheduler(IPlugin): pass class ITriggerableScheduler(Interface): """ A scheduler that can be triggered by buildsteps. """ def trigger(waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger a build with the given source stamp and properties. """ class IBuildStepFactory(Interface): def buildStep(): pass class IBuildStep(IPlugin): """ A build step """ # Currently has nothing class IConfigured(Interface): def getConfigDict(): pass # #################### Deprecated Status Interfaces #################### class IStatus(Interface): """I am an object, obtainable from the buildmaster, which can provide status information.""" def getTitle(): """Return the name of the project that this Buildbot is working for.""" def getTitleURL(): """Return the URL of this Buildbot's project.""" def getBuildbotURL(): """Return the URL of the top-most Buildbot status page, or None if this Buildbot does not provide a web status page.""" def getURLForThing(thing): """Return the URL of a page which provides information on 'thing', which should be an object that implements one of the status interfaces defined in L{buildbot.interfaces}. 
Returns None if no suitable page is available (or if no Waterfall is running).""" def getChangeSources(): """Return a list of IChangeSource objects.""" def getChange(number): """Return an IChange object.""" def getSchedulers(): """Return a list of ISchedulerStatus objects for all currently-registered Schedulers.""" def getBuilderNames(tags=None): """Return a list of the names of all current Builders.""" def getBuilder(name): """Return the IBuilderStatus object for a given named Builder. Raises KeyError if there is no Builder by that name.""" def getWorkerNames(): """Return a list of worker names, suitable for passing to getWorker().""" def getWorker(name): """Return the IWorkerStatus object for a given named worker.""" def getBuildSets(): """ Return a list of un-completed build sets. @returns: list of L{IBuildSetStatus} implementations, via Deferred. """ def generateFinishedBuilds(builders=None, branches=None, num_builds=None, finished_before=None, max_search=200): """Return a generator that will produce IBuildStatus objects each time you invoke its .next() method, starting with the most recent finished build and working backwards. @param builders: this is a list of Builder names, and the generator will only produce builds that ran on the given Builders. If the list is empty, produce builds from all Builders. @param branches: this is a list of branch names, and the generator will only produce builds that used the given branches. If the list is empty, produce builds from all branches. @param num_builds: the generator will stop after providing this many builds. The default of None means to produce as many builds as possible. @type finished_before: int: a timestamp, seconds since the epoch @param finished_before: if provided, do not produce any builds that finished after the given timestamp. @type max_search: int @param max_search: this method may have to examine a lot of builds to find some that match the search parameters, especially if there aren't any matching builds. This argument imposes a hard limit on the number of builds that will be examined within any given Builder. """ def subscribe(receiver): """Register an IStatusReceiver to receive new status events. The receiver will immediately be sent a set of 'builderAdded' messages for all current builders. It will receive further 'builderAdded' and 'builderRemoved' messages as the config file is reloaded and builders come and go. It will also receive 'buildsetSubmitted' messages for all outstanding BuildSets (and each new BuildSet that gets submitted). No additional messages will be sent unless the receiver asks for them by calling .subscribe on the IBuilderStatus objects which accompany the addedBuilder message.""" def unsubscribe(receiver): """Unregister an IStatusReceiver. No further status messages will be delivered.""" class IBuildSetStatus(Interface): """I represent a set of Builds, each run on a separate Builder but all using the same source tree.""" def getReason(): pass def getID(): """Return the BuildSet's ID string, if any. The 'try' feature uses a random string as a BuildSetID to relate submitted jobs with the resulting BuildSet.""" def getResponsibleUsers(): pass # not implemented def getInterestedUsers(): pass # not implemented def getBuilderNames(): """Return a list of the names of all Builders on which this set will do builds. 
@returns: list of names via Deferred""" def isFinished(): pass def waitUntilFinished(): """Return a Deferred that fires (with this IBuildSetStatus object) when all builds have finished.""" def getResults(): """Return SUCCESS/FAILURE, or None if the buildset is not finished yet""" class IBuildRequestStatus(Interface): """I represent a request to build a particular set of source code on a particular Builder. These requests may be merged by the time they are finally turned into a Build.""" def getSourceStamp(): """ Get a SourceStamp object which can be used to re-create the source tree that this build used. This method will return an absolute SourceStamp if possible, and its results may change as the build progresses. Specifically, a "HEAD" build may later be more accurately specified by an absolute SourceStamp with the specific revision information. This method will return None if the source information is no longer available. @returns: SourceStamp via Deferred """ def getBuilds(): """Return a list of IBuildStatus objects for each Build that has been started in an attempt to satisfy this BuildRequest.""" def subscribe(observer): """Register a callable that will be invoked (with a single IBuildStatus object) for each Build that is created to satisfy this request. There may be multiple Builds created in an attempt to handle the request: they may be interrupted by the user or abandoned due to a lost worker. The last Build (the one which actually gets to run to completion) is said to 'satisfy' the BuildRequest. The observer will be called once for each of these Builds, both old and new.""" def unsubscribe(observer): """Unregister the callable that was registered with subscribe().""" def getSubmitTime(): """Return the time when this request was submitted. Returns a Deferred.""" class IWorkerStatus(Interface): def getName(): """Return the name of the worker.""" def getAdmin(): """Return a string with the worker admin's contact data.""" def getHost(): """Return a string with the worker host info.""" def isConnected(): """Return True if the worker is currently online, False if not.""" def lastMessageReceived(): """Return a timestamp (seconds since epoch) indicating when the most recent message was received from the worker.""" class ISchedulerStatus(Interface): def getName(): """Return the name of this Scheduler (a string).""" def getPendingBuildsets(): """Return an IBuildSet for all BuildSets that are pending. These BuildSets are waiting for their tree-stable-timers to expire.""" # TODO: this is not implemented anywhere class IBuilderStatus(Interface): def getName(): """Return the name of this Builder (a string).""" def getDescription(): """Return the description of this builder (a string).""" def getState(): # TODO: this isn't nearly as meaningful as it used to be """Return a tuple (state, builds) for this Builder. 'state' is the so-called 'big-status', indicating overall status (as opposed to which step is currently running). It is a string, one of 'offline', 'idle', or 'building'. 'builds' is a list of IBuildStatus objects (possibly empty) representing the currently active builds.""" def getWorkers(): """Return a list of IWorkerStatus objects for the workers that are used by this builder.""" def getPendingBuildRequestStatuses(): """ Get a L{IBuildRequestStatus} implementations for all unclaimed build requests. 
@returns: list of objects via Deferred """ def getCurrentBuilds(): """Return a list containing an IBuildStatus object for each build currently in progress.""" # again, we could probably provide an object for 'waiting' and # 'interlocked' too, but things like the Change list might still be # subject to change def getLastFinishedBuild(): """Return the IBuildStatus object representing the last finished build, which may be None if the builder has not yet finished any builds.""" def getBuild(number): """Return an IBuildStatus object for a historical build. Each build is numbered (starting at 0 when the Builder is first added), getBuild(n) will retrieve the Nth such build. getBuild(-n) will retrieve a recent build, with -1 being the most recent build started. If the Builder is idle, this will be the same as getLastFinishedBuild(). If the Builder is active, it will be an unfinished build. This method will return None if the build is no longer available. Older builds are likely to have less information stored: Logs are the first to go, then Steps.""" def getEvent(number): """Return an IStatusEvent object for a recent Event. Builders connecting and disconnecting are events, as are ping attempts. getEvent(-1) will return the most recent event. Events are numbered, but it probably doesn't make sense to ever do getEvent(+n).""" def generateFinishedBuilds(branches=None, num_builds=None, max_buildnum=None, finished_before=None, max_search=200, ): """Return a generator that will produce IBuildStatus objects each time you invoke its .next() method, starting with the most recent finished build, then the previous build, and so on back to the oldest build available. @param branches: this is a list of branch names, and the generator will only produce builds that involve the given branches. If the list is empty, the generator will produce all builds regardless of what branch they used. @param num_builds: if provided, the generator will stop after providing this many builds. The default of None means to produce as many builds as possible. @param max_buildnum: if provided, the generator will start by providing the build with this number, or the highest-numbered preceding build (i.e. the generator will not produce any build numbered *higher* than max_buildnum). The default of None means to start with the most recent finished build. -1 means the same as None. -2 means to start with the next-most-recent completed build, etc. @type finished_before: int: a timestamp, seconds since the epoch @param finished_before: if provided, do not produce any builds that finished after the given timestamp. @type max_search: int @param max_search: this method may have to examine a lot of builds to find some that match the search parameters, especially if there aren't any matching builds. This argument imposes a hard limit on the number of builds that will be examined. """ def subscribe(receiver): """Register an IStatusReceiver to receive new status events. The receiver will be given builderChangedState, buildStarted, and buildFinished messages.""" def unsubscribe(receiver): """Unregister an IStatusReceiver. No further status messages will be delivered.""" class IEventSource(Interface): def eventGenerator(branches=None, categories=None, committers=None, projects=None, minTime=0): """This function creates a generator which will yield all of this object's status events, starting with the most recent and progressing backwards in time. These events provide the IStatusEvent interface. 
At the moment they are all instances of buildbot.status.builder.Event or buildbot.status.builder.BuildStepStatus . @param branches: a list of branch names. The generator should only return events that are associated with these branches. If the list is empty, events for all branches should be returned (i.e. an empty list means 'accept all' rather than 'accept none'). @param categories: a list of category names. The generator should only return events that are categorized within the given category. If the list is empty, events for all categories should be returned. @param comitters: a list of committers. The generator should only return events caused by one of the listed committers. If the list is empty or None, events from every committers should be returned. @param minTime: a timestamp. Do not generate events occurring prior to this timestamp. """ class IBuildStatus(Interface): """I represent the status of a single Build/BuildRequest. It could be in-progress or finished.""" def getBuilder(): """ Return the BuilderStatus that owns this build. @rtype: implementer of L{IBuilderStatus} """ def isFinished(): """Return a boolean. True means the build has finished, False means it is still running.""" def waitUntilFinished(): """Return a Deferred that will fire when the build finishes. If the build has already finished, this deferred will fire right away. The callback is given this IBuildStatus instance as an argument.""" def getReason(): """Return a string that indicates why the build was run. 'changes', 'forced', and 'periodic' are the most likely values. 'try' will be added in the future.""" def getChanges(): """Return a list of Change objects which represent which source changes went into the build.""" def getRevisions(): """Returns a string representing the list of revisions that led to the build, rendered from each Change.revision""" def getResponsibleUsers(): """Return a list of Users who are to blame for the changes that went into this build. If anything breaks (at least anything that wasn't already broken), blame them. Specifically, this is the set of users who were responsible for the Changes that went into this build. Each User is a string, corresponding to their name as known by the VC repository.""" def getInterestedUsers(): """Return a list of Users who will want to know about the results of this build but who did not actually make the Changes that went into it (build sheriffs, code-domain owners).""" def getNumber(): """Within each builder, each Build has a number. Return it.""" def getPreviousBuild(): """Convenience method. Returns None if the previous build is unavailable.""" def getSteps(): """Return a list of dictionary objects. For invariant builds (those which always use the same set of Steps), this should always return the complete list, however some of the steps may not have started yet (step.getTimes()[0] will be None). For variant builds, this may not be complete (asking again later may give you more of them).""" def getTimes(): """Returns a tuple of (start, end). 'start' and 'end' are the times (seconds since the epoch) when the Build started and finished. If the build is still running, 'end' will be None.""" # while the build is running, the following methods make sense. # Afterwards they return None def getCurrentStep(): """Return a dictionary object representing the currently active step.""" # Once you know the build has finished, the following methods are legal. # Before this build has finished, they all return None. 
def getWorkername(): """Return the name of the worker which handled this build.""" def getText(): """Returns a list of strings to describe the build. These are intended to be displayed in a narrow column. If more space is available, the caller should join them together with spaces before presenting them to the user.""" def getResults(): """Return a constant describing the results of the build: one of the constants in buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, SKIPPED or EXCEPTION.""" def getLogs(): """Return a list of logs that describe the build as a whole. Some steps will contribute their logs, while others are less important and will only be accessible through the dictionaries obtained from `getSteps`. Each log is an object which implements the IStatusLog interface.""" def getTestResults(): """Return a dictionary that maps test-name tuples to ITestResult objects. This may return an empty or partially-filled dictionary until the build has completed.""" # subscription interface def subscribe(receiver, updateInterval=None): """Register an IStatusReceiver to receive new status events. The receiver will be given stepStarted and stepFinished messages. If 'updateInterval' is non-None, buildETAUpdate messages will be sent every 'updateInterval' seconds.""" def unsubscribe(receiver): """Unregister an IStatusReceiver. No further status messages will be delivered.""" class IStatusEvent(Interface): """I represent a Builder Event, something non-Build related that can happen to a Builder.""" def getTimes(): """Returns a tuple of (start, end) but end==0 indicates that this is a 'point event', which has no duration. WorkerConnect/Disconnect are point events. Ping is not: it starts when requested and ends when the response (positive or negative) is returned.""" def getText(): """Returns a list of strings which describe the event. These are intended to be displayed in a narrow column. If more space is available, the caller should join them together with spaces before presenting them to the user.""" class IStatusLogConsumer(Interface): """I am an object which can be passed to IStatusLog.subscribeConsumer(). I represent a target for writing the contents of an IStatusLog. This differs from a regular IStatusReceiver in that it can pause the producer. This makes it more suitable for use in streaming data over network sockets, such as an HTTP request. Note that the consumer can only pause the producer until it has caught up with all the old data. After that point, C{pauseProducing} is ignored and all new output from the log is sent directly to the consumer.""" def registerProducer(producer, streaming): """A producer is being hooked up to this consumer. The consumer only has to handle a single producer. It should send .pauseProducing and .resumeProducing messages to the producer when it wants to stop or resume the flow of data. 'streaming' will be set to True because the producer is always a PushProducer. """ def unregisterProducer(): """The previously-registered producer has been removed. No further pauseProducing or resumeProducing calls should be made. The consumer should delete its reference to the Producer so it can be released.""" def writeChunk(chunk): """A chunk (i.e. a tuple of (channel, text)) is being written to the consumer.""" def finish(): """The log has finished sending chunks to the consumer.""" class IStatusReceiver(IPlugin): """I am an object which can receive build status updates.
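A minimal receiver might implement only the messages it cares about; purely as an illustration (the class below is a sketch, not part of Buildbot)::

    class ConsoleReporter:
        def buildFinished(self, builderName, build, results):
            print('%s: build #%d finished' % (builderName, build.getNumber()))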
I may be subscribed to an IStatus, an IBuilderStatus, or an IBuildStatus.""" def buildsetSubmitted(buildset): """A new BuildSet has been submitted to the buildmaster. @type buildset: implementer of L{IBuildSetStatus} """ def requestSubmitted(request): """A new BuildRequest has been submitted to the buildmaster. @type request: implementer of L{IBuildRequestStatus} """ def requestCancelled(builder, request): """A BuildRequest has been cancelled on the given Builder. @type builder: L{buildbot.status.builder.BuilderStatus} @type request: implementer of L{IBuildRequestStatus} """ def builderAdded(builderName, builder): """ A new Builder has just been added. This method may return an IStatusReceiver (probably 'self') which will be subscribed to receive builderChangedState and buildStarted/Finished events. @type builderName: string @type builder: L{buildbot.status.builder.BuilderStatus} @rtype: implementer of L{IStatusReceiver} """ def builderChangedState(builderName, state): """Builder 'builderName' has changed state. The possible values for 'state' are 'offline', 'idle', and 'building'.""" def buildStarted(builderName, build): """Builder 'builderName' has just started a build. The build is an object which implements IBuildStatus, and can be queried for more information. This method may return an IStatusReceiver (it could even return 'self'). If it does so, stepStarted and stepFinished methods will be invoked on the object for the steps of this one build. This is a convenient way to subscribe to all build steps without missing any. This receiver will automatically be unsubscribed when the build finishes. It can also return a tuple of (IStatusReceiver, interval), in which case buildETAUpdate messages are sent every 'interval' seconds, in addition to the stepStarted and stepFinished messages.""" def buildETAUpdate(build, ETA): """This is a periodic update on the progress this Build has made towards completion.""" def changeAdded(change): """A new Change was added to the ChangeMaster. By the time this event is received, all schedulers have already received the change.""" def stepStarted(build, step): """A step has just started. 'step' is a dictionary which represents the step: it can be queried for more information. This method may return an IStatusReceiver (it could even return 'self'). If it does so, logStarted and logFinished methods will be invoked on the object for logs created by this one step. This receiver will be automatically unsubscribed when the step finishes. Alternatively, the method may return a tuple of an IStatusReceiver and an integer named 'updateInterval'.""" def stepTextChanged(build, step, text): """The text for a step has been updated. This is called when calling setText() on the step status, and hands in the text list.""" def stepText2Changed(build, step, text2): """The text2 for a step has been updated. This is called when calling setText2() on the step status, and hands in the text2 list.""" def logStarted(build, step, log): """A new Log has been started, probably because a step has just started running a shell command. 'log' is the IStatusLog object which can be queried for more information. This method may return an IStatusReceiver (such as 'self'), in which case the target's logChunk method will be invoked as text is added to the logfile. This receiver will automatically be unsubscribed when the log finishes.""" def logChunk(build, step, log, channel, text): """Some text has been added to this log.
'channel' is one of LOG_CHANNEL_STDOUT, LOG_CHANNEL_STDERR, or LOG_CHANNEL_HEADER, as defined in IStatusLog.getChunks.""" def logFinished(build, step, log): """A Log has been closed.""" def stepFinished(build, step, results): """A step has just finished. 'results' is the result tuple described in IBuildStepStatus.getResults.""" def buildFinished(builderName, build, results): """ A build has just finished. 'results' is the result tuple described in L{IBuildStatus.getResults}. @type builderName: string @type build: L{buildbot.status.build.BuildStatus} @type results: tuple """ def builderRemoved(builderName): """The Builder has been removed.""" def workerConnected(workerName): """The worker has connected.""" def workerDisconnected(workerName): """The worker has disconnected.""" def checkConfig(otherStatusReceivers): """Verify that there are no other status receivers which conflict with the current one. @type otherStatusReceivers: A list of L{IStatusReceiver} objects which will contain self. """ class IBuilderControl(Interface): def rebuildBuild(buildStatus, reason=""): """Rebuild something we've already built before. This submits a BuildRequest to our Builder using the same SourceStamp as the earlier build. This has no effect (but may eventually raise an exception) if this Build has not yet finished.""" def getPendingBuildRequestControls(): """ Get a list of L{IBuildRequestControl} objects for this Builder. Each one corresponds to an unclaimed build request. @returns: list of objects via Deferred """ def getBuild(number): """Attempt to return an IBuildControl object for the given build. Returns None if no such object is available. This will only work for the build that is currently in progress: once the build finishes, there is nothing to control anymore.""" def ping(): """Attempt to contact the worker and see if it is still alive. This returns a Deferred which fires with either True (the worker is still alive) or False (the worker did not respond). As a side effect, adds an event to this builder's column in the waterfall display containing the results of the ping. Note that this may not fail for a long time, it is implemented in terms of the timeout on the underlying TCP connection.""" class IBuildRequestControl(Interface): def subscribe(observer): """Register a callable that will be invoked (with a single IBuildControl object) for each Build that is created to satisfy this request. There may be multiple Builds created in an attempt to handle the request: they may be interrupted by the user or abandoned due to a lost worker. The last Build (the one which actually gets to run to completion) is said to 'satisfy' the BuildRequest. The observer will be called once for each of these Builds, both old and new.""" def unsubscribe(observer): """Unregister the callable that was registered with subscribe().""" def cancel(): """Remove the build from the pending queue. Has no effect if the build has already been started.""" class IBuildControl(Interface): def getStatus(): """Return an IBuildStatus object for the Build that I control.""" def stopBuild(reason=""): """Halt the build. This has no effect if the build has already finished.""" class IConfigLoader(Interface): def loadConfig(): """ Load the specified configuration. 
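For example (a sketch; the paths are placeholders), the file-based loader that BuildMaster uses by default can be driven like this::

    from buildbot.config import FileLoader
    loader = FileLoader('/path/to/basedir', 'master.cfg')
    master_config = loader.loadConfig()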
:return MasterConfig: """ class IHttpResponse(Interface): def content(): """ :returns: raw (``bytes``) content of the response via deferred """ def json(): """ :returns: json decoded content of the response via deferred """ master = Attribute('code', "http status code of the request's response (e.g 200)") class IConfigurator(Interface): def configure(config_dict): """ Alter the buildbot config_dict, as defined in master.cfg like the master.cfg, this is run out of the main reactor thread, so this can block, but this can't call most Buildbot facilities. :returns: None """ buildbot-2.6.0/master/buildbot/locks.py000066400000000000000000000370021361162603000201070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.util import service from buildbot.util import subscription from buildbot.util.eventual import eventually if False: # for debugging pylint: disable=using-constant-test debuglog = log.msg else: debuglog = lambda m: None # noqa class BaseLock: """ Class handling claiming and releasing of L{self}, and keeping track of current and waiting owners. We maintain the wait queue in FIFO order, and ensure that counting waiters in the queue behind exclusive waiters cannot acquire the lock. This ensures that exclusive waiters are not starved. """ description = "" def __init__(self, name, maxCount=1): super().__init__() # Name of the lock self.lockName = name # Current queue, tuples (waiter, LockAccess, deferred) self.waiting = [] # Current owners, tuples (owner, LockAccess) self.owners = [] # maximal number of counting owners self.maxCount = maxCount # current number of claimed exclusive locks (0 or 1), must match # self.owners self._claimed_excl = 0 # current number of claimed counting locks (0 to self.maxCount), must # match self.owners. 
Note that self.maxCount is not a strict limit, the # number of claimed counting locks may be higher than self.maxCount if # it was lowered by self._claimed_counting = 0 # subscriptions to this lock being released self.release_subs = subscription.SubscriptionPoint("%r releases" % (self,)) def __repr__(self): return self.description def setMaxCount(self, count): old_max_count = self.maxCount self.maxCount = count if count > old_max_count: self._tryWakeUp() def _find_waiting(self, requester): for idx, waiter in enumerate(self.waiting): if waiter[0] is requester: return idx return None def isAvailable(self, requester, access): """ Return a boolean whether the lock is available for claiming """ debuglog("%s isAvailable(%s, %s): self.owners=%r" % (self, requester, access, self.owners)) num_excl, num_counting = self._claimed_excl, self._claimed_counting w_index = self._find_waiting(requester) if w_index is None: w_index = len(self.waiting) ahead = self.waiting[:w_index] if access.mode == 'counting': # Wants counting access return num_excl == 0 and num_counting + len(ahead) < self.maxCount \ and all([w[1].mode == 'counting' for w in ahead]) # else Wants exclusive access return num_excl == 0 and num_counting == 0 and not ahead def _addOwner(self, owner, access): self.owners.append((owner, access)) if access.mode == 'counting': self._claimed_counting += 1 else: self._claimed_excl += 1 assert (self._claimed_excl == 1 and self._claimed_counting == 0) \ or (self._claimed_excl == 0 and self._claimed_excl <= self.maxCount) def _removeOwner(self, owner, access): # returns True if owner removed, False if the lock has been already # released entry = (owner, access) if entry not in self.owners: return False self.owners.remove(entry) if access.mode == 'counting': self._claimed_counting -= 1 else: self._claimed_excl -= 1 return True def claim(self, owner, access): """ Claim the lock (lock must be available) """ debuglog("%s claim(%s, %s)" % (self, owner, access.mode)) assert owner is not None assert self.isAvailable(owner, access), "ask for isAvailable() first" assert isinstance(access, LockAccess) assert access.mode in ['counting', 'exclusive'] self.waiting = [w for w in self.waiting if w[0] is not owner] self._addOwner(owner, access) debuglog(" %s is claimed '%s'" % (self, access.mode)) def subscribeToReleases(self, callback): """Schedule C{callback} to be invoked every time this lock is released. Returns a L{Subscription}.""" return self.release_subs.subscribe(callback) def release(self, owner, access): """ Release the lock """ assert isinstance(access, LockAccess) debuglog("%s release(%s, %s)" % (self, owner, access.mode)) if not self._removeOwner(owner, access): debuglog("%s already released" % self) return self._tryWakeUp() # notify any listeners self.release_subs.deliver() def _tryWakeUp(self): # After an exclusive access, we may need to wake up several waiting. # Break out of the loop when the first waiting client should not be # awakened. num_excl, num_counting = self._claimed_excl, self._claimed_counting for i, (w_owner, w_access, d) in enumerate(self.waiting): if w_access.mode == 'counting': if num_excl > 0 or num_counting >= self.maxCount: break num_counting = num_counting + 1 else: # w_access.mode == 'exclusive' if num_excl > 0 or num_counting > 0: break num_excl = num_excl + 1 # If the waiter has a deferred, wake it up and clear the deferred # from the wait queue entry to indicate that it has been woken. 
if d: self.waiting[i] = (w_owner, w_access, None) eventually(d.callback, self) def waitUntilMaybeAvailable(self, owner, access): """Fire when the lock *might* be available. The deferred may be fired spuriously and the lock is not necessarily available, thus the caller will need to check with isAvailable() when the deferred fires. A single requester must not have more than one pending waitUntilMaybeAvailable() on a single lock. The caller must guarantee, that once the returned deferred is fired, either the lock is checked for availability and claimed if it's available, or the it is indicated as no longer interesting by calling stopWaitingUntilAvailable(). The caller does not need to do this immediately after deferred is fired, an eventual execution is sufficient. """ debuglog("%s waitUntilAvailable(%s)" % (self, owner)) assert isinstance(access, LockAccess) if self.isAvailable(owner, access): return defer.succeed(self) d = defer.Deferred() # Are we already in the wait queue? w_index = self._find_waiting(owner) if w_index is not None: _, _, old_d = self.waiting[w_index] assert old_d is None, "waitUntilMaybeAvailable() must not be called again before the " \ "previous deferred fired" self.waiting[w_index] = (owner, access, d) else: self.waiting.append((owner, access, d)) return d def stopWaitingUntilAvailable(self, owner, access, d): """ Stop waiting for lock to become available. `d` must be the result of a previous call to `waitUntilMaybeAvailable()`. If `d` has not been woken up already by calling its callback, it will be done as part of this function """ debuglog("%s stopWaitingUntilAvailable(%s)" % (self, owner)) assert isinstance(access, LockAccess) w_index = self._find_waiting(owner) assert w_index is not None, "The owner was not waiting for the lock" _, _, old_d = self.waiting[w_index] if old_d is not None: assert d is old_d, "The supplied deferred must be a result of waitUntilMaybeAvailable()" del self.waiting[w_index] d.callback(None) else: del self.waiting[w_index] # if the callback has already been woken up, then it must schedule another waiter, # otherwise we will have an available lock with a waiter list and no-one to wake the # waiters up. 
self._tryWakeUp() def isOwner(self, owner, access): return (owner, access) in self.owners class RealMasterLock(BaseLock, service.SharedService): def __init__(self, name): # the caller will want to call updateFromLockId after initialization super().__init__(name, 0) self.config_version = -1 self._updateDescription() def _updateDescription(self): self.description = "".format(self.lockName, self.maxCount) def getLockForWorker(self, workername): return self def updateFromLockId(self, lockid, config_version): assert self.lockName == lockid.name assert isinstance(config_version, int) self.config_version = config_version self.setMaxCount(lockid.maxCount) self._updateDescription() class RealWorkerLock(service.SharedService): def __init__(self, name): super().__init__() # the caller will want to call updateFromLockId after initialization self.lockName = name self.maxCount = None self.maxCountForWorker = None self.config_version = -1 self._updateDescription() self.locks = {} def __repr__(self): return self.description def getLockForWorker(self, workername): if workername not in self.locks: maxCount = self.maxCountForWorker.get(workername, self.maxCount) lock = self.locks[workername] = BaseLock(self.lockName, maxCount) self._updateDescriptionForLock(lock, workername) self.locks[workername] = lock return self.locks[workername] def _updateDescription(self): self.description = \ "".format(self.lockName, self.maxCount, self.maxCountForWorker) def _updateDescriptionForLock(self, lock, workername): lock.description = \ "".format(lock.lockName, lock.maxCount, workername, id(lock)) def updateFromLockId(self, lockid, config_version): assert self.lockName == lockid.name assert isinstance(config_version, int) self.config_version = config_version self.maxCount = lockid.maxCount self.maxCountForWorker = lockid.maxCountForWorker self._updateDescription() for workername, lock in self.locks.items(): maxCount = self.maxCountForWorker.get(workername, self.maxCount) lock.setMaxCount(maxCount) self._updateDescriptionForLock(lock, workername) class LockAccess(util.ComparableMixin): """ I am an object representing a way to access a lock. @param lockid: LockId instance that should be accessed. @type lockid: A MasterLock or WorkerLock instance. @param mode: Mode of accessing the lock. @type mode: A string, either 'counting' or 'exclusive'. """ compare_attrs = ('lockid', 'mode') def __init__(self, lockid, mode, _skipChecks=False): self.lockid = lockid self.mode = mode if not _skipChecks: # these checks fail with mock < 0.8.0 when lockid is a Mock # TODO: remove this in Buildbot-0.9.0+ assert isinstance(lockid, (MasterLock, WorkerLock)) assert mode in ['counting', 'exclusive'] class BaseLockId(util.ComparableMixin): """ Abstract base class for LockId classes. Sets up the 'access()' function for the LockId's available to the user (MasterLock and WorkerLock classes). Derived classes should add - Comparison with the L{util.ComparableMixin} via the L{compare_attrs} class variable. - Link to the actual lock class should be added with the L{lockClass} class variable. """ def access(self, mode): """ Express how the lock should be accessed """ assert mode in ['counting', 'exclusive'] return LockAccess(self, mode) def defaultAccess(self): """ For buildbot 0.7.7 compatibility: When user doesn't specify an access mode, this one is chosen. """ return self.access('counting') # master.cfg should only reference the following MasterLock and WorkerLock # classes. 
They are identifiers that will be turned into real Locks later, # via the BotMaster.getLockByID method. class MasterLock(BaseLockId): """I am a semaphore that limits the number of simultaneous actions. Builds and BuildSteps can declare that they wish to claim me as they run. Only a limited number of such builds or steps will be able to run simultaneously. By default this number is one, but my maxCount parameter can be raised to allow two or three or more operations to happen at the same time. Use this to protect a resource that is shared among all builders and all workers, for example to limit the load on a common SVN repository. """ compare_attrs = ('name', 'maxCount') lockClass = RealMasterLock def __init__(self, name, maxCount=1): self.name = name self.maxCount = maxCount class WorkerLock(BaseLockId): """I am a semaphore that limits simultaneous actions on each worker. Builds and BuildSteps can declare that they wish to claim me as they run. Only a limited number of such builds or steps will be able to run simultaneously on any given worker. By default this number is one, but my maxCount parameter can be raised to allow two or three or more operations to happen on a single worker at the same time. Use this to protect a resource that is shared among all the builds taking place on each worker, for example to limit CPU or memory load on an underpowered machine. Each worker will get an independent copy of this semaphore. By default each copy will use the same owner count (set with maxCount), but you can provide maxCountForWorker with a dictionary that maps workername to owner count, to allow some workers more parallelism than others. """ compare_attrs = ('name', 'maxCount', '_maxCountForWorkerList') lockClass = RealWorkerLock def __init__(self, name, maxCount=1, maxCountForWorker=None): self.name = name self.maxCount = maxCount if maxCountForWorker is None: maxCountForWorker = {} self.maxCountForWorker = maxCountForWorker # for comparison purposes, turn this dictionary into a stably-sorted # list of tuples self._maxCountForWorkerList = tuple( sorted(self.maxCountForWorker.items())) buildbot-2.6.0/master/buildbot/machine/000077500000000000000000000000001361162603000200245ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/machine/__init__.py000066400000000000000000000013011361162603000221300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-2.6.0/master/buildbot/machine/base.py000066400000000000000000000027121361162603000213120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members from zope.interface import implementer from buildbot import interfaces from buildbot.util import service @implementer(interfaces.IMachine) class Machine(service.BuildbotService): def checkConfig(self, name, **kwargs): super().checkConfig(**kwargs) self.name = name self.workers = [] def reconfigService(self, name, **kwargs): super().reconfigService(**kwargs) assert self.name == name def registerWorker(self, worker): assert worker.machine_name == self.name self.workers.append(worker) def unregisterWorker(self, worker): assert worker in self.workers self.workers.remove(worker) def __repr__(self): return "".format(self.name, id(self)) buildbot-2.6.0/master/buildbot/machine/generic.py000066400000000000000000000150031361162603000220110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import stat from twisted.internet import defer from twisted.internet import utils from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot.interfaces import IMachineAction from buildbot.machine.latent import AbstractLatentMachine from buildbot.util import misc from buildbot.util import private_tempdir from buildbot.util.git import getSshArgsForKeys from buildbot.util.git import getSshKnownHostsContents class GenericLatentMachine(AbstractLatentMachine): def checkConfig(self, name, start_action, stop_action, **kwargs): super().checkConfig(name, **kwargs) for action, arg_name in [(start_action, 'start_action'), (stop_action, 'stop_action')]: if not IMachineAction.providedBy(action): msg = "{} of {} does not implement required " \ "interface".format(arg_name, self.name) raise Exception(msg) def reconfigService(self, name, start_action, stop_action, **kwargs): super().reconfigService(name, **kwargs) self.start_action = start_action self.stop_action = stop_action def start_machine(self): return self.start_action.perform(self) def stop_machine(self): return self.stop_action.perform(self) @defer.inlineCallbacks def runProcessLogFailures(bin, args, expectedCode=0): stdout, stderr, code = yield utils.getProcessOutputAndValue(bin, args) if code != expectedCode: log.err(('Got unexpected return code when running {} {}: ' 'code: {}, stdout: {}, stderr: {}').format(bin, args, code, stdout, stderr)) return False return True class _LocalMachineActionMixin: def setupLocal(self, command): if not isinstance(command, list): config.error('command parameter must be a list') self._command = command @defer.inlineCallbacks def perform(self, manager): args = yield manager.renderSecrets(self._command) return (yield runProcessLogFailures(args[0], args[1:])) class _SshActionMixin: def setupSsh(self, sshBin, host, remoteCommand, sshKey=None, sshHostKey=None): if not isinstance(sshBin, str): config.error('sshBin parameter must be a string') if not isinstance(host, str): config.error('host parameter must be a string') if not isinstance(remoteCommand, list): config.error('remoteCommand parameter must be a list') self._sshBin = sshBin self._host = host self._remoteCommand = remoteCommand self._sshKey = sshKey self._sshHostKey = sshHostKey @defer.inlineCallbacks def _performImpl(self, manager, key_path, known_hosts_path): args = getSshArgsForKeys(key_path, known_hosts_path) args.append((yield manager.renderSecrets(self._host))) args.extend((yield manager.renderSecrets(self._remoteCommand))) return (yield runProcessLogFailures(self._sshBin, args)) @defer.inlineCallbacks def _prepareSshKeys(self, manager, temp_dir_path): key_path = None if self._sshKey is not None: ssh_key_data = yield manager.renderSecrets(self._sshKey) key_path = os.path.join(temp_dir_path, 'ssh-key') misc.writeLocalFile(key_path, ssh_key_data, mode=stat.S_IRUSR) known_hosts_path = None if self._sshHostKey is not None: ssh_host_key_data = yield manager.renderSecrets(self._sshHostKey) ssh_host_key_data = getSshKnownHostsContents(ssh_host_key_data) known_hosts_path = os.path.join(temp_dir_path, 'ssh-known-hosts') misc.writeLocalFile(known_hosts_path, ssh_host_key_data) defer.returnValue((key_path, known_hosts_path)) @defer.inlineCallbacks def perform(self, manager): if self._sshKey is not None or self._sshHostKey is not None: with private_tempdir.PrivateTemporaryDirectory( prefix='ssh-', dir=manager.master.basedir) as temp_dir: key_path, hosts_path = yield 
self._prepareSshKeys(manager, temp_dir) ret = yield self._performImpl(manager, key_path, hosts_path) else: ret = yield self._performImpl(manager, None, None) defer.returnValue(ret) @implementer(IMachineAction) class LocalWakeAction(_LocalMachineActionMixin): def __init__(self, command): self.setupLocal(command) class LocalWOLAction(LocalWakeAction): def __init__(self, wakeMac, wolBin='wakeonlan'): LocalWakeAction.__init__(self, [wolBin, wakeMac]) @implementer(IMachineAction) class RemoteSshWakeAction(_SshActionMixin): def __init__(self, host, remoteCommand, sshBin='ssh', sshKey=None, sshHostKey=None): self.setupSsh(sshBin, host, remoteCommand, sshKey=sshKey, sshHostKey=sshHostKey) class RemoteSshWOLAction(RemoteSshWakeAction): def __init__(self, host, wakeMac, wolBin='wakeonlan', sshBin='ssh', sshKey=None, sshHostKey=None): RemoteSshWakeAction.__init__(self, host, [wolBin, wakeMac], sshBin=sshBin, sshKey=sshKey, sshHostKey=sshHostKey) @implementer(IMachineAction) class RemoteSshSuspendAction(_SshActionMixin): def __init__(self, host, remoteCommand=None, sshBin='ssh', sshKey=None, sshHostKey=None): if remoteCommand is None: remoteCommand = ['systemctl', 'suspend'] self.setupSsh(sshBin, host, remoteCommand, sshKey=sshKey, sshHostKey=sshHostKey) buildbot-2.6.0/master/buildbot/machine/latent.py000066400000000000000000000146351361162603000216760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import enum from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot.machine.base import Machine from buildbot.util import Notifier class States(enum.Enum): # Represents the state of LatentMachine STOPPED = 0 STARTING = 1 STARTED = 2 STOPPING = 3 @implementer(interfaces.ILatentMachine) class AbstractLatentMachine(Machine): DEFAULT_MISSING_TIMEOUT = 20 * 60 def checkConfig(self, name, build_wait_timeout=0, missing_timeout=DEFAULT_MISSING_TIMEOUT, **kwargs): super().checkConfig(name, **kwargs) self.state = States.STOPPED self.latent_workers = [] def reconfigService(self, name, build_wait_timeout=0, missing_timeout=DEFAULT_MISSING_TIMEOUT, **kwargs): super().reconfigService(name, **kwargs) self.build_wait_timeout = build_wait_timeout self.missing_timeout = missing_timeout for worker in self.workers: if not interfaces.ILatentWorker.providedBy(worker): raise Exception('Worker is not latent {}'.format( worker.name)) self.state = States.STOPPED self._start_notifier = Notifier() self._stop_notifier = Notifier() self._build_wait_timer = None self._missing_timer = None def start_machine(self): # Responsible for starting the machine. The function should return a # deferred which should result in True if the startup has been # successful, or False otherwise. 
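# Subclasses supply the actual implementation; GenericLatentMachine in
# buildbot.machine.generic, for instance, delegates to a configurable
# start_action. Purely as an illustration (host names, the MAC address and
# the timeout below are placeholders, not part of this module), a
# master.cfg could wire that subclass up roughly like this:
#
#     from buildbot.machine.generic import (GenericLatentMachine,
#         RemoteSshWOLAction, RemoteSshSuspendAction)
#     c['machines'] = [GenericLatentMachine(
#         'big-builder',
#         start_action=RemoteSshWOLAction('gateway.example.com',
#                                         '00:11:22:33:44:55'),
#         stop_action=RemoteSshSuspendAction('big-builder.example.com'),
#         build_wait_timeout=300)]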
raise NotImplementedError def stop_machine(self): # Responsible for shutting down the machine raise NotImplementedError @defer.inlineCallbacks def substantiate(self, starting_worker): if self.state == States.STOPPING: # wait until stop action finishes yield self._stop_notifier.wait() if self.state == States.STARTED: # may happen if we waited for stop to complete and in the mean # time the machine was successfully woken. return True # wait for already proceeding startup to finish, if any if self.state == States.STARTING: return (yield self._start_notifier.wait()) self.state = States.STARTING # substantiate all workers that will start if we wake the machine. We # do so before waking the machine to guarantee that we're already # waiting for worker connection as waking may take time confirming # machine came online. We'll call substantiate on the worker that # invoked this function again, but that's okay as that function is # reentrant. Note that we substantiate without gathering results # because the original call to substantiate will get them anyway and # we don't want to be slowed down by other workers on the machine. for worker in self.workers: if worker.starts_without_substantiate: worker.substantiate(None, None) # Start the machine. We don't need to wait for any workers to actually # come online as that's handled in their substantiate() functions. try: ret = yield self.start_machine() except Exception as e: log.err(e, 'while starting latent machine {0}'.format(self.name)) ret = False if not ret: yield defer.DeferredList([worker.insubstantiate() for worker in self.workers], consumeErrors=True) else: self._setMissingTimer() self.state = States.STARTED if ret else States.STOPPED self._start_notifier.notify(ret) return ret @defer.inlineCallbacks def _stop(self): if any(worker.building for worker in self.workers) or \ self.state == States.STARTING: return None if self.state == States.STOPPING: yield self._stop_notifier.wait() return None self.state = States.STOPPING # wait until workers insubstantiate, then stop yield defer.DeferredList([worker.insubstantiate() for worker in self.workers], consumeErrors=True) try: yield self.stop_machine() except Exception as e: log.err(e, 'while stopping latent machine {0}'.format( self.name)) self.state = States.STOPPED self._stop_notifier.notify(None) def notifyBuildStarted(self): self._clearMissingTimer() def notifyBuildFinished(self): if any(worker.building for worker in self.workers): self._clearBuildWaitTimer() else: self._setBuildWaitTimer() def _clearMissingTimer(self): if self._missing_timer is not None: if self._missing_timer.active(): self._missing_timer.cancel() self._missing_timer = None def _setMissingTimer(self): self._clearMissingTimer() self._missing_timer = self.master.reactor.callLater( self.missing_timeout, self._stop) def _clearBuildWaitTimer(self): if self._build_wait_timer is not None: if self._build_wait_timer.active(): self._build_wait_timer.cancel() self._build_wait_timer = None def _setBuildWaitTimer(self): self._clearBuildWaitTimer() self._build_wait_timer = self.master.reactor.callLater( self.build_wait_timeout, self._stop) def __repr__(self): return "".format(self.name, id(self)) buildbot-2.6.0/master/buildbot/machine/manager.py000066400000000000000000000023011361162603000220040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members from buildbot.util import service from buildbot.worker.manager import WorkerManager class MachineManager(service.BuildbotServiceManager): reconfig_priority = WorkerManager.reconfig_priority + 1 name = 'MachineManager' managed_services_name = 'machines' config_attr = 'machines' @property def machines(self): return self.namedServices def getMachineByName(self, name): if name in self.machines: return self.machines[name] return None buildbot-2.6.0/master/buildbot/manhole.py000066400000000000000000000316641361162603000204270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import binascii import os import types from twisted.application import strports from twisted.conch import manhole from twisted.conch import telnet from twisted.conch.insults import insults from twisted.cred import checkers from twisted.cred import portal from twisted.internet import protocol from twisted.python import log from zope.interface import implementer # requires Twisted-2.0 or later from buildbot import config from buildbot.util import ComparableMixin from buildbot.util import service from buildbot.util import unicode2bytes try: from twisted.conch import checkers as conchc, manhole_ssh from twisted.conch.openssh_compat.factory import OpenSSHFactory _hush_pyflakes = [manhole_ssh, conchc, OpenSSHFactory] del _hush_pyflakes except ImportError: manhole_ssh = None conchc = None OpenSSHFactory = None # makeTelnetProtocol and _TelnetRealm are for the TelnetManhole class makeTelnetProtocol: # this curries the 'portal' argument into a later call to # TelnetTransport() def __init__(self, portal): self.portal = portal def __call__(self): auth = telnet.AuthenticatingTelnetProtocol return telnet.TelnetTransport(auth, self.portal) @implementer(portal.IRealm) class _TelnetRealm: def __init__(self, namespace_maker): self.namespace_maker = namespace_maker def requestAvatar(self, avatarId, *interfaces): if telnet.ITelnetProtocol in interfaces: namespace = self.namespace_maker() p = telnet.TelnetBootstrapProtocol(insults.ServerProtocol, manhole.ColoredManhole, namespace) return (telnet.ITelnetProtocol, p, lambda: None) raise NotImplementedError() class chainedProtocolFactory: # this curries the 'namespace' argument into a later call to # chainedProtocolFactory() def __init__(self, namespace): self.namespace = namespace def __call__(self): return 
insults.ServerProtocol(manhole.ColoredManhole, self.namespace) if conchc: class AuthorizedKeysChecker(conchc.SSHPublicKeyDatabase): """Accept connections using SSH keys from a given file. SSHPublicKeyDatabase takes the username that the prospective client has requested and attempts to get a ~/.ssh/authorized_keys file for that username. This requires root access, so it isn't as useful as you'd like. Instead, this subclass looks for keys in a single file, given as an argument. This file is typically kept in the buildmaster's basedir. The file should have 'ssh-dss ....' lines in it, just like authorized_keys. """ def __init__(self, authorized_keys_file): self.authorized_keys_file = os.path.expanduser( authorized_keys_file) def checkKey(self, credentials): with open(self.authorized_keys_file, "rb") as f: for l in f.readlines(): l2 = l.split() if len(l2) < 2: continue try: if base64.decodestring(l2[1]) == credentials.blob: return 1 except binascii.Error: continue return 0 class _BaseManhole(service.AsyncMultiService): """This provides remote access to a python interpreter (a read/exec/print loop) embedded in the buildmaster via an internal SSH server. This allows detailed inspection of the buildmaster state. It is of most use to buildbot developers. Connect to this by running an ssh client. """ def __init__(self, port, checker, ssh_hostkey_dir=None): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @type checker: an object providing the L{twisted.cred.checkers.ICredentialsChecker} interface @param checker: if provided, this checker is used to authenticate the client instead of using the username/password scheme. You must either provide a username/password or a Checker. 
Some useful values are:: import twisted.cred.checkers as credc import twisted.conch.checkers as conchc c = credc.AllowAnonymousAccess # completely open c = credc.FilePasswordDB(passwd_filename) # file of name:passwd c = conchc.UNIXPasswordDatabase # getpwnam() (probably /etc/passwd) @type ssh_hostkey_dir: str @param ssh_hostkey_dir: directory which contains ssh host keys for this server """ # unfortunately, these don't work unless we're running as root # c = credc.PluggableAuthenticationModulesChecker: PAM # c = conchc.SSHPublicKeyDatabase() # ~/.ssh/authorized_keys # and I can't get UNIXPasswordDatabase to work super().__init__() if isinstance(port, int): port = "tcp:%d" % port self.port = port # for comparison later self.checker = checker # to maybe compare later def makeNamespace(): master = self.master namespace = { 'master': master, 'status': master.getStatus(), 'show': show, } return namespace def makeProtocol(): namespace = makeNamespace() p = insults.ServerProtocol(manhole.ColoredManhole, namespace) return p self.ssh_hostkey_dir = ssh_hostkey_dir if self.ssh_hostkey_dir: self.using_ssh = True if not self.ssh_hostkey_dir: raise ValueError("Most specify a value for ssh_hostkey_dir") r = manhole_ssh.TerminalRealm() r.chainedProtocolFactory = makeProtocol p = portal.Portal(r, [self.checker]) f = manhole_ssh.ConchFactory(p) openSSHFactory = OpenSSHFactory() openSSHFactory.dataRoot = self.ssh_hostkey_dir openSSHFactory.dataModuliRoot = self.ssh_hostkey_dir f.publicKeys = openSSHFactory.getPublicKeys() f.privateKeys = openSSHFactory.getPrivateKeys() else: self.using_ssh = False r = _TelnetRealm(makeNamespace) p = portal.Portal(r, [self.checker]) f = protocol.ServerFactory() f.protocol = makeTelnetProtocol(p) s = strports.service(self.port, f) s.setServiceParent(self) def startService(self): if self.using_ssh: via = "via SSH" else: via = "via telnet" log.msg("Manhole listening %s on port %s" % (via, self.port)) return super().startService() class TelnetManhole(_BaseManhole, ComparableMixin): """This Manhole accepts unencrypted (telnet) connections, and requires a username and password authorize access. You are encouraged to use the encrypted ssh-based manhole classes instead.""" compare_attrs = ("port", "username", "password") def __init__(self, port, username, password): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @param username: @param password: username= and password= form a pair of strings to use when authenticating the remote user. """ self.username = username self.password = password c = checkers.InMemoryUsernamePasswordDatabaseDontUse() c.addUser(unicode2bytes(username), unicode2bytes(password)) super().__init__(port, c) class PasswordManhole(_BaseManhole, ComparableMixin): """This Manhole accepts encrypted (ssh) connections, and requires a username and password to authorize access. """ compare_attrs = ("port", "username", "password", "ssh_hostkey_dir") def __init__(self, port, username, password, ssh_hostkey_dir): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @param username: @param password: username= and password= form a pair of strings to use when authenticating the remote user. 
@type ssh_hostkey_dir: str @param ssh_hostkey_dir: directory which contains ssh host keys for this server """ if not manhole_ssh: config.error("cryptography required for ssh mahole.") self.username = username self.password = password self.ssh_hostkey_dir = ssh_hostkey_dir c = checkers.InMemoryUsernamePasswordDatabaseDontUse() c.addUser(unicode2bytes(username), unicode2bytes(password)) super().__init__(port, c, ssh_hostkey_dir) class AuthorizedKeysManhole(_BaseManhole, ComparableMixin): """This Manhole accepts ssh connections, and requires that the prospective client have an ssh private key that matches one of the public keys in our authorized_keys file. It is created with the name of a file that contains the public keys that we will accept.""" compare_attrs = ("port", "keyfile", "ssh_hostkey_dir") def __init__(self, port, keyfile, ssh_hostkey_dir): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @param keyfile: the name of a file (relative to the buildmaster's basedir) that contains SSH public keys of authorized users, one per line. This is the exact same format as used by sshd in ~/.ssh/authorized_keys . @type ssh_hostkey_dir: str @param ssh_hostkey_dir: directory which contains ssh host keys for this server """ if not manhole_ssh: config.error("cryptography required for ssh mahole.") # TODO: expanduser this, and make it relative to the buildmaster's # basedir self.keyfile = keyfile c = AuthorizedKeysChecker(keyfile) super().__init__(port, c, ssh_hostkey_dir) class ArbitraryCheckerManhole(_BaseManhole, ComparableMixin): """This Manhole accepts ssh connections, but uses an arbitrary user-supplied 'checker' object to perform authentication.""" compare_attrs = ("port", "checker") def __init__(self, port, checker): """ @type port: string or int @param port: what port should the Manhole listen on? This is a strports specification string, like 'tcp:12345' or 'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a simple tcp port. @param checker: an instance of a twisted.cred 'checker' which will perform authentication """ if not manhole_ssh: config.error("cryptography required for ssh mahole.") super().__init__(port, checker) # utility functions for the manhole def show(x): """Display the data attributes of an object in a readable format""" print("data attributes of %r" % (x,)) names = dir(x) maxlen = max([0] + [len(n) for n in names]) for k in names: v = getattr(x, k) if isinstance(v, types.MethodType): continue if k[:2] == '__' and k[-2:] == '__': continue if isinstance(v, str): if len(v) > 80 - maxlen - 5: v = repr(v[:80 - maxlen - 5]) + "..." elif isinstance(v, (int, type(None))): v = str(v) elif isinstance(v, (list, tuple, dict)): v = "%s (%d elements)" % (v, len(v)) else: v = str(type(v)) print("%*s : %s" % (maxlen, k, v)) return x buildbot-2.6.0/master/buildbot/master.py000066400000000000000000000424641361162603000202770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import signal import socket from twisted.application import internet from twisted.internet import defer from twisted.internet import task from twisted.internet import threads from twisted.python import failure from twisted.python import log import buildbot import buildbot.pbmanager from buildbot import config from buildbot import monkeypatches from buildbot.buildbot_net_usage_data import sendBuildbotNetUsageData from buildbot.changes.manager import ChangeManager from buildbot.data import connector as dataconnector from buildbot.db import connector as dbconnector from buildbot.db import exceptions from buildbot.machine.manager import MachineManager from buildbot.mq import connector as mqconnector from buildbot.process import cache from buildbot.process import debug from buildbot.process import metrics from buildbot.process.botmaster import BotMaster from buildbot.process.users.manager import UserManagerManager from buildbot.schedulers.manager import SchedulerManager from buildbot.secrets.manager import SecretManager from buildbot.status.master import Status from buildbot.util import check_functional_environment from buildbot.util import service from buildbot.util.eventual import eventually from buildbot.wamp import connector as wampconnector from buildbot.worker import manager as workermanager from buildbot.www import service as wwwservice class LogRotation: def __init__(self): self.rotateLength = 1 * 1000 * 1000 self.maxRotatedFiles = 10 class BuildMaster(service.ReconfigurableServiceMixin, service.MasterService): # multiplier on RECLAIM_BUILD_INTERVAL at which a build is considered # unclaimed; this should be at least 2 to avoid false positives UNCLAIMED_BUILD_FACTOR = 6 def __init__(self, basedir, configFileName=None, umask=None, reactor=None, config_loader=None): super().__init__() if reactor is None: from twisted.internet import reactor self.reactor = reactor self.setName("buildmaster") self.umask = umask self.basedir = basedir if basedir is not None: # None is used in tests assert os.path.isdir(self.basedir) if config_loader is not None and configFileName is not None: raise config.ConfigErrors([ "Can't specify both `config_loader` and `configFilename`.", ]) if config_loader is None: if configFileName is None: configFileName = 'master.cfg' config_loader = config.FileLoader(self.basedir, configFileName) self.config_loader = config_loader self.configFileName = configFileName # flag so we don't try to do fancy things before the master is ready self._master_initialized = False self.initLock = defer.DeferredLock() # set up child services self._services_d = self.create_child_services() # db configured values self.configured_db_url = None # configuration / reconfiguration handling self.config = config.MasterConfig() self.config_version = 0 # increased by one on each reconfig self.reconfig_active = False self.reconfig_requested = False self.reconfig_notifier = None # this stores parameters used in the tac file, and is accessed by the # WebStatus to duplicate those values. 
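# As a rough illustration, the buildbot.tac that 'buildbot create-master'
# generates sets these attributes on the freshly created master (sketch;
# the rotateLength/maxRotatedFiles variables come from the tac file, not
# from this module):
#
#     m = BuildMaster(basedir, configfile, umask)
#     m.setServiceParent(application)
#     m.log_rotation.rotateLength = rotateLength
#     m.log_rotation.maxRotatedFiles = maxRotatedFiles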
self.log_rotation = LogRotation() # local cache for this master's object ID self._object_id = None # Check environment is sensible check_functional_environment(self.config) # figure out local hostname try: self.hostname = os.uname()[1] # only on unix except AttributeError: self.hostname = socket.getfqdn() # public attributes self.name = ("%s:%s" % (self.hostname, os.path.abspath(self.basedir or '.'))) if isinstance(self.name, bytes): self.name = self.name.decode('ascii', 'replace') self.masterid = None @defer.inlineCallbacks def create_child_services(self): # note that these are order-dependent. If you get the order wrong, # you'll know it, as the master will fail to start. self.metrics = metrics.MetricLogObserver() yield self.metrics.setServiceParent(self) self.caches = cache.CacheManager() yield self.caches.setServiceParent(self) self.pbmanager = buildbot.pbmanager.PBManager() yield self.pbmanager.setServiceParent(self) self.workers = workermanager.WorkerManager(self) yield self.workers.setServiceParent(self) self.change_svc = ChangeManager() yield self.change_svc.setServiceParent(self) self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self) self.machine_manager = MachineManager() yield self.machine_manager.setServiceParent(self) self.scheduler_manager = SchedulerManager() yield self.scheduler_manager.setServiceParent(self) self.user_manager = UserManagerManager(self) yield self.user_manager.setServiceParent(self) self.db = dbconnector.DBConnector(self.basedir) yield self.db.setServiceParent(self) self.wamp = wampconnector.WampConnector() yield self.wamp.setServiceParent(self) self.mq = mqconnector.MQConnector() yield self.mq.setServiceParent(self) self.data = dataconnector.DataConnector() yield self.data.setServiceParent(self) self.www = wwwservice.WWWService() yield self.www.setServiceParent(self) self.debug = debug.DebugServices() yield self.debug.setServiceParent(self) self.status = Status() yield self.status.setServiceParent(self) self.secrets_manager = SecretManager() yield self.secrets_manager.setServiceParent(self) self.secrets_manager.reconfig_priority = 2000 self.service_manager = service.BuildbotServiceManager() yield self.service_manager.setServiceParent(self) self.service_manager.reconfig_priority = 1000 self.masterHouskeepingTimer = 0 @defer.inlineCallbacks def heartbeat(): if self.masterid is not None: yield self.data.updates.masterActive(name=self.name, masterid=self.masterid) yield self.data.updates.expireMasters() self.masterHeartbeatService = internet.TimerService(60, heartbeat) self.masterHeartbeatService.clock = self.reactor # we do setServiceParent only when the master is configured # master should advertise itself only at that time # setup and reconfig handling _already_started = False @defer.inlineCallbacks def startService(self): assert not self._already_started, "can only start the master once" self._already_started = True # ensure child services have been set up. Normally we would do this in serServiceParent, # but buildmaster is used in contexts we can't control. 
if self._services_d is not None: yield self._services_d self._services_d = None log.msg("Starting BuildMaster -- buildbot.version: %s" % buildbot.version) # Set umask if self.umask is not None: os.umask(self.umask) # first, apply all monkeypatches monkeypatches.patch_all() # we want to wait until the reactor is running, so we can call # reactor.stop() for fatal errors d = defer.Deferred() self.reactor.callWhenRunning(d.callback, None) yield d startup_succeed = False try: yield self.initLock.acquire() # load the configuration file, treating errors as fatal try: # run the master.cfg in thread, so that it can use blocking # code self.config = yield threads.deferToThreadPool( self.reactor, self.reactor.getThreadPool(), self.config_loader.loadConfig) except config.ConfigErrors as e: log.msg("Configuration Errors:") for msg in e.errors: log.msg(" " + msg) log.msg("Halting master.") self.reactor.stop() return except Exception: log.err(failure.Failure(), 'while starting BuildMaster') self.reactor.stop() return # set up services that need access to the config before everything # else gets told to reconfig try: yield self.db.setup() except exceptions.DatabaseNotReadyError: # (message was already logged) self.reactor.stop() return yield self.mq.setup() if hasattr(signal, "SIGHUP"): def sighup(*args): eventually(self.reconfig) signal.signal(signal.SIGHUP, sighup) if hasattr(signal, "SIGUSR1"): def sigusr1(*args): eventually(self.botmaster.cleanShutdown) signal.signal(signal.SIGUSR1, sigusr1) # get the masterid so other services can use it in # startup/reconfig. This goes directly to the DB since the data # API isn't initialized yet, and anyway, this method is aware of # the DB API since it just called its setup function self.masterid = yield self.db.masters.findMasterId( name=self.name) # mark this master as stopped, in case it crashed before yield self.data.updates.masterStopped(name=self.name, masterid=self.masterid) # call the parent method yield super().startService() # We make sure the housekeeping is done before configuring in order to cleanup # any remaining claimed schedulers or change sources from zombie # masters yield self.data.updates.expireMasters(forceHouseKeeping=True) # give all services a chance to load the new configuration, rather # than the base configuration yield self.reconfigServiceWithBuildbotConfig(self.config) # Mark the master as active now that mq is running yield self.data.updates.masterActive(name=self.name, masterid=self.masterid) # Start the heartbeat timer yield self.masterHeartbeatService.setServiceParent(self) # send the statistics to buildbot.net, without waiting self.sendBuildbotNetUsageData() startup_succeed = True except Exception: f = failure.Failure() log.err(f, 'while starting BuildMaster') self.reactor.stop() finally: if startup_succeed: log.msg("BuildMaster is running") else: log.msg("BuildMaster startup failed") yield self.initLock.release() self._master_initialized = True def sendBuildbotNetUsageData(self): if "TRIAL_PYTHONPATH" in os.environ and self.config.buildbotNetUsageData is not None: raise RuntimeError( "Should not enable buildbotNetUsageData in trial tests!") sendBuildbotNetUsageData(self) @defer.inlineCallbacks def stopService(self): try: yield self.initLock.acquire() if self.masterid is not None: yield self.data.updates.masterStopped( name=self.name, masterid=self.masterid) if self.running: yield self.botmaster.cleanShutdown( quickMode=True, stopReactor=False) yield super().stopService() log.msg("BuildMaster is stopped") self._master_initialized 
= False finally: yield self.initLock.release() @defer.inlineCallbacks def reconfig(self): # this method wraps doConfig, ensuring it is only ever called once at # a time, and alerting the user if the reconfig takes too long if self.reconfig_active: log.msg("reconfig already active; will reconfig again after") self.reconfig_requested = True return self.reconfig_active = self.reactor.seconds() metrics.MetricCountEvent.log("loaded_config", 1) # notify every 10 seconds that the reconfig is still going on, although # reconfigs should not take that long! self.reconfig_notifier = task.LoopingCall(lambda: log.msg("reconfig is ongoing for %d s" % (self.reactor.seconds() - self.reconfig_active))) self.reconfig_notifier.start(10, now=False) timer = metrics.Timer("BuildMaster.reconfig") timer.start() try: yield self.doReconfig() except Exception as e: log.err(e, 'while reconfiguring') finally: timer.stop() self.reconfig_notifier.stop() self.reconfig_notifier = None self.reconfig_active = False if self.reconfig_requested: self.reconfig_requested = False self.reconfig() @defer.inlineCallbacks def doReconfig(self): log.msg("beginning configuration update") changes_made = False failed = False try: yield self.initLock.acquire() # Run the master.cfg in thread, so that it can use blocking code new_config = yield threads.deferToThreadPool( self.reactor, self.reactor.getThreadPool(), self.config_loader.loadConfig) changes_made = True self.config_version += 1 self.config = new_config yield self.reconfigServiceWithBuildbotConfig(new_config) except config.ConfigErrors as e: for msg in e.errors: log.msg(msg) failed = True except Exception: log.err(failure.Failure(), 'during reconfig:') failed = True finally: yield self.initLock.release() if failed: if changes_made: log.msg("WARNING: reconfig partially applied; master " "may malfunction") else: log.msg("reconfig aborted without making any changes") else: log.msg("configuration update complete") def reconfigServiceWithBuildbotConfig(self, new_config): if self.configured_db_url is None: self.configured_db_url = new_config.db['db_url'] elif (self.configured_db_url != new_config.db['db_url']): config.error( "Cannot change c['db']['db_url'] after the master has started", ) if self.config.mq['type'] != new_config.mq['type']: raise config.ConfigErrors([ "Cannot change c['mq']['type'] after the master has started", ]) return super().reconfigServiceWithBuildbotConfig(new_config) # informational methods def allSchedulers(self): return list(self.scheduler_manager) def getStatus(self): """ @rtype: L{buildbot.status.builder.Status} """ return self.status # state maintenance (private) def getObjectId(self): """ Return the object id for this master, for associating state with the master. 
@returns: ID, via Deferred """ # try to get the cached value if self._object_id is not None: return defer.succeed(self._object_id) # failing that, get it from the DB; multiple calls to this function # at the same time will not hurt d = self.db.state.getObjectId(self.name, "buildbot.master.BuildMaster") @d.addCallback def keep(id): self._object_id = id return id return d def _getState(self, name, default=None): "private wrapper around C{self.db.state.getState}" d = self.getObjectId() @d.addCallback def get(objectid): return self.db.state.getState(objectid, name, default) return d def _setState(self, name, value): "private wrapper around C{self.db.state.setState}" d = self.getObjectId() @d.addCallback def set(objectid): return self.db.state.setState(objectid, name, value) return d buildbot-2.6.0/master/buildbot/monkeypatches/000077500000000000000000000000001361162603000212725ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/monkeypatches/__init__.py000066400000000000000000000073221361162603000234070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import unittest from builtins import int from twisted.python import util def onlyOnce(fn): 'Set up FN to only run once within an interpreter instance' def wrap(*args, **kwargs): if hasattr(fn, 'called'): return fn.called = 1 return fn(*args, **kwargs) util.mergeFunctionMetadata(fn, wrap) return wrap # NOTE: all of these patches test for applicability *before* importing the # patch module. This will help cut down on unnecessary imports where the # patches are not needed, and also avoid problems with patches importing # private things in external libraries that no longer exist. @onlyOnce def patch_testcase_timeout(): # any test that should take more than 5 second should be annotated so. 
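# Illustrative sketch (not part of the tree) of the onlyOnce() guard defined
# above: the wrapper records a `called` attribute on the wrapped function, so
# any further invocations return immediately. The patch_example name is made
# up for the demo; it only assumes buildbot is importable.
from buildbot.monkeypatches import onlyOnce

@onlyOnce
def patch_example():
    print("patch applied")

patch_example()   # prints "patch applied"
patch_example()   # no-op: the `called` flag is already set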
unittest.TestCase.timeout = 5 # but we know that the DB tests are very slow, so we increase a bit that value for # real database tests if os.environ.get("BUILDBOT_TEST_DB_URL", None) is not None: unittest.TestCase.timeout = 120 @onlyOnce def patch_servicechecks(): from buildbot.monkeypatches import servicechecks servicechecks.patch() @onlyOnce def patch_mysqlclient_warnings(): try: from _mysql_exceptions import Warning # MySQLdb.compat is only present in mysqlclient import MySQLdb.compat # noqa pylint: disable=unused-import,import-outside-toplevel except ImportError: return # workaround for https://twistedmatrix.com/trac/ticket/9005 # mysqlclient is easier to patch than twisted # we swap _mysql_exceptions.Warning arguments so that the code is in second place def patched_init(self, *args): if isinstance(args[0], int): super(Warning, self).__init__("{} {}".format(args[0], args[1])) else: super(Warning, self).__init__(*args) Warning.__init__ = patched_init @onlyOnce def patch_decorators(): from buildbot.monkeypatches import decorators decorators.patch() @onlyOnce def patch_config_for_unit_tests(): from buildbot import config # by default, buildbot.config warns about not configured buildbotNetUsageData. # its important for users to not leak information, but unneeded and painful for tests config._in_unit_tests = True @onlyOnce def patch_unittest_testcase(): from twisted.trial.unittest import TestCase # In Python 3.2, # - assertRaisesRegexp() was renamed to assertRaisesRegex(), # and assertRaisesRegexp() was deprecated. # - assertRegexpMatches() was renamed to assertRegex() # and assertRegexpMatches() was deprecated. if not getattr(TestCase, "assertRaisesRegex", None): TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp if not getattr(TestCase, "assertRegex", None): TestCase.assertRegex = TestCase.assertRegexpMatches def patch_all(for_tests=False): if for_tests: patch_servicechecks() patch_testcase_timeout() patch_decorators() patch_mysqlclient_warnings() patch_config_for_unit_tests() patch_unittest_testcase() buildbot-2.6.0/master/buildbot/monkeypatches/decorators.py000066400000000000000000000021231361162603000240070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import util def patch_noargs_decorator(decorator): def new_decorator(func): wrapper = decorator(func) wrapper.__wrapped__ = func return wrapper util.mergeFunctionMetadata(decorator, new_decorator) return new_decorator def patch(): defer.inlineCallbacks = patch_noargs_decorator(defer.inlineCallbacks) buildbot-2.6.0/master/buildbot/monkeypatches/servicechecks.py000066400000000000000000000024571361162603000244750ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def patch(): """ Patch startService and stopService so that they check the previous state first. (used for debugging only) """ from twisted.application.service import Service old_startService = Service.startService old_stopService = Service.stopService def startService(self): assert not self.running, "%r already running" % (self,) return old_startService(self) def stopService(self): assert self.running, "%r already stopped" % (self,) return old_stopService(self) Service.startService = startService Service.stopService = stopService buildbot-2.6.0/master/buildbot/mq/000077500000000000000000000000001361162603000170355ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/mq/__init__.py000066400000000000000000000000001361162603000211340ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/mq/base.py000066400000000000000000000047721361162603000203330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
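# Illustrative sketch of what the servicechecks patch above enforces once
# applied: starting an already-running Service (or stopping a stopped one)
# trips an assertion instead of silently passing. Debug aid only; assumes
# buildbot and Twisted are importable.
from twisted.application.service import Service
from buildbot.monkeypatches import servicechecks

servicechecks.patch()

svc = Service()
svc.startService()
try:
    svc.startService()        # already running -> AssertionError
except AssertionError as e:
    print("caught double start:", e)
svc.stopService()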
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import deferwaiter from buildbot.util import service class MQBase(service.AsyncService): name = 'mq-implementation' def __init__(self): super().__init__() self._deferwaiter = deferwaiter.DeferWaiter() @defer.inlineCallbacks def stopService(self): yield self._deferwaiter.wait() yield super().stopService() @defer.inlineCallbacks def waitUntilEvent(self, filter, check_callback): d = defer.Deferred() buildCompleteConsumer = yield self.startConsuming( lambda key, value: d.callback((key, value)), filter) check = yield check_callback() # we only wait if the check callback return true if not check: res = yield d else: res = None yield buildCompleteConsumer.stopConsuming() return res def invokeQref(self, qref, routingKey, data): self._deferwaiter.add(qref.invoke(routingKey, data)) class QueueRef: __slots__ = ['callback'] def __init__(self, callback): self.callback = callback def invoke(self, routing_key, data): # Potentially returns a Deferred if not self.callback: return None try: x = self.callback(routing_key, data) except Exception: log.err(failure.Failure(), 'while invoking %r' % (self.callback,)) return None if isinstance(x, defer.Deferred): x.addErrback(log.err, 'while invoking %r' % (self.callback,)) return x def stopConsuming(self): # This method may return a Deferred. # subclasses should set self.callback to None in this method. raise NotImplementedError buildbot-2.6.0/master/buildbot/mq/connector.py000066400000000000000000000054331361162603000214060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python.reflect import namedObject from buildbot.util import service class MQConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): classes = { 'simple': { 'class': "buildbot.mq.simple.SimpleMQ", 'keys': set(['debug']), }, 'wamp': { 'class': "buildbot.mq.wamp.WampMQ", 'keys': set(["router_url", "realm", "wamp_debug_level"]), }, } name = 'mq' def __init__(self): super().__init__() self.impl = None # set in setup self.impl_type = None # set in setup @defer.inlineCallbacks def setup(self): assert not self.impl # imports are done locally so that we don't try to import # implementation-specific modules unless they're required. 
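# Illustrative sketch of the lookup MQConnector.setup() performs with the
# classes table above: the configured c['mq']['type'] string selects a dotted
# class path, which Twisted's namedObject() turns into the implementation
# class. Standalone; no master is started here.
from twisted.python.reflect import namedObject

classes = {
    'simple': "buildbot.mq.simple.SimpleMQ",
    'wamp': "buildbot.mq.wamp.WampMQ",
}
cls = namedObject(classes['simple'])
print(cls.__name__)    # SimpleMQ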
typ = self.master.config.mq['type'] assert typ in self.classes # this is checked by MasterConfig self.impl_type = typ cls = namedObject(self.classes[typ]['class']) self.impl = cls() # set up the impl as a child service yield self.impl.setServiceParent(self) # configure it (early) self.impl.reconfigServiceWithBuildbotConfig(self.master.config) # copy the methods onto this object for ease of access self.produce = self.impl.produce self.startConsuming = self.impl.startConsuming self.waitUntilEvent = self.impl.waitUntilEvent def reconfigServiceWithBuildbotConfig(self, new_config): # double-check -- the master ensures this in config checks assert self.impl_type == new_config.mq['type'] return super().reconfigServiceWithBuildbotConfig(new_config) def produce(self, routing_key, data): # will be patched after configuration to point to the running # implementation's method raise NotImplementedError def startConsuming(self, callback, filter, persistent_name=None): # will be patched after configuration to point to the running # implementation's method raise NotImplementedError buildbot-2.6.0/master/buildbot/mq/simple.py000066400000000000000000000063551361162603000207110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
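# Illustrative sketch of the produce()/startConsuming() API that the connector
# above forwards to its implementation. Routing keys are tuples; None in a
# consumer filter acts as a wildcard. Uses the in-memory SimpleMQ so the
# sketch runs without a master or wamp router; the keys below are made up.
from buildbot.mq.simple import SimpleMQ

mq = SimpleMQ()

def on_finished(key, data):
    print("build finished:", key, data)

mq.startConsuming(on_finished, ('builds', None, 'finished'))
mq.produce(('builds', '12', 'finished'), {'results': 0})   # matches the filter
mq.produce(('builds', '12', 'new'), {})                    # does not match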
# # Copyright Buildbot Team Members import pprint from twisted.internet import defer from twisted.python import log from buildbot.mq import base from buildbot.util import service from buildbot.util import tuplematch class SimpleMQ(service.ReconfigurableServiceMixin, base.MQBase): def __init__(self): super().__init__() self.qrefs = [] self.persistent_qrefs = {} self.debug = False def reconfigServiceWithBuildbotConfig(self, new_config): self.debug = new_config.mq.get('debug', False) return super().reconfigServiceWithBuildbotConfig(new_config) def produce(self, routingKey, data): if self.debug: log.msg("MSG: %s\n%s" % (routingKey, pprint.pformat(data))) for qref in self.qrefs: if tuplematch.matchTuple(routingKey, qref.filter): self.invokeQref(qref, routingKey, data) def startConsuming(self, callback, filter, persistent_name=None): if any(not isinstance(k, str) and k is not None for k in filter): raise AssertionError("%s is not a filter" % (filter,)) if persistent_name: if persistent_name in self.persistent_qrefs: qref = self.persistent_qrefs[persistent_name] qref.startConsuming(callback) else: qref = PersistentQueueRef(self, callback, filter) self.qrefs.append(qref) self.persistent_qrefs[persistent_name] = qref else: qref = QueueRef(self, callback, filter) self.qrefs.append(qref) return defer.succeed(qref) class QueueRef(base.QueueRef): __slots__ = ['mq', 'filter'] def __init__(self, mq, callback, filter): super().__init__(callback) self.mq = mq self.filter = filter def stopConsuming(self): self.callback = None try: self.mq.qrefs.remove(self) except ValueError: pass class PersistentQueueRef(QueueRef): __slots__ = ['active', 'queue'] def __init__(self, mq, callback, filter): super().__init__(mq, callback, filter) self.queue = [] def startConsuming(self, callback): self.callback = callback self.active = True # invoke for every message that was missed queue, self.queue = self.queue, [] for routingKey, data in queue: self.invoke(routingKey, data) def stopConsuming(self): self.callback = self.addToQueue self.active = False def addToQueue(self, routingKey, data): self.queue.append((routingKey, data)) buildbot-2.6.0/master/buildbot/mq/wamp.py000066400000000000000000000076411361162603000203630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
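# Illustrative sketch of the PersistentQueueRef behaviour above: a consumer
# registered with persistent_name keeps collecting matching messages while it
# is detached and replays them when the same name starts consuming again.
# SimpleMQ.startConsuming returns an already-fired Deferred, so reading
# .result is safe in this synchronous sketch; the names are made up.
from buildbot.mq.simple import SimpleMQ

mq = SimpleMQ()
received = []

def consumer(key, data):
    received.append((key, data))

qref = mq.startConsuming(consumer, ('changes', None, 'new'),
                         persistent_name='my-consumer').result
qref.stopConsuming()                              # detach; messages queue up
mq.produce(('changes', '7', 'new'), {'who': 'alice'})

# reattaching under the same name replays the queued message
mq.startConsuming(consumer, ('changes', None, 'new'),
                  persistent_name='my-consumer')
print(received)    # [(('changes', '7', 'new'), {'who': 'alice'})]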
# # Copyright Buildbot Team Members import json from autobahn.wamp.exception import TransportLost from autobahn.wamp.types import PublishOptions from autobahn.wamp.types import SubscribeOptions from twisted.internet import defer from twisted.python import log from buildbot.mq import base from buildbot.util import service from buildbot.util import toJson class WampMQ(service.ReconfigurableServiceMixin, base.MQBase): NAMESPACE = "org.buildbot.mq" def produce(self, routingKey, data): d = self._produce(routingKey, data) d.addErrback( log.err, "Problem while producing message on topic " + repr(routingKey)) @classmethod def messageTopic(cls, routingKey): def ifNone(v, default): return default if v is None else v # replace None values by "" in routing key routingKey = [ifNone(key, "") for key in routingKey] # then join them with "dot", and add the prefix return cls.NAMESPACE + "." + ".".join(routingKey) @classmethod def routingKeyFromMessageTopic(cls, topic): # just split the topic, and remove the NAMESPACE prefix return tuple(topic[len(WampMQ.NAMESPACE) + 1:].split(".")) def _produce(self, routingKey, data): _data = json.loads(json.dumps(data, default=toJson)) options = PublishOptions(exclude_me=False) return self.master.wamp.publish(self.messageTopic(routingKey), _data, options=options) def startConsuming(self, callback, _filter, persistent_name=None): if persistent_name is not None: log.err('wampmq: persistent queues are not persisted: %s %s' % (persistent_name, _filter)) qr = QueueRef(self, callback) self._startConsuming(qr, callback, _filter) return defer.succeed(qr) def _startConsuming(self, qr, callback, _filter, persistent_name=None): return qr.subscribe(self.master.wamp, self, _filter) class QueueRef(base.QueueRef): def __init__(self, mq, callback): super().__init__(callback) self.unreg = None self.mq = mq @defer.inlineCallbacks def subscribe(self, connector_service, wamp_service, _filter): self.filter = _filter self.emulated = False options = dict(details_arg=str('details')) if None in _filter: options["match"] = "wildcard" options = SubscribeOptions(**options) _filter = WampMQ.messageTopic(_filter) self.unreg = yield connector_service.subscribe(self.wampInvoke, _filter, options=options) if self.callback is None: yield self.stopConsuming() def wampInvoke(self, msg, details): if details.topic is not None: # in the case of a wildcard, wamp router sends the topic topic = WampMQ.routingKeyFromMessageTopic(details.topic) else: # in the case of an exact match, then we can use our own topic topic = self.filter self.mq.invokeQref(self, topic, msg) @defer.inlineCallbacks def stopConsuming(self): self.callback = None if self.unreg is not None: unreg = self.unreg self.unreg = None try: yield unreg.unsubscribe() except TransportLost: pass buildbot-2.6.0/master/buildbot/newsfragments/000077500000000000000000000000001361162603000213035ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/newsfragments/README.txt000066400000000000000000000016461361162603000230100ustar00rootroot00000000000000This is the directory for news fragments used by towncrier: https://github.com/hawkowl/towncrier You create a news fragment in this directory when you make a change, and the file gets removed from this directory when the news is published. towncrier has a few standard types of news fragments, signified by the file extension. These are: .feature: Signifying a new feature. .bugfix: Signifying a bug fix. .doc: Signifying a documentation improvement. .removal: Signifying a deprecation or removal of public API. 
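For example (the issue number here is hypothetical): a change fixing :issue:`4301` would typically use the .bugfix extension, with the file's base name chosen as described below, and contain a single sentence such as: Fix crash in the console view when a build has no source stamps (:issue:`4301`). towncrier folds that sentence into the release notes and deletes the fragment when the release is cut.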
The core of the filename can be the fixed issue number of any unique text relative to your work. Buildbot project does not require a tracking ticket to be made for each contribution even if this is appreciated. Please point to the trac bug using syntax: (:bug:`NNN`) Please point to the github bug using syntax: (:issue:`NNN`) please point to classes using syntax: :py:class:`~buildbot.reporters.http.HttpStatusBase` buildbot-2.6.0/master/buildbot/pbmanager.py000066400000000000000000000167431361162603000207410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.application import strports from twisted.cred import checkers from twisted.cred import credentials from twisted.cred import error from twisted.cred import portal from twisted.internet import defer from twisted.python import log from twisted.spread import pb from zope.interface import implementer from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util import unicode2bytes from buildbot.util.eventual import eventually debug = False class PBManager(service.AsyncMultiService): """ A centralized manager for PB ports and authentication on them. Allows various pieces of code to request a (port, username) combo, along with a password and a perspective factory. """ def __init__(self): super().__init__() self.setName('pbmanager') self.dispatchers = {} @defer.inlineCallbacks def register(self, portstr, username, password, pfactory): """ Register a perspective factory PFACTORY to be executed when a PB connection arrives on PORTSTR with USERNAME/PASSWORD. Returns a Registration object which can be used to unregister later. """ # do some basic normalization of portstrs if isinstance(portstr, type(0)) or ':' not in portstr: portstr = "tcp:%s" % portstr reg = Registration(self, portstr, username) if portstr not in self.dispatchers: disp = self.dispatchers[portstr] = Dispatcher(portstr) yield disp.setServiceParent(self) else: disp = self.dispatchers[portstr] disp.register(username, password, pfactory) return reg @defer.inlineCallbacks def _unregister(self, registration): disp = self.dispatchers[registration.portstr] disp.unregister(registration.username) registration.username = None if not disp.users: disp = self.dispatchers[registration.portstr] del self.dispatchers[registration.portstr] yield disp.disownServiceParent() class Registration: def __init__(self, pbmanager, portstr, username): self.portstr = portstr "portstr this registration is active on" self.username = username "username of this registration" self.pbmanager = pbmanager def __repr__(self): return "" % \ (self.username, self.portstr) def unregister(self): """ Unregister this registration, removing the username from the port, and closing the port if there are no more users left. Returns a Deferred. 
""" return self.pbmanager._unregister(self) def getPort(self): """ Helper method for testing; returns the TCP port used for this registration, even if it was specified as 0 and thus allocated by the OS. """ disp = self.pbmanager.dispatchers[self.portstr] return disp.port.getHost().port @implementer(portal.IRealm, checkers.ICredentialsChecker) class Dispatcher(service.AsyncService): credentialInterfaces = [credentials.IUsernamePassword, credentials.IUsernameHashedPassword] def __init__(self, portstr): self.portstr = portstr self.users = {} # there's lots of stuff to set up for a PB connection! self.portal = portal.Portal(self) self.portal.registerChecker(self) self.serverFactory = pb.PBServerFactory(self.portal) self.serverFactory.unsafeTracebacks = True self.port = None def __repr__(self): return "" % \ (", ".join(list(self.users)), self.portstr) def startService(self): assert not self.port self.port = strports.listen(self.portstr, self.serverFactory) return super().startService() @defer.inlineCallbacks def stopService(self): # stop listening on the port when shut down assert self.port port, self.port = self.port, None yield defer.maybeDeferred(port.stopListening) yield super().stopService() def register(self, username, password, pfactory): if debug: log.msg("registering username '%s' on pb port %s: %s" % (username, self.portstr, pfactory)) if username in self.users: raise KeyError("username '%s' is already registered on PB port %s" % (username, self.portstr)) self.users[username] = (password, pfactory) def unregister(self, username): if debug: log.msg("unregistering username '%s' on pb port %s" % (username, self.portstr)) del self.users[username] # IRealm def requestAvatar(self, username, mind, interface): assert interface == pb.IPerspective username = bytes2unicode(username) if username not in self.users: d = defer.succeed(None) # no perspective else: _, afactory = self.users.get(username) d = defer.maybeDeferred(afactory, mind, username) # check that we got a perspective @d.addCallback def check(persp): if not persp: raise ValueError("no perspective for '%s'" % username) return persp # call the perspective's attached(mind) @d.addCallback def call_attached(persp): d = defer.maybeDeferred(persp.attached, mind) d.addCallback(lambda _: persp) # keep returning the perspective return d # return the tuple requestAvatar is expected to return @d.addCallback def done(persp): return (pb.IPerspective, persp, lambda: persp.detached(mind)) return d # ICredentialsChecker @defer.inlineCallbacks def requestAvatarId(self, creds): p = Properties() p.master = self.master username = bytes2unicode(creds.username) try: yield self.master.initLock.acquire() if username in self.users: password, _ = self.users[username] password = yield p.render(password) matched = yield defer.maybeDeferred( creds.checkPassword, unicode2bytes(password)) if not matched: log.msg("invalid login from user '{}'".format(username)) raise error.UnauthorizedLogin() return creds.username log.msg("invalid login from unknown user '{}'".format(username)) raise error.UnauthorizedLogin() finally: # brake the callback stack by returning to the reactor # before waking up other waiters eventually(self.master.initLock.release) buildbot-2.6.0/master/buildbot/pbutil.py000066400000000000000000000143651361162603000203020ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Base classes handy for use with PB clients. """ from twisted.internet import protocol from twisted.python import log from twisted.spread import pb from twisted.spread.pb import PBClientFactory from buildbot.util import bytes2unicode class NewCredPerspective(pb.Avatar): def attached(self, mind): return self def detached(self, mind): pass class ReconnectingPBClientFactory(PBClientFactory, protocol.ReconnectingClientFactory): """Reconnecting client factory for PB brokers. Like PBClientFactory, but if the connection fails or is lost, the factory will attempt to reconnect. Instead of using f.getRootObject (which gives a Deferred that can only be fired once), override the gotRootObject method. Instead of using the newcred f.login (which is also one-shot), call f.startLogin() with the credentials and client, and override the gotPerspective method. Instead of using the oldcred f.getPerspective (also one-shot), call f.startGettingPerspective() with the same arguments, and override gotPerspective. gotRootObject and gotPerspective will be called each time the object is received (once per successful connection attempt). You will probably want to use obj.notifyOnDisconnect to find out when the connection is lost. If an authorization error occurs, failedToGetPerspective() will be invoked. To use me, subclass, then hand an instance to a connector (like TCPClient). """ def __init__(self): super().__init__() self._doingLogin = False self._doingGetPerspective = False def clientConnectionFailed(self, connector, reason): super().clientConnectionFailed(connector, reason) # Twisted-1.3 erroneously abandons the connection on non-UserErrors. # To avoid this bug, don't upcall, and implement the correct version # of the method here. 
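# Illustrative sketch of the usage pattern the docstring above describes:
# subclass ReconnectingPBClientFactory, call startLogin() with credentials,
# override gotPerspective(), then hand the factory to a connector. The host,
# port and credentials here are made up.
from twisted.cred import credentials
from twisted.internet import reactor
from buildbot.pbutil import ReconnectingPBClientFactory

class MyPBClient(ReconnectingPBClientFactory):
    def gotPerspective(self, perspective):
        # called again after every successful reconnection
        print("connected:", perspective)

factory = MyPBClient()
factory.startLogin(credentials.UsernamePassword(b"worker1", b"sekrit"))
reactor.connectTCP("buildmaster.example.org", 9989, factory)
# reactor.run() would then keep the factory reconnecting on connection loss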
if self.continueTrying: self.connector = connector self.retry() def clientConnectionLost(self, connector, reason): super().clientConnectionLost(connector, reason, reconnecting=True) RCF = protocol.ReconnectingClientFactory RCF.clientConnectionLost(self, connector, reason) def clientConnectionMade(self, broker): self.resetDelay() super().clientConnectionMade(broker) if self._doingLogin: self.doLogin(self._root) if self._doingGetPerspective: self.doGetPerspective(self._root) self.gotRootObject(self._root) # oldcred methods def getPerspective(self, *args): raise RuntimeError("getPerspective is one-shot: use startGettingPerspective instead") def startGettingPerspective(self, username, password, serviceName, perspectiveName=None, client=None): self._doingGetPerspective = True if perspectiveName is None: perspectiveName = username self._oldcredArgs = (username, password, serviceName, perspectiveName, client) def doGetPerspective(self, root): # oldcred getPerspective() (username, password, serviceName, perspectiveName, client) = self._oldcredArgs d = self._cbAuthIdentity(root, username, password) d.addCallback(self._cbGetPerspective, serviceName, perspectiveName, client) d.addCallbacks(self.gotPerspective, self.failedToGetPerspective) # newcred methods def login(self, *args): raise RuntimeError("login is one-shot: use startLogin instead") def startLogin(self, credentials, client=None): self._credentials = credentials self._client = client self._doingLogin = True def doLogin(self, root): # newcred login() d = self._cbSendUsername(root, self._credentials.username, self._credentials.password, self._client) d.addCallbacks(self.gotPerspective, self.failedToGetPerspective) # methods to override def gotPerspective(self, perspective): """The remote avatar or perspective (obtained each time this factory connects) is now available.""" def gotRootObject(self, root): """The remote root object (obtained each time this factory connects) is now available. This method will be called each time the connection is established and the object reference is retrieved.""" def failedToGetPerspective(self, why): """The login process failed, most likely because of an authorization failure (bad password), but it is also possible that we lost the new connection before we managed to send our credentials. """ log.msg("ReconnectingPBClientFactory.failedToGetPerspective") if why.check(pb.PBConnectionLost): log.msg("we lost the brand-new connection") # retrying might help here, let clientConnectionLost decide return # probably authorization self.stopTrying() # logging in harder won't help log.err(why) def decode(data, encoding='utf-8', errors='strict'): """We need to convert a dictionary where keys and values are bytes, to unicode strings. This happens when a Python 2 worker sends a dictionary back to a Python 3 master. """ data_type = type(data) if data_type == bytes: return bytes2unicode(data, encoding, errors) if data_type in (dict, list, tuple): if data_type == dict: data = data.items() return data_type(map(decode, data)) return data buildbot-2.6.0/master/buildbot/plugins/000077500000000000000000000000001361162603000201015ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/plugins/__init__.py000066400000000000000000000031241361162603000222120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
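# Illustrative sketch of the decode() helper above: nested bytes coming from a
# Python 2 worker are converted to text recursively, containers and all.
from buildbot.pbutil import decode

print(decode({b'rc': 0, b'stdio': [b'line one', b'line two']}))
# -> {'rc': 0, 'stdio': ['line one', 'line two']}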
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Buildbot plugin infrastructure """ from buildbot import statistics from buildbot.interfaces import IBuildStep from buildbot.interfaces import IChangeSource from buildbot.interfaces import IScheduler from buildbot.interfaces import IWorker from buildbot.plugins.db import get_plugins __all__ = [ 'changes', 'schedulers', 'steps', 'util', 'reporters', 'statistics', 'worker', 'secrets', 'webhooks' ] # Names here match the names of the corresponding Buildbot module, hence # 'changes', 'schedulers', but 'buildslave' changes = get_plugins('changes', IChangeSource) schedulers = get_plugins('schedulers', IScheduler) steps = get_plugins('steps', IBuildStep) util = get_plugins('util', None) reporters = get_plugins('reporters', None) secrets = get_plugins('secrets', None) webhooks = get_plugins('webhooks', None) # Worker entry point for new/updated plugins. worker = get_plugins('worker', IWorker) buildbot-2.6.0/master/buildbot/plugins/db.py000066400000000000000000000226601361162603000210460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # pylint: disable=C0111 import traceback from pkg_resources import iter_entry_points from zope.interface import Invalid from zope.interface.verify import verifyClass from buildbot.errors import PluginDBError from buildbot.interfaces import IPlugin # Base namespace for Buildbot specific plugins _NAMESPACE_BASE = 'buildbot' class _PluginEntry: def __init__(self, group, entry, loader): self._group = group self._entry = entry self._value = None self._loader = loader def load(self): if self._value is None: self._value = self._loader(self._entry) @property def group(self): return self._group @property def name(self): return self._entry.name @property def info(self): dist = self._entry.dist return (dist.project_name, dist.version) def __ne__(self, other): return self.info != other.info @property def value(self): self.load() return self._value class _PluginEntryProxy(_PluginEntry): """Proxy for specific entry with custom group name. Used to provided access to the same entry from different namespaces. 
""" def __init__(self, group, plugin_entry): assert isinstance(plugin_entry, _PluginEntry) self._plugin_entry = plugin_entry self._group = group def load(self): self._plugin_entry.load() @property def group(self): return self._group @property def name(self): return self._plugin_entry.name @property def info(self): return self._plugin_entry.info @property def value(self): return self._plugin_entry.value class _NSNode: # pylint: disable=W0212 def __init__(self): self._children = dict() def load(self): for child in self._children.values(): child.load() def add(self, name, entry): assert isinstance(name, str) and isinstance(entry, _PluginEntry) self._add(name, entry) def _add(self, name, entry): path = name.split('.', 1) key = path.pop(0) is_leaf = not path child = self._children.get(key) if is_leaf: if child is not None: assert isinstance(child, _PluginEntry) if child != entry: raise PluginDBError('Duplicate entry point for "%s:%s".\n' ' Previous definition %s\n' ' This definition %s' % (child.group, child.name, child.info, entry.info)) else: self._children[key] = entry else: if child is None: child = _NSNode() assert isinstance(child, _NSNode) child._add(path[0], entry) self._children[key] = child def __getattr__(self, name): child = self._children.get(name) if child is None: raise PluginDBError('Unknown component name: %s' % name) if isinstance(child, _PluginEntry): return child.value return child def info(self, name): assert isinstance(name, str) return self._get(name).info def get(self, name): assert isinstance(name, str) return self._get(name).value def _get(self, name): path = name.split('.', 1) key = path.pop(0) is_leaf = not path child = self._children.get(key) if isinstance(child, _PluginEntry): if not is_leaf: raise PluginDBError('Excessive namespace specification: %s' % path[0]) return child elif child is None: raise PluginDBError('Unknown component name: %s' % name) else: return child._get(path[0]) def _info_all(self): result = [] for key, child in self._children.items(): if isinstance(child, _PluginEntry): result.append((key, child.info)) else: result.extend([ ('%s.%s' % (key, name), value) for name, value in child.info_all().items() ]) return result def info_all(self): return dict(self._info_all()) class _Plugins: """ represent plugins within a namespace """ def __init__(self, namespace, interface=None, check_extras=True): if interface is not None: assert interface.isOrExtends(IPlugin) self._group = '%s.%s' % (_NAMESPACE_BASE, namespace) self._interface = interface self._check_extras = check_extras self._real_tree = None def _load_entry(self, entry): # pylint: disable=W0703 if self._check_extras: try: entry.require() except Exception as err: raise PluginDBError('Requirements are not satisfied ' 'for %s:%s: %s' % (self._group, entry.name, str(err))) try: result = entry.load() except Exception as err: # log full traceback of the bad entry to help support traceback.print_exc() raise PluginDBError('Unable to load %s:%s: %s' % (self._group, entry.name, str(err))) if self._interface: try: verifyClass(self._interface, result) except Invalid as err: raise PluginDBError('Plugin %s:%s does not implement %s: %s' % (self._group, entry.name, self._interface.__name__, str(err))) return result @property def _tree(self): if self._real_tree is None: self._real_tree = _NSNode() for entry in iter_entry_points(self._group): self._real_tree.add(entry.name, _PluginEntry(self._group, entry, self._load_entry)) return self._real_tree def load(self): self._tree.load() def info_all(self): return 
self._tree.info_all() @property def names(self): # Expensive operation return list(self.info_all()) def info(self, name): """ get information about a particular plugin if known in this namespace """ return self._tree.info(name) def __contains__(self, name): """ check if the given name is available as a plugin """ try: return not isinstance(self._tree.get(name), _NSNode) except PluginDBError: return False def get(self, name): """ get an instance of the plugin with the given name """ return self._tree.get(name) def __getattr__(self, name): try: return getattr(self._tree, name) except PluginDBError as err: raise AttributeError(str(err)) class _PluginDB: """ Plugin infrastructure support for Buildbot """ def __init__(self): self._namespaces = dict() def add_namespace(self, namespace, interface=None, check_extras=True, load_now=False): """ register given namespace in global database of plugins in case it's already registered, return the registration """ tempo = self._namespaces.get(namespace) if tempo is None: tempo = _Plugins(namespace, interface, check_extras) self._namespaces[namespace] = tempo if load_now: tempo.load() return tempo @property def namespaces(self): """ get a list of registered namespaces """ return list(self._namespaces) def info(self): """ get information about all plugins in registered namespaces """ result = dict() for name, namespace in self._namespaces.items(): result[name] = namespace.info_all() return result _DB = _PluginDB() def namespaces(): """ provide information about known namespaces """ return _DB.namespaces def info(): """ provide information about all known plugins format of the output: {, { {: (, lock.config_version: lock.updateFromLockId(lockid, config_version) return lock def getLockFromLockAccess(self, access, config_version): # Convert a lock-access object into an actual Lock instance. if not isinstance(access, locks.LockAccess): # Buildbot 0.7.7 compatibility: user did not specify access access = access.defaultAccess() return self.getLockByID(access.lockid, config_version) @defer.inlineCallbacks def getLockFromLockAccesses(self, accesses, config_version): # converts locks to their real forms locks = yield defer.gatherResults([self.getLockFromLockAccess(access, config_version) for access in accesses]) return zip(locks, accesses) class BotMaster(service.ReconfigurableServiceMixin, service.AsyncMultiService, LockRetrieverMixin): """This is the master-side service which manages remote buildbot workers. It provides them with Workers, and distributes build requests to them.""" debug = 0 name = "botmaster" def __init__(self): super().__init__() self.builders = {} self.builderNames = [] # builders maps Builder names to instances of bb.p.builder.Builder, # which is the master-side object that defines and controls a build. self.watchers = {} self.shuttingDown = False # subscription to new build requests self.buildrequest_consumer = None # a distributor for incoming build requests; see below self.brd = BuildRequestDistributor(self) self.brd.setServiceParent(self) @defer.inlineCallbacks def cleanShutdown(self, quickMode=False, stopReactor=True): """Shut down the entire process, once all currently-running builds are complete. 
quickMode will mark all builds as retry (except the ones that were triggered) """ if self.shuttingDown: return log.msg("Initiating clean shutdown") self.shuttingDown = True # first, stop the distributor; this will finish any ongoing scheduling # operations before firing yield self.brd.disownServiceParent() # Double check that we're still supposed to be shutting down # The shutdown may have been cancelled! while self.shuttingDown: if quickMode: for builder in self.builders.values(): # As we stop the builds, builder.building might change during loop # so we need to copy the list for build in list(builder.building): # if build is waited for then this is a sub-build, so # no need to retry it if sum(br.waitedFor for br in build.requests): results = CANCELLED else: results = RETRY is_building = build.workerforbuilder.state == States.BUILDING build.stopBuild("Master Shutdown", results) if not is_building: # if it is not building, then it must be a latent worker # which is substantiating. Cancel it. build.workerforbuilder.worker.insubstantiate() # then wait for all builds to finish dl = [] for builder in self.builders.values(): for build in builder.building: # build may be waiting for ping to worker to succeed which # may never happen if the connection to worker was broken # without TCP connection being severed build.workerforbuilder.abortPingIfAny() dl.append(build.waitUntilFinished()) if not dl: log.msg("No running jobs, starting shutdown immediately") else: log.msg("Waiting for %i build(s) to finish" % len(dl)) yield defer.DeferredList(dl) # Check that there really aren't any running builds n = 0 for builder in self.builders.values(): if builder.building: num_builds = len(builder.building) log.msg("Builder %s has %i builds running" % (builder, num_builds)) n += num_builds if n > 0: log.msg( "Not shutting down, there are %i builds running" % n) log.msg("Trying shutdown sequence again") yield util.asyncSleep(1) else: if stopReactor and self.shuttingDown: log.msg("Stopping reactor") self.master.reactor.stop() break if not self.shuttingDown: yield self.brd.setServiceParent(self) def cancelCleanShutdown(self): """Cancel a clean shutdown that is already in progress, if any""" if not self.shuttingDown: return log.msg("Cancelling clean shutdown") self.shuttingDown = False @metrics.countMethod('BotMaster.workerLost()') def workerLost(self, bot): metrics.MetricCountEvent.log("BotMaster.attached_workers", -1) for name, b in self.builders.items(): if bot.workername in b.config.workernames: b.detached(bot) @metrics.countMethod('BotMaster.getBuildersForWorker()') def getBuildersForWorker(self, workername): return [b for b in self.builders.values() if workername in b.config.workernames] def getBuildernames(self): return self.builderNames def getBuilders(self): return list(self.builders.values()) @defer.inlineCallbacks def startService(self): @defer.inlineCallbacks def buildRequestAdded(key, msg): builderid = msg['builderid'] buildername = None # convert builderid to buildername for builder in self.builders.values(): if builderid == (yield builder.getBuilderId()): buildername = builder.name break if buildername: self.maybeStartBuildsForBuilder(buildername) # consume both 'new' and 'unclaimed' build requests startConsuming = self.master.mq.startConsuming self.buildrequest_consumer_new = yield startConsuming( buildRequestAdded, ('buildrequests', None, "new")) self.buildrequest_consumer_unclaimed = yield startConsuming( buildRequestAdded, ('buildrequests', None, 'unclaimed')) yield super().startService() 
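# Illustrative sketch of driving the shutdown API above from code (master.py
# wires SIGUSR1 to the same cleanShutdown call). `master` is assumed to be a
# running BuildMaster instance; nothing here is started standalone.
from twisted.internet import defer

@defer.inlineCallbacks
def drain_builds(master):
    # let running builds finish, but leave the reactor (and master) up
    yield master.botmaster.cleanShutdown(quickMode=False, stopReactor=False)

def abort_shutdown(master):
    # a clean shutdown already in progress can be cancelled at any time
    master.botmaster.cancelCleanShutdown()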
@defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): timer = metrics.Timer("BotMaster.reconfigServiceWithBuildbotConfig") timer.start() # reconfigure builders yield self.reconfigServiceBuilders(new_config) # call up yield super().reconfigServiceWithBuildbotConfig(new_config) # try to start a build for every builder; this is necessary at master # startup, and a good idea in any other case self.maybeStartBuildsForAllBuilders() timer.stop() @defer.inlineCallbacks def reconfigServiceBuilders(self, new_config): timer = metrics.Timer("BotMaster.reconfigServiceBuilders") timer.start() # arrange builders by name old_by_name = {b.name: b for b in list(self) if isinstance(b, Builder)} old_set = set(old_by_name) new_by_name = {bc.name: bc for bc in new_config.builders} new_set = set(new_by_name) # calculate new builders, by name, and removed builders removed_names, added_names = util.diffSets(old_set, new_set) if removed_names or added_names: log.msg("adding %d new builders, removing %d" % (len(added_names), len(removed_names))) for n in removed_names: builder = old_by_name[n] del self.builders[n] builder.master = None builder.botmaster = None yield defer.maybeDeferred(builder.disownServiceParent) for n in added_names: builder = Builder(n) self.builders[n] = builder builder.botmaster = self builder.master = self.master yield builder.setServiceParent(self) self.builderNames = list(self.builders) yield self.master.data.updates.updateBuilderList( self.master.masterid, [util.bytes2unicode(n) for n in self.builderNames]) metrics.MetricCountEvent.log("num_builders", len(self.builders), absolute=True) timer.stop() def stopService(self): if self.buildrequest_consumer_new: self.buildrequest_consumer_new.stopConsuming() self.buildrequest_consumer_new = None if self.buildrequest_consumer_unclaimed: self.buildrequest_consumer_unclaimed.stopConsuming() self.buildrequest_consumer_unclaimed = None return super().stopService() def maybeStartBuildsForBuilder(self, buildername): """ Call this when something suggests that a particular builder may now be available to start a build. @param buildername: the name of the builder """ self.brd.maybeStartBuildsOn([buildername]) def maybeStartBuildsForWorker(self, worker_name): """ Call this when something suggests that a particular worker may now be available to start a build. @param worker_name: the name of the worker """ builders = self.getBuildersForWorker(worker_name) self.brd.maybeStartBuildsOn([b.name for b in builders]) def maybeStartBuildsForAllBuilders(self): """ Call this when something suggests that this would be a good time to start some builds, but nothing more specific. """ self.brd.maybeStartBuildsOn(self.builderNames) buildbot-2.6.0/master/buildbot/process/build.py000066400000000000000000000756231361162603000215640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
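# Illustrative sketch of the diffSets() helper used by
# reconfigServiceBuilders() above to work out which builders disappeared and
# which are new across a reconfig. The builder names are made up.
from buildbot.util import diffSets

old_set = {"runtests", "docs"}
new_set = {"runtests", "lint"}
removed_names, added_names = diffSets(old_set, new_set)
print(sorted(removed_names), sorted(added_names))   # ['docs'] ['lint']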
# # Copyright Buildbot Team Members from functools import reduce from twisted.internet import defer from twisted.internet import error from twisted.python import components from twisted.python import failure from twisted.python import log from twisted.python.failure import Failure from zope.interface import implementer from buildbot import interfaces from buildbot.process import buildstep from buildbot.process import metrics from buildbot.process import properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import computeResultAndTermination from buildbot.process.results import statusToString from buildbot.process.results import worst_status from buildbot.reporters.utils import getURLForBuild from buildbot.util import bytes2unicode from buildbot.util.eventual import eventually @implementer(interfaces.IBuildControl) class Build(properties.PropertiesMixin): """I represent a single build by a single worker. Specialized Builders can use subclasses of Build to hold status information unique to those build processes. I control B{how} the build proceeds. The actual build is broken up into a series of steps, saved in the .buildSteps[] array as a list of L{buildbot.process.step.BuildStep} objects. Each step is a single remote command, possibly a shell command. During the build, I put status information into my C{BuildStatus} gatherer. After the build, I go away. I can be used by a factory by setting buildClass on L{buildbot.process.factory.BuildFactory} @ivar requests: the list of L{BuildRequest}s that triggered me @ivar build_status: the L{buildbot.status.build.BuildStatus} that collects our status """ VIRTUAL_BUILDERNAME_PROP = "virtual_builder_name" VIRTUAL_BUILDERDESCRIPTION_PROP = "virtual_builder_description" VIRTUAL_BUILDERTAGS_PROP = "virtual_builder_tags" workdir = "build" build_status = None reason = "changes" finished = False results = None stopped = False set_runtime_properties = True subs = None _sentinel = [] # used as a sentinel to indicate unspecified initial_value def __init__(self, requests): self.requests = requests self.locks = [] # build a source stamp self.sources = requests[0].mergeSourceStampsWith(requests[1:]) self.reason = requests[0].mergeReasons(requests[1:]) self.currentStep = None self.workerEnvironment = {} self.buildid = None self.number = None self.executedSteps = [] self.stepnames = {} self.terminate = False self._acquiringLock = None self._builderid = None # overall results, may downgrade after each step self.results = SUCCESS self.properties = properties.Properties() # tracks execution during the build finish phase self._locks_released = False self._build_finished = False # tracks the config version for locks self.config_version = None def setBuilder(self, builder): """ Set the given builder as our builder. 
@type builder: L{buildbot.process.builder.Builder} """ self.builder = builder self.master = builder.master self.config_version = builder.config_version @defer.inlineCallbacks def setLocks(self, lockList): self.locks = yield self.builder.botmaster.getLockFromLockAccesses(lockList, self.config_version) def setWorkerEnvironment(self, env): # TODO: remove once we don't have anything depending on this method or attribute # e.g., old-style steps (ShellMixin pulls the environment out of the # builder directly) self.workerEnvironment = env def getSourceStamp(self, codebase=''): for source in self.sources: if source.codebase == codebase: return source return None def getAllSourceStamps(self): return list(self.sources) @staticmethod def allChangesFromSources(sources): for s in sources: for c in s.changes: yield c def allChanges(self): return Build.allChangesFromSources(self.sources) def allFiles(self): # return a list of all source files that were changed files = [] for c in self.allChanges(): for f in c.files: files.append(f) return files def __repr__(self): return "" % ( self.builder.name, self.number, statusToString(self.results)) def blamelist(self): # Note that this algorithm is also implemented in buildbot.reporters.utils.getResponsibleUsersForBuild, # but using the data api. # it is important for the UI to have the blamelist easily available. # The best way is to make sure the owners property is set to full blamelist blamelist = [] for c in self.allChanges(): if c.who not in blamelist: blamelist.append(c.who) for source in self.sources: if source.patch: # Add patch author to blamelist blamelist.append(source.patch_info[0]) blamelist.sort() return blamelist def changesText(self): changetext = "" for c in self.allChanges(): changetext += "-" * 60 + "\n\n" + c.asText() + "\n" # consider sorting these by number return changetext def setStepFactories(self, step_factories): """Set a list of 'step factories', which are tuples of (class, kwargs), where 'class' is generally a subclass of step.BuildStep . These are used to create the Steps themselves when the Build starts (as opposed to when it is first created). By creating the steps later, their __init__ method will have access to things like build.allFiles() .""" self.stepFactories = list(step_factories) useProgress = True def getWorkerCommandVersion(self, command, oldversion=None): return self.workerforbuilder.getWorkerCommandVersion(command, oldversion) def getWorkerName(self): return self.workerforbuilder.worker.workername @staticmethod def setupPropertiesKnownBeforeBuildStarts(props, requests, builder, workerforbuilder): # Note that this function does not setup the 'builddir' worker property # It's not possible to know it until before the actual worker has # attached. 
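# Illustrative sketch of the property layering this helper applies (the
# updateFromProperties() calls continue just below): later updates override
# earlier ones, which is why request properties end up taking precedence over
# change properties. Standalone; the property names and sources are made up.
from buildbot.process.properties import Properties

props = Properties()

from_change = Properties()
from_change.setProperty("owner", "alice", "Change")

from_request = Properties()
from_request.setProperty("owner", "bob", "Scheduler")

props.updateFromProperties(from_change)    # changes first
props.updateFromProperties(from_request)   # then build requests
print(props.getProperty("owner"))          # bob -- the later layer wins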
# start with global properties from the configuration props.updateFromProperties(builder.master.config.properties) # from the SourceStamps, which have properties via Change sources = requests[0].mergeSourceStampsWith(requests[1:]) for change in Build.allChangesFromSources(sources): props.updateFromProperties(change.properties) # get any properties from requests (this is the path through which # schedulers will send us properties) for rq in requests: props.updateFromProperties(rq.properties) # get builder properties builder.setupProperties(props) # get worker properties # navigate our way back to the L{buildbot.worker.Worker} # object that came from the config, and get its properties workerforbuilder.worker.setupProperties(props) def setupOwnProperties(self): # now set some properties of our own, corresponding to the # build itself props = self.getProperties() props.setProperty("buildnumber", self.number, "Build") if self.sources and len(self.sources) == 1: # old interface for backwards compatibility source = self.sources[0] props.setProperty("branch", source.branch, "Build") props.setProperty("revision", source.revision, "Build") props.setProperty("repository", source.repository, "Build") props.setProperty("codebase", source.codebase, "Build") props.setProperty("project", source.project, "Build") def setupWorkerBuildirProperty(self, workerforbuilder): path_module = workerforbuilder.worker.path_module # navigate our way back to the L{buildbot.worker.Worker} # object that came from the config, and get its properties if workerforbuilder.worker.worker_basedir: builddir = path_module.join( bytes2unicode(workerforbuilder.worker.worker_basedir), bytes2unicode(self.builder.config.workerbuilddir)) self.setProperty("builddir", builddir, "Worker") def setupWorkerForBuilder(self, workerforbuilder): self.path_module = workerforbuilder.worker.path_module self.workername = workerforbuilder.worker.workername self.build_status.setWorkername(self.workername) @defer.inlineCallbacks def getBuilderId(self): if self._builderid is None: if self.hasProperty(self.VIRTUAL_BUILDERNAME_PROP): self._builderid = yield self.builder.getBuilderIdForName( self.getProperty(self.VIRTUAL_BUILDERNAME_PROP)) description = self.getProperty( self.VIRTUAL_BUILDERDESCRIPTION_PROP, self.builder.config.description) tags = self.getProperty( self.VIRTUAL_BUILDERTAGS_PROP, self.builder.config.tags) if type(tags) == type([]) and '_virtual_' not in tags: tags.append('_virtual_') self.master.data.updates.updateBuilderInfo(self._builderid, description, tags) else: self._builderid = yield self.builder.getBuilderId() return self._builderid @defer.inlineCallbacks def startBuild(self, build_status, workerforbuilder): """This method sets up the build, then starts it by invoking the first Step. It returns a Deferred which will fire when the build finishes. 
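# A minimal, hedged sketch of the layering performed by
# setupPropertiesKnownBeforeBuildStarts() above: each updateFromProperties()
# call may overwrite earlier values, so later sources win for a given name.
# Property names and values below are illustrative.
from buildbot.process.properties import Properties

def _property_layering_demo():
    props = Properties()
    props.setProperty("toolchain", "gcc", "global config")       # master.cfg level

    from_change = Properties()
    from_change.setProperty("toolchain", "clang", "Change")      # via a sourcestamp change
    props.updateFromProperties(from_change)

    from_request = Properties()
    from_request.setProperty("toolchain", "msvc", "Scheduler")   # via the build request
    props.updateFromProperties(from_request)

    return props.getProperty("toolchain")                        # -> "msvc": last writer wins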
This Deferred is guaranteed to never errback.""" self.workerforbuilder = workerforbuilder self.conn = None worker = workerforbuilder.worker log.msg("%s.startBuild" % self) self.build_status = build_status # TODO: this will go away when build collapsing is implemented; until # then we just assign the build to the first buildrequest brid = self.requests[0].id builderid = yield self.getBuilderId() self.buildid, self.number = \ yield self.master.data.updates.addBuild( builderid=builderid, buildrequestid=brid, workerid=worker.workerid) self.stopBuildConsumer = yield self.master.mq.startConsuming(self.controlStopBuild, ("control", "builds", str(self.buildid), "stop")) # the preparation step counts the time needed for preparing the worker and getting the locks. # we cannot use a real step as we don't have a worker yet. self.preparation_step = buildstep.BuildStep(name="worker_preparation") self.preparation_step.setBuild(self) yield self.preparation_step.addStep() self.setupOwnProperties() # then narrow WorkerLocks down to the right worker self.locks = [(l.getLockForWorker(workerforbuilder.worker.workername), a) for l, a in self.locks] metrics.MetricCountEvent.log('active_builds', 1) # make sure properties are available to people listening on 'new' # events yield self._flushProperties(None) self.build_status.buildStarted(self) yield self.master.data.updates.setBuildStateString(self.buildid, 'starting') yield self.master.data.updates.generateNewBuildEvent(self.buildid) try: self.setupBuild() # create .steps except Exception: yield self.buildPreparationFailure(Failure(), "setupBuild") self.buildFinished(['Build.setupBuild', 'failed'], EXCEPTION) return # flush properties in the beginning of the build yield self._flushProperties(None) yield self.master.data.updates.setBuildStateString(self.buildid, 'preparing worker') try: ready_or_failure = yield workerforbuilder.prepare(self) except Exception: ready_or_failure = Failure() # If prepare returns True then it is ready and we start a build # If it returns failure then we don't start a new build. if ready_or_failure is not True: yield self.buildPreparationFailure(ready_or_failure, "worker_prepare") if self.stopped: self.buildFinished(["worker", "cancelled"], self.results) elif isinstance(ready_or_failure, Failure) and ready_or_failure.check(interfaces.LatentWorkerCannotSubstantiate): self.buildFinished(["worker", "cannot", "substantiate"], EXCEPTION) else: self.buildFinished(["worker", "not", "available"], RETRY) return # ping the worker to make sure they're still there. If they've # fallen off the map (due to a NAT timeout or something), this # will fail in a couple of minutes, depending upon the TCP # timeout. # # TODO: This can unnecessarily suspend the starting of a build, in # situations where the worker is live but is pushing lots of data to # us in a build. yield self.master.data.updates.setBuildStateString(self.buildid, 'pinging worker') log.msg("starting build %s.. pinging the worker %s" % (self, workerforbuilder)) try: ping_success_or_failure = yield workerforbuilder.ping() except Exception: ping_success_or_failure = Failure() if ping_success_or_failure is not True: yield self.buildPreparationFailure(ping_success_or_failure, "worker_ping") self.buildFinished(["worker", "not", "pinged"], RETRY) return self.conn = workerforbuilder.worker.conn # To retrieve the builddir property, the worker must be attached as we # depend on its path_module. 
Latent workers become attached only after # preparing them, so we can't setup the builddir property earlier like # the rest of properties self.setupWorkerBuildirProperty(workerforbuilder) self.setupWorkerForBuilder(workerforbuilder) self.subs = self.conn.notifyOnDisconnect(self.lostRemote) # tell the remote that it's starting a build, too try: yield self.conn.remoteStartBuild(self.builder.name) except Exception: yield self.buildPreparationFailure(Failure(), "start_build") self.buildFinished(["worker", "not", "building"], RETRY) return yield self.master.data.updates.setBuildStateString(self.buildid, 'acquiring locks') yield self.acquireLocks() yield self.master.data.updates.setStepStateString(self.preparation_step.stepid, "worker ready") yield self.master.data.updates.finishStep(self.preparation_step.stepid, SUCCESS, False) yield self.master.data.updates.setBuildStateString(self.buildid, 'building') # start the sequence of steps self.startNextStep() @defer.inlineCallbacks def buildPreparationFailure(self, why, state_string): log.err(why, "while " + state_string) self.workerforbuilder.worker.putInQuarantine() if isinstance(why, failure.Failure): yield self.preparation_step.addLogWithFailure(why) yield self.master.data.updates.setStepStateString(self.preparation_step.stepid, "error while " + state_string) yield self.master.data.updates.finishStep(self.preparation_step.stepid, EXCEPTION, False) @staticmethod def _canAcquireLocks(lockList, workerforbuilder): for lock, access in lockList: worker_lock = lock.getLockForWorker( workerforbuilder.worker.workername) if not worker_lock.isAvailable(None, access): return False return True def acquireLocks(self, res=None): self._acquiringLock = None if not self.locks: return defer.succeed(None) if self.stopped: return defer.succeed(None) log.msg("acquireLocks(build %s, locks %s)" % (self, self.locks)) for lock, access in self.locks: if not lock.isAvailable(self, access): log.msg("Build %s waiting for lock %s" % (self, lock)) d = lock.waitUntilMaybeAvailable(self, access) d.addCallback(self.acquireLocks) self._acquiringLock = (lock, access, d) return d # all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) return defer.succeed(None) def setUniqueStepName(self, step): # If there are any name collisions, we add a count to the loser # until it is unique. name = step.name if name in self.stepnames: count = self.stepnames[name] count += 1 self.stepnames[name] = count name = "%s_%d" % (step.name, count) else: self.stepnames[name] = 0 step.name = name def setupBuildSteps(self, step_factories): steps = [] for factory in step_factories: step = factory.buildStep() step.setBuild(self) step.setWorker(self.workerforbuilder.worker) self.setUniqueStepName(step) steps.append(step) if self.useProgress: step.setupProgress() return steps def setupBuild(self): # create the actual BuildSteps. self.steps = self.setupBuildSteps(self.stepFactories) owners = set(self.blamelist()) # gather owners from build requests owners.update({r.properties['owner'] for r in self.requests if "owner" in r.properties}) if owners: self.setProperty('owners', sorted(owners), 'Build') self.text = [] # list of text string lists (text2) def _addBuildSteps(self, step_factories): factories = [interfaces.IBuildStepFactory(s) for s in step_factories] return self.setupBuildSteps(factories) def addStepsAfterCurrentStep(self, step_factories): # Add the new steps after the step that is running. 
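# A hedged usage sketch for addStepsAfterCurrentStep() above: a custom step
# may inject follow-up steps into the running build.  The step class, step
# names and commands are illustrative only.
from twisted.internet import defer

from buildbot.plugins import steps
from buildbot.process import buildstep
from buildbot.process.results import SUCCESS

class DiscoverTests(buildstep.BuildStep):
    def run(self):
        # queue two extra shell steps to run right after this one
        self.build.addStepsAfterCurrentStep([
            steps.ShellCommand(name="unit", command=["make", "test"]),
            steps.ShellCommand(name="lint", command=["make", "lint"]),
        ])
        return defer.succeed(SUCCESS)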
# The running step has already been popped from self.steps self.steps[0:0] = self._addBuildSteps(step_factories) def addStepsAfterLastStep(self, step_factories): # Add the new steps to the end. self.steps.extend(self._addBuildSteps(step_factories)) def getNextStep(self): """This method is called to obtain the next BuildStep for this build. When it returns None (or raises a StopIteration exception), the build is complete.""" if not self.steps: return None if not self.conn: return None if self.terminate or self.stopped: # Run any remaining alwaysRun steps, and skip over the others while True: s = self.steps.pop(0) if s.alwaysRun: return s if not self.steps: return None else: return self.steps.pop(0) def startNextStep(self): try: s = self.getNextStep() except StopIteration: s = None if not s: return self.allStepsDone() self.executedSteps.append(s) self.currentStep = s d = defer.maybeDeferred(s.startStep, self.conn) d.addBoth(self._flushProperties) d.addCallback(self._stepDone, s) d.addErrback(self.buildException) @defer.inlineCallbacks def _flushProperties(self, results): # `results` is just passed on to the next callback yield self.master.data.updates.setBuildProperties(self.buildid, self) return results @defer.inlineCallbacks def _stepDone(self, results, step): self.currentStep = None if self.finished: return # build was interrupted, don't keep building terminate = yield self.stepDone(results, step) # interpret/merge results if terminate: self.terminate = True yield self.startNextStep() @defer.inlineCallbacks def stepDone(self, results, step): """This method is called when the BuildStep completes. It is passed a status object from the BuildStep and is responsible for merging the Step's results into those of the overall Build.""" terminate = False text = None if isinstance(results, tuple): results, text = results assert isinstance(results, type(SUCCESS)), "got %r" % (results,) summary = yield step.getBuildResultSummary() if 'build' in summary: text = [summary['build']] log.msg(" step '%s' complete: %s (%s)" % (step.name, statusToString(results), text)) if text: self.text.extend(text) self.master.data.updates.setBuildStateString(self.buildid, bytes2unicode(" ".join(self.text))) self.results, terminate = computeResultAndTermination(step, results, self.results) if not self.conn: # force the results to retry if the connection was lost self.results = RETRY terminate = True return terminate def lostRemote(self, conn=None): # the worker went away. There are several possible reasons for this, # and they aren't necessarily fatal. For now, kill the build, but # TODO: see if we can resume the build when it reconnects. log.msg("%s.lostRemote" % self) self.conn = None self.text = ["lost", "connection"] self.results = RETRY if self.currentStep and self.currentStep.results is None: # this should cause the step to finish. log.msg(" stopping currentStep", self.currentStep) self.currentStep.interrupt(Failure(error.ConnectionLost())) else: self.text = ["lost", "connection"] self.stopped = True if self._acquiringLock: lock, access, d = self._acquiringLock lock.stopWaitingUntilAvailable(self, access, d) def controlStopBuild(self, key, params): return self.stopBuild(**params) def stopBuild(self, reason="", results=CANCELLED): # the idea here is to let the user cancel a build because, e.g., # they realized they committed a bug and they don't want to waste # the time building something that they know will fail. Another # reason might be to abandon a stuck build. 
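# A hedged, self-contained illustration of the result folding behind
# stepDone() above.  computeResultAndTermination() also honours per-step flags
# (flunkOnFailure and friends); worst_status() below shows only the core rule
# that the overall result can only get worse as steps report in.
from buildbot.process.results import FAILURE, SUCCESS, WARNINGS, worst_status

overall = SUCCESS
for step_result in (SUCCESS, WARNINGS, FAILURE, SUCCESS):
    overall = worst_status(overall, step_result)
assert overall == FAILURE  # one failing step is enough to fail the build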
We want to mark the # build as failed quickly rather than waiting for the worker's # timeout to kill it on its own. log.msg(" %s: stopping build: %s %d" % (self, reason, results)) if self.finished: return # TODO: include 'reason' in this point event self.stopped = True if self.currentStep and self.currentStep.results is None: self.currentStep.interrupt(reason) self.results = results if self._acquiringLock: lock, access, d = self._acquiringLock lock.stopWaitingUntilAvailable(self, access, d) def allStepsDone(self): if self.results == FAILURE: text = ["failed"] elif self.results == WARNINGS: text = ["warnings"] elif self.results == EXCEPTION: text = ["exception"] elif self.results == RETRY: text = ["retry"] elif self.results == CANCELLED: text = ["cancelled"] else: text = ["build", "successful"] text.extend(self.text) return self.buildFinished(text, self.results) def buildException(self, why): log.msg("%s.buildException" % self) log.err(why) # try to finish the build, but since we've already faced an exception, # this may not work well. try: self.buildFinished(["build", "exception"], EXCEPTION) except Exception: log.err(Failure(), 'while finishing a build with an exception') @defer.inlineCallbacks def buildFinished(self, text, results): """This method must be called when the last Step has completed. It marks the Build as complete and returns the Builder to the 'idle' state. It takes two arguments which describe the overall build status: text, results. 'results' is one of the possible results (see buildbot.process.results). If 'results' is SUCCESS or WARNINGS, we will permit any dependent builds to start. If it is 'FAILURE', those builds will be abandoned.""" try: self.stopBuildConsumer.stopConsuming() self.finished = True if self.conn: self.subs.unsubscribe() self.subs = None self.conn = None log.msg(" %s: build finished" % self) self.results = worst_status(self.results, results) self.build_status.setText(text) self.build_status.setResults(self.results) self.build_status.buildFinished() eventually(self.releaseLocks) metrics.MetricCountEvent.log('active_builds', -1) yield self.master.data.updates.setBuildStateString(self.buildid, bytes2unicode(" ".join(text))) yield self.master.data.updates.finishBuild(self.buildid, self.results) if self.results == EXCEPTION: # When a build has an exception, put the worker in quarantine for a few seconds # to make sure we try next build with another worker self.workerforbuilder.worker.putInQuarantine() elif self.results != RETRY: # This worker looks sane if status is neither retry or exception # Avoid a race in case the build step reboot the worker if self.workerforbuilder.worker is not None: self.workerforbuilder.worker.resetQuarantine() # mark the build as finished self.workerforbuilder.buildFinished() self.builder.buildFinished(self, self.workerforbuilder) self._tryScheduleBuildsAfterLockUnlock(build_finished=True) except Exception: log.err(None, 'from finishing a build; this is a ' 'serious error - please file a bug at http://buildbot.net') def releaseLocks(self): if self.locks: log.msg("releaseLocks(%s): %s" % (self, self.locks)) for lock, access in self.locks: if lock.isOwner(self, access): lock.release(self, access) self._tryScheduleBuildsAfterLockUnlock(locks_released=True) def _tryScheduleBuildsAfterLockUnlock(self, locks_released=False, build_finished=False): # we need to inform the botmaster to attempt to schedule any pending # build request if we released any locks. 
This is because buildrequest # may be started for a completely unrelated builder and yet depend on # a lock released by this build. # # TODO: the current approach is dumb as we just attempt to schedule # all buildrequests. A much better idea would be to record the reason # of why a buildrequest was not scheduled in the BuildRequestDistributor # and then attempt to schedule only these buildrequests which may have # had that reason resolved. # this function is complicated by the fact that the botmaster must be # informed only when all locks have been released and the actions in # buildFinished have concluded. Since releaseLocks is called using # eventually this may happen in any order. self._locks_released = self._locks_released or locks_released self._build_finished = self._build_finished or build_finished if not self.locks: return if self._locks_released and self._build_finished: self.builder.botmaster.maybeStartBuildsForAllBuilders() def getSummaryStatistic(self, name, summary_fn, initial_value=_sentinel): step_stats_list = [ st.getStatistic(name) for st in self.executedSteps if st.hasStatistic(name)] if initial_value is self._sentinel: return reduce(summary_fn, step_stats_list) return reduce(summary_fn, step_stats_list, initial_value) @defer.inlineCallbacks def getUrl(self): builder_id = yield self.getBuilderId() return getURLForBuild(self.master, builder_id, self.number) def waitUntilFinished(self): return self.master.mq.waitUntilEvent( ('builds', str(self.buildid), 'finished'), lambda: self.finished) def getWorkerInfo(self): return self.workerforbuilder.worker.worker_status.info # IBuildControl def getStatus(self): return self.build_status # stopBuild is defined earlier components.registerAdapter( lambda build: interfaces.IProperties(build.properties), Build, interfaces.IProperties) buildbot-2.6.0/master/buildbot/process/builder.py000066400000000000000000000471301361162603000221030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
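# A hedged sketch of what getSummaryStatistic() in Build above computes: the
# named per-step statistic is folded with functools.reduce.  The statistic
# name and values are illustrative; real steps would record them with
# setStatistic().
from functools import reduce

per_step_warnings = [3, 0, 7]        # stand-ins for step.getStatistic('warnings')
total = reduce(lambda a, b: a + b, per_step_warnings, 0)   # -> 10 with initial_value=0
# roughly equivalent to: build.getSummaryStatistic('warnings', lambda a, b: a + b, 0)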
# # Copyright Buildbot Team Members import warnings import weakref from twisted.application import service from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot.data import resultspec from buildbot.interfaces import IRenderable from buildbot.process import buildrequest from buildbot.process import workerforbuilder from buildbot.process.build import Build from buildbot.process.properties import Properties from buildbot.process.results import RETRY from buildbot.util import bytes2unicode from buildbot.util import epoch2datetime from buildbot.util import service as util_service def enforceChosenWorker(bldr, workerforbuilder, breq): if 'workername' in breq.properties: workername = breq.properties['workername'] if isinstance(workername, str): return workername == workerforbuilder.worker.workername return True class Builder(util_service.ReconfigurableServiceMixin, service.MultiService): # reconfigure builders before workers reconfig_priority = 196 @property def expectations(self): warnings.warn("'Builder.expectations' is deprecated.") return None def __init__(self, name): super().__init__() self.name = name # this is filled on demand by getBuilderId; don't access it directly self._builderid = None # build/wannabuild slots: Build objects move along this sequence self.building = [] # old_building holds active builds that were stolen from a predecessor self.old_building = weakref.WeakKeyDictionary() # workers which have connected but which are not yet available. # These are always in the ATTACHING state. self.attaching_workers = [] # workers at our disposal. Each WorkerForBuilder instance has a # .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a # Build is about to start, to make sure that they're still alive. self.workers = [] self.config = None self.builder_status = None # Tracks config version for locks self.config_version = None @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # find this builder in the config for builder_config in new_config.builders: if builder_config.name == self.name: found_config = True break assert found_config, "no config found for builder '%s'" % self.name # set up a builder status object on the first reconfig if not self.builder_status: self.builder_status = self.master.status.builderAdded( name=builder_config.name, basedir=builder_config.builddir, tags=builder_config.tags, description=builder_config.description) self.config = builder_config self.config_version = self.master.config_version # allocate builderid now, so that the builder is visible in the web # UI; without this, the builder wouldn't appear until it preformed a # build. builderid = yield self.getBuilderId() self.master.data.updates.updateBuilderInfo(builderid, builder_config.description, builder_config.tags) self.builder_status.setDescription(builder_config.description) self.builder_status.setTags(builder_config.tags) self.builder_status.setWorkernames(self.config.workernames) self.builder_status.setCacheSize(new_config.caches['Builds']) # if we have any workers attached which are no longer configured, # drop them. 
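# A hedged configuration sketch around enforceChosenWorker() above: a force
# scheduler exposes a 'workername' property so a user can pin a build to one
# worker.  Builder, worker and scheduler names are illustrative, and the empty
# BuildFactory stands in for a real one.
from buildbot.plugins import schedulers, util
from buildbot.process.builder import enforceChosenWorker

force = schedulers.ForceScheduler(
    name="force",
    builderNames=["runtests"],
    properties=[util.StringParameter(name="workername", label="run on worker")],
)

runtests = util.BuilderConfig(
    name="runtests", workernames=["w1", "w2"], factory=util.BuildFactory(),
    canStartBuild=enforceChosenWorker)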
new_workernames = set(builder_config.workernames) self.workers = [w for w in self.workers if w.worker.workername in new_workernames] def __repr__(self): return "" % (self.name, id(self)) def getBuilderIdForName(self, name): # buildbot.config should ensure this is already unicode, but it doesn't # hurt to check again name = bytes2unicode(name) return self.master.data.updates.findBuilderId(name) def getBuilderId(self): # since findBuilderId is idempotent, there's no reason to add # additional locking around this function. if self._builderid: return defer.succeed(self._builderid) d = self.getBuilderIdForName(self.name) @d.addCallback def keep(builderid): self._builderid = builderid return builderid return d @defer.inlineCallbacks def getOldestRequestTime(self): """Returns the submitted_at of the oldest unclaimed build request for this builder, or None if there are no build requests. @returns: datetime instance or None, via Deferred """ bldrid = yield self.getBuilderId() unclaimed = yield self.master.data.get( ('builders', bldrid, 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])], order=['submitted_at'], limit=1) if unclaimed: return unclaimed[0]['submitted_at'] @defer.inlineCallbacks def getNewestCompleteTime(self): """Returns the complete_at of the latest completed build request for this builder, or None if there are no such build requests. @returns: datetime instance or None, via Deferred """ bldrid = yield self.getBuilderId() completed = yield self.master.data.get( ('builders', bldrid, 'buildrequests'), [resultspec.Filter('complete', 'eq', [False])], order=['-complete_at'], limit=1) if completed: return completed[0]['complete_at'] else: return None def getBuild(self, number): for b in self.building: if b.build_status and b.build_status.number == number: return b for b in self.old_building: if b.build_status and b.build_status.number == number: return b return None def addLatentWorker(self, worker): assert interfaces.ILatentWorker.providedBy(worker) for w in self.workers: if w == worker: break else: wfb = workerforbuilder.LatentWorkerForBuilder(worker, self) self.workers.append(wfb) self.botmaster.maybeStartBuildsForBuilder(self.name) @defer.inlineCallbacks def attached(self, worker, commands): """This is invoked by the Worker when the self.workername bot registers their builder. @type worker: L{buildbot.worker.Worker} @param worker: the Worker that represents the worker as a whole @type commands: dict: string -> string, or None @param commands: provides the worker's version of each RemoteCommand @rtype: L{twisted.internet.defer.Deferred} @return: a Deferred that fires (with 'self') when the worker-side builder is fully attached and ready to accept commands. """ for w in self.attaching_workers + self.workers: if w.worker == worker: # already attached to them. This is fairly common, since # attached() gets called each time we receive the builder # list from the worker, and we ask for it each time we add or # remove a builder. So if the worker is hosting builders # A,B,C, and the config file changes A, we'll remove A and # re-add it, triggering two builder-list requests, getting # two redundant calls to attached() for B, and another two # for C. # # Therefore, when we see that we're already attached, we can # just ignore it. 
return self wfb = workerforbuilder.WorkerForBuilder() wfb.setBuilder(self) self.attaching_workers.append(wfb) try: wfb = yield wfb.attached(worker, commands) self.attaching_workers.remove(wfb) self.workers.append(wfb) return self except Exception as e: # pragma: no cover # already log.err'ed by WorkerForBuilder._attachFailure # TODO: remove from self.workers (except that detached() should get # run first, right?) log.err(e, 'worker failed to attach') return None def detached(self, worker): """This is called when the connection to the bot is lost.""" for wfb in self.attaching_workers + self.workers: if wfb.worker == worker: break else: log.msg("WEIRD: Builder.detached(%s) (%s)" " not in attaching_workers(%s)" " or workers(%s)" % (worker, worker.workername, self.attaching_workers, self.workers)) return if wfb in self.attaching_workers: self.attaching_workers.remove(wfb) if wfb in self.workers: self.workers.remove(wfb) # inform the WorkerForBuilder that their worker went away wfb.detached() def getAvailableWorkers(self): return [wfb for wfb in self.workers if wfb.isAvailable()] @defer.inlineCallbacks def canStartBuild(self, workerforbuilder, buildrequest): can_start = True # check whether the locks that the build will acquire can actually be # acquired locks = self.config.locks worker = workerforbuilder.worker props = None # don't unnecessarily setup properties for build def setupPropsIfNeeded(props): if props is not None: return props = Properties() Build.setupPropertiesKnownBeforeBuildStarts(props, [buildrequest], self, workerforbuilder) return props if worker.builds_may_be_incompatible: # Check if the latent worker is actually compatible with the build. # The instance type of the worker may depend on the properties of # the build that substantiated it. props = setupPropsIfNeeded(props) can_start = yield worker.isCompatibleWithBuild(props) if not can_start: return False if IRenderable.providedBy(locks): # collect properties that would be set for a build if we # started it now and render locks using it props = setupPropsIfNeeded(props) locks = yield props.render(locks) locks = yield self.botmaster.getLockFromLockAccesses(locks, self.config_version) if locks: can_start = Build._canAcquireLocks(locks, workerforbuilder) if can_start is False: return can_start if callable(self.config.canStartBuild): can_start = yield self.config.canStartBuild(self, workerforbuilder, buildrequest) return can_start @defer.inlineCallbacks def _startBuildFor(self, workerforbuilder, buildrequests): build = self.config.factory.newBuild(buildrequests) build.setBuilder(self) props = build.getProperties() # give the properties a reference back to this build props.build = build Build.setupPropertiesKnownBeforeBuildStarts( props, build.requests, build.builder, workerforbuilder) log.msg("starting build %s using worker %s" % (build, workerforbuilder)) # set up locks locks = yield build.render(self.config.locks) yield build.setLocks(locks) if self.config.env: build.setWorkerEnvironment(self.config.env) # append the build to self.building self.building.append(build) # The worker is ready to go. workerforbuilder.buildStarted() sets its # state to BUILDING (so we won't try to use it for any other builds). # This gets set back to IDLE by the Build itself when it finishes. # Note: This can't be done in `Build.startBuild`, since it needs to be done # synchronously, before the BuildRequestDistributor looks at # another build request. 
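# A hedged sketch of the renderable-locks path handled by canStartBuild()
# above: a builder's locks may be a renderable evaluated against the build's
# properties.  The lock name, the 'heavy' property and the empty factory are
# illustrative.
from buildbot.plugins import util

compile_lock = util.MasterLock("compile", maxCount=2)

@util.renderer
def pick_locks(props):
    # heavy builds take the lock exclusively, everything else shares it
    if props.getProperty("heavy", False):
        return [compile_lock.access("exclusive")]
    return [compile_lock.access("counting")]

compile_builder = util.BuilderConfig(
    name="compile", workernames=["w1"], factory=util.BuildFactory(),
    locks=pick_locks)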
workerforbuilder.buildStarted() # create the BuildStatus object that goes with the Build bs = self.builder_status.newBuild() # let status know self.master.status.build_started(buildrequests[0].id, self.name, bs) # start the build. This will first set up the steps, then tell the # BuildStatus that it has started, which will announce it to the world # (through our BuilderStatus object, which is its parent). Finally it # will start the actual build process. This is done with a fresh # Deferred since _startBuildFor should not wait until the build is # finished. This uses `maybeDeferred` to ensure that any exceptions # raised by startBuild are treated as deferred errbacks (see # http://trac.buildbot.net/ticket/2428). d = defer.maybeDeferred(build.startBuild, bs, workerforbuilder) # this shouldn't happen. if it does, the worker will be wedged d.addErrback(log.err, 'from a running build; this is a ' 'serious error - please file a bug at http://buildbot.net') return True def setupProperties(self, props): props.setProperty("buildername", self.name, "Builder") if self.config.properties: for propertyname in self.config.properties: props.setProperty(propertyname, self.config.properties[propertyname], "Builder") if self.config.defaultProperties: for propertyname in self.config.defaultProperties: if propertyname not in props: props.setProperty(propertyname, self.config.defaultProperties[propertyname], "Builder") def buildFinished(self, build, wfb): """This is called when the Build has finished (either success or failure). Any exceptions during the build are reported with results=FAILURE, not with an errback.""" # by the time we get here, the Build has already released the worker, # which will trigger a check for any now-possible build requests # (maybeStartBuilds) results = build.build_status.getResults() self.building.remove(build) if results == RETRY: d = self._resubmit_buildreqs(build) d.addErrback(log.err, 'while resubmitting a build request') else: complete_at_epoch = self.master.reactor.seconds() complete_at = epoch2datetime(complete_at_epoch) brids = [br.id for br in build.requests] d = self.master.data.updates.completeBuildRequests( brids, results, complete_at=complete_at) # nothing in particular to do with this deferred, so just log it if # it fails.. d.addErrback(log.err, 'while marking build requests as completed') if wfb.worker: wfb.worker.releaseLocks() def _resubmit_buildreqs(self, build): brids = [br.id for br in build.requests] d = self.master.data.updates.unclaimBuildRequests(brids) @d.addCallback def notify(_): pass # XXX method does not exist # self._msg_buildrequests_unclaimed(build.requests) return d # Build Creation def maybeStartBuild(self, workerforbuilder, breqs): # This method is called by the botmaster whenever this builder should # start a set of buildrequests on a worker. Do not call this method # directly - use master.botmaster.maybeStartBuildsForBuilder, or one of # the other similar methods if more appropriate # first, if we're not running, then don't start builds; stopService # uses this to ensure that any ongoing maybeStartBuild invocations # are complete before it stops. if not self.running: return defer.succeed(False) # If the build fails from here on out (e.g., because a worker has failed), # it will be handled outside of this function. TODO: test that! 
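# A hedged sketch of the two builder-level dictionaries consumed by
# setupProperties() above: 'properties' is applied to every build (and wins
# over request-supplied values of the same name), while 'defaultProperties'
# only fills names nothing else has set.  Names and values are illustrative.
from buildbot.plugins import util

docs_builder = util.BuilderConfig(
    name="docs", workernames=["w1"], factory=util.BuildFactory(),
    properties={"channel": "stable"},
    defaultProperties={"warn_level": "high"})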
return self._startBuildFor(workerforbuilder, breqs) # a few utility functions to make the maybeStartBuild a bit shorter and # easier to read def getCollapseRequestsFn(self): """Helper function to determine which collapseRequests function to use from L{_collapseRequests}, or None for no merging""" # first, seek through builder, global, and the default collapseRequests_fn = self.config.collapseRequests if collapseRequests_fn is None: collapseRequests_fn = self.master.config.collapseRequests if collapseRequests_fn is None: collapseRequests_fn = True # then translate False and True properly if collapseRequests_fn is False: collapseRequests_fn = None elif collapseRequests_fn is True: collapseRequests_fn = self._defaultCollapseRequestFn return collapseRequests_fn @staticmethod def _defaultCollapseRequestFn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(master, brdict1, brdict2) @implementer(interfaces.IBuilderControl) class BuilderControl: def __init__(self, builder, control): self.original = builder self.control = control @defer.inlineCallbacks def getPendingBuildRequestControls(self): master = self.original.master # TODO Use DATA API brdicts = yield master.db.buildrequests.getBuildRequests( buildername=self.original.name, claimed=False) # convert those into BuildRequest objects buildrequests = [] for brdict in brdicts: br = yield buildrequest.BuildRequest.fromBrdict( self.control.master, brdict) buildrequests.append(br) # and return the corresponding control objects return [buildrequest.BuildRequestControl(self.original, r) for r in buildrequests] def getBuild(self, number): return self.original.getBuild(number) def ping(self): if not self.original.workers: return defer.succeed(False) # interfaces.NoWorkerError dl = [] for w in self.original.workers: dl.append(w.ping(self.original.builder_status)) d = defer.DeferredList(dl) d.addCallback(self._gatherPingResults) return d def _gatherPingResults(self, res): for ignored, success in res: if not success: return False return True buildbot-2.6.0/master/buildbot/process/buildrequest.py000066400000000000000000000312461361162603000231660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import calendar from twisted.internet import defer from buildbot.data import resultspec from buildbot.process import properties from buildbot.process.results import SKIPPED class BuildRequestCollapser: # brids is a list of the new added buildrequests id # This class is called before generated the 'new' event for the # buildrequest # Before adding new buildset/buildrequests, we must examine each unclaimed # buildrequest. # EG: # 1. get the list of all unclaimed buildrequests: # - We must exclude all buildsets which have at least 1 claimed buildrequest # 2. 
For each unclaimed buildrequests, if compatible with the new request # (sourcestamps match, except for revision) Then: # 2.1. claim it # 2.2. complete it with result SKIPPED def __init__(self, master, brids): self.master = master self.brids = brids @defer.inlineCallbacks def _getUnclaimedBrs(self, builderid): # Retrieve the list of Brs for all unclaimed builds unclaim_brs = yield self.master.data.get(('builders', builderid, 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])]) # sort by submitted_at, so the first is the oldest unclaim_brs.sort(key=lambda brd: brd['submitted_at']) return unclaim_brs @defer.inlineCallbacks def collapse(self): brids = set() for brid in self.brids: # Get the BuildRequest object br = yield self.master.data.get(('buildrequests', brid)) # Retrieve the buildername builderid = br['builderid'] bldrdict = yield self.master.data.get(('builders', builderid)) # Get the builder object bldr = self.master.botmaster.builders.get(bldrdict['name']) # Get the Collapse BuildRequest function (from the configuration) collapseRequestsFn = bldr.getCollapseRequestsFn() if bldr else None unclaim_brs = yield self._getUnclaimedBrs(builderid) # short circuit if there is no merging to do if not collapseRequestsFn or not unclaim_brs: continue for unclaim_br in unclaim_brs: if unclaim_br['buildrequestid'] == br['buildrequestid']: continue canCollapse = yield collapseRequestsFn(self.master, bldr, br, unclaim_br) if canCollapse is True: brids.add(unclaim_br['buildrequestid']) brids = list(brids) if brids: # Claim the buildrequests yield self.master.data.updates.claimBuildRequests(brids) # complete the buildrequest with result SKIPPED. yield self.master.data.updates.completeBuildRequests(brids, SKIPPED) return brids class TempSourceStamp: # temporary fake sourcestamp ATTRS = ('branch', 'revision', 'repository', 'project', 'codebase') def __init__(self, ssdict): self._ssdict = ssdict def __getattr__(self, attr): patch = self._ssdict.get('patch') if attr == 'patch': if patch: return (patch['level'], patch['body'], patch['subdir']) return None elif attr == 'patch_info': if patch: return (patch['author'], patch['comment']) return (None, None) elif attr in self.ATTRS or attr == 'ssid': return self._ssdict[attr] raise AttributeError(attr) def asSSDict(self): return self._ssdict PATCH_ATTRS = ('level', 'body', 'subdir', 'author', 'comment') def asDict(self): # This return value should match the kwargs to # SourceStampsConnectorComponent.findSourceStampId result = {} for attr in self.ATTRS: result[attr] = self._ssdict.get(attr) patch = self._ssdict.get('patch') or {} for attr in self.PATCH_ATTRS: result['patch_%s' % attr] = patch.get(attr) assert all( isinstance(val, (str, int, type(None))) for attr, val in result.items() ), result return result class TempChange: # temporary fake change def __init__(self, d): self._chdict = d def __getattr__(self, attr): if attr == 'who': return self._chdict['author'] elif attr == 'properties': return properties.Properties.fromDict(self._chdict['properties']) return self._chdict[attr] def asChDict(self): return self._chdict class BuildRequest: """ A rolled-up encapsulation of all of the data relevant to a build request. This class is used by the C{nextBuild} and C{collapseRequests} configuration parameters, as well as in starting a build. Construction of a BuildRequest object is a heavyweight process involving a lot of database queries, so it should be avoided where possible. See bug #1894. 
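# A hedged sketch of a custom collapseRequests callable matching the signature
# used by _defaultCollapseRequestFn() in builder.py: it receives the master,
# the builder and two request dictionaries, and returns (a Deferred firing)
# True to collapse them.  The release-builder rule is illustrative.
from twisted.internet import defer

from buildbot.process.buildrequest import BuildRequest

@defer.inlineCallbacks
def collapse_unless_release(master, builder, req1, req2):
    if builder.name.startswith("release-"):
        return False                 # never collapse release builds
    collapsible = yield BuildRequest.canBeCollapsed(master, req1, req2)
    return collapsible

# registered in master.cfg, e.g.:  c['collapseRequests'] = collapse_unless_release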
@type reason: string @ivar reason: the reason this Build is being requested. Schedulers provide this, but for forced builds the user requesting the build will provide a string. It comes from the buildsets table. @type properties: L{properties.Properties} @ivar properties: properties that should be applied to this build, taken from the buildset containing this build request @ivar submittedAt: a timestamp (seconds since epoch) when this request was submitted to the Builder. This is used by the CVS step to compute a checkout timestamp, as well as by the master to prioritize build requests from oldest to newest. @ivar buildername: name of the requested builder @ivar priority: request priority @ivar id: build request ID @ivar bsid: ID of the parent buildset """ submittedAt = None sources = {} @classmethod def fromBrdict(cls, master, brdict): """ Construct a new L{BuildRequest} from a dictionary as returned by L{BuildRequestsConnectorComponent.getBuildRequest}. This method uses a cache, which may result in return of stale objects; for the most up-to-date information, use the database connector methods. @param master: current build master @param brdict: build request dictionary @returns: L{BuildRequest}, via Deferred """ cache = master.caches.get_cache("BuildRequests", cls._make_br) return cache.get(brdict['buildrequestid'], brdict=brdict, master=master) @classmethod @defer.inlineCallbacks def _make_br(cls, brid, brdict, master): buildrequest = cls() buildrequest.id = brid buildrequest.bsid = brdict['buildsetid'] builder = yield master.db.builders.getBuilder(brdict['builderid']) buildrequest.buildername = builder['name'] buildrequest.builderid = brdict['builderid'] buildrequest.priority = brdict['priority'] dt = brdict['submitted_at'] buildrequest.submittedAt = dt and calendar.timegm(dt.utctimetuple()) buildrequest.master = master buildrequest.waitedFor = brdict['waited_for'] # fetch the buildset to get the reason buildset = yield master.db.buildsets.getBuildset(brdict['buildsetid']) assert buildset # schema should guarantee this buildrequest.reason = buildset['reason'] # fetch the buildset properties, and convert to Properties buildset_properties = yield master.db.buildsets.getBuildsetProperties(brdict['buildsetid']) buildrequest.properties = properties.Properties.fromDict( buildset_properties) # make a fake sources dict (temporary) bsdata = yield master.data.get(('buildsets', str(buildrequest.bsid))) assert bsdata[ 'sourcestamps'], "buildset must have at least one sourcestamp" buildrequest.sources = {} for ssdata in bsdata['sourcestamps']: ss = buildrequest.sources[ssdata['codebase']] = TempSourceStamp(ssdata) changes = yield master.data.get(("sourcestamps", ss.ssid, "changes")) ss.changes = [TempChange(change) for change in changes] return buildrequest @staticmethod @defer.inlineCallbacks def canBeCollapsed(master, br1, br2): """ Returns true if both buildrequest can be merged, via Deferred. This implements Buildbot's default collapse strategy. 
""" # short-circuit: if these are for the same buildset, collapse away if br1['buildsetid'] == br2['buildsetid']: return True # get the buidlsets for each buildrequest selfBuildsets = yield master.data.get( ('buildsets', str(br1['buildsetid']))) otherBuildsets = yield master.data.get( ('buildsets', str(br2['buildsetid']))) # extract sourcestamps, as dictionaries by codebase selfSources = dict((ss['codebase'], ss) for ss in selfBuildsets['sourcestamps']) otherSources = dict((ss['codebase'], ss) for ss in otherBuildsets['sourcestamps']) # if the sets of codebases do not match, we can't collapse if set(selfSources) != set(otherSources): return False for c, selfSS in selfSources.items(): otherSS = otherSources[c] if selfSS['repository'] != otherSS['repository']: return False if selfSS['branch'] != otherSS['branch']: return False if selfSS['project'] != otherSS['project']: return False # anything with a patch won't be collapsed if selfSS['patch'] or otherSS['patch']: return False # get changes & compare selfChanges = yield master.data.get(('sourcestamps', selfSS['ssid'], 'changes')) otherChanges = yield master.data.get(('sourcestamps', otherSS['ssid'], 'changes')) # if both have changes, proceed, else fail - if no changes check revision instead if selfChanges and otherChanges: continue if selfChanges and not otherChanges: return False if not selfChanges and otherChanges: return False # else check revisions if selfSS['revision'] != otherSS['revision']: return False return True def mergeSourceStampsWith(self, others): """ Returns one merged sourcestamp for every codebase """ # get all codebases from all requests all_codebases = set(self.sources) for other in others: all_codebases |= set(other.sources) all_merged_sources = {} # walk along the codebases for codebase in all_codebases: all_sources = [] if codebase in self.sources: all_sources.append(self.sources[codebase]) for other in others: if codebase in other.sources: all_sources.append(other.sources[codebase]) assert all_sources, "each codebase should have at least one sourcestamp" # TODO: select the sourcestamp that best represents the merge, # preferably the latest one. This used to be accomplished by # looking at changeids and picking the highest-numbered. all_merged_sources[codebase] = all_sources[-1] return list(all_merged_sources.values()) def mergeReasons(self, others): """Return a reason for the merged build request.""" reasons = [] for req in [self] + others: if req.reason and req.reason not in reasons: reasons.append(req.reason) return ", ".join(reasons) def getSubmitTime(self): return self.submittedAt buildbot-2.6.0/master/buildbot/process/buildrequestdistributor.py000066400000000000000000000461241361162603000254620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import copy import random from datetime import datetime from dateutil.tz import tzutc from twisted.internet import defer from twisted.python import log from twisted.python.failure import Failure from buildbot.data import resultspec from buildbot.process import metrics from buildbot.process.buildrequest import BuildRequest from buildbot.util import epoch2datetime from buildbot.util import service class BuildChooserBase: # # WARNING: This API is experimental and in active development. # # This internal object selects a new build+worker pair. It acts as a # generator, initializing its state on creation and offering up new # pairs until exhaustion. The object can be destroyed at any time # (eg, before the list exhausts), and can be "restarted" by abandoning # an old instance and creating a new one. # # The entry point is: # * bc.chooseNextBuild() - get the next (worker, [breqs]) or # (None, None) # # The default implementation of this class implements a default # chooseNextBuild() that delegates out to two other functions: # * bc.popNextBuild() - get the next (worker, breq) pair def __init__(self, bldr, master): self.bldr = bldr self.master = master self.breqCache = {} self.unclaimedBrdicts = None @defer.inlineCallbacks def chooseNextBuild(self): # Return the next build, as a (worker, [breqs]) pair worker, breq = yield self.popNextBuild() if not worker or not breq: return (None, None) return (worker, [breq]) # Must be implemented by subclass def popNextBuild(self): # Pick the next (worker, breq) pair; note this is pre-merge, so # it's just one breq raise NotImplementedError("Subclasses must implement this!") # - Helper functions that are generally useful to all subclasses - @defer.inlineCallbacks def _fetchUnclaimedBrdicts(self): # Sets up a cache of all the unclaimed brdicts. The cache is # saved at self.unclaimedBrdicts cache. If the cache already # exists, this function does nothing. If a refetch is desired, set # the self.unclaimedBrdicts to None before calling.""" if self.unclaimedBrdicts is None: # TODO: use order of the DATA API brdicts = yield self.master.data.get(('builders', (yield self.bldr.getBuilderId()), 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])]) # sort by submitted_at, so the first is the oldest brdicts.sort(key=lambda brd: brd['submitted_at']) self.unclaimedBrdicts = brdicts return self.unclaimedBrdicts @defer.inlineCallbacks def _getBuildRequestForBrdict(self, brdict): # Turn a brdict into a BuildRequest into a brdict. This is useful # for API like 'nextBuild', which operate on BuildRequest objects. breq = self.breqCache.get(brdict['buildrequestid']) if not breq: breq = yield BuildRequest.fromBrdict(self.master, brdict) if breq: self.breqCache[brdict['buildrequestid']] = breq return breq def _getBrdictForBuildRequest(self, breq): # Turn a BuildRequest back into a brdict. 
This operates from the # cache, which must be set up once via _fetchUnclaimedBrdicts if breq is None: return None brid = breq.id for brdict in self.unclaimedBrdicts: if brid == brdict['buildrequestid']: return brdict return None def _removeBuildRequest(self, breq): # Remove a BuildrRequest object (and its brdict) # from the caches if breq is None: return brdict = self._getBrdictForBuildRequest(breq) if brdict is not None: self.unclaimedBrdicts.remove(brdict) if breq.id in self.breqCache: del self.breqCache[breq.id] def _getUnclaimedBuildRequests(self): # Retrieve the list of BuildRequest objects for all unclaimed builds return defer.gatherResults([ self._getBuildRequestForBrdict(brdict) for brdict in self.unclaimedBrdicts]) class BasicBuildChooser(BuildChooserBase): # BasicBuildChooser generates build pairs via the configuration points: # * config.nextWorker (or random.choice if not set) # * config.nextBuild (or "pop top" if not set) # # For N workers, this will call nextWorker at most N times. If nextWorker # returns a worker that cannot satisfy the build chosen by nextBuild, # it will search for a worker that can satisfy the build. If one is found, # the workers that cannot be used are "recycled" back into a list # to be tried, in order, for the next chosen build. # # We check whether Builder.canStartBuild returns True for a particular # worker. It evaluates any Build properties that are known before build # and checks whether the worker may satisfy them. For example, the worker # must have the locks available. def __init__(self, bldr, master): super().__init__(bldr, master) self.nextWorker = self.bldr.config.nextWorker if not self.nextWorker: self.nextWorker = lambda _, workers, __: random.choice( workers) if workers else None self.workerpool = self.bldr.getAvailableWorkers() # Pick workers one at a time from the pool, and if the Builder says # they're usable (eg, locks can be satisfied), then prefer those # workers. self.preferredWorkers = [] self.nextBuild = self.bldr.config.nextBuild @defer.inlineCallbacks def popNextBuild(self): nextBuild = (None, None) while True: # 1. pick a build breq = yield self._getNextUnclaimedBuildRequest() if not breq: break # 2. pick a worker worker = yield self._popNextWorker(breq) if not worker: break # either satisfy this build or we leave it for another day self._removeBuildRequest(breq) # 3. make sure worker+ is usable for the breq recycledWorkers = [] while worker: canStart = yield self.canStartBuild(worker, breq) if canStart: break # try a different worker recycledWorkers.append(worker) worker = yield self._popNextWorker(breq) # recycle the workers that we didn't use to the head of the queue # this helps ensure we run 'nextWorker' only once per worker choice if recycledWorkers: self._unpopWorkers(recycledWorkers) # 4. done? 
otherwise we will try another build if worker: nextBuild = (worker, breq) break return nextBuild @defer.inlineCallbacks def _getNextUnclaimedBuildRequest(self): # ensure the cache is there yield self._fetchUnclaimedBrdicts() if not self.unclaimedBrdicts: return None if self.nextBuild: # nextBuild expects BuildRequest objects breqs = yield self._getUnclaimedBuildRequests() try: nextBreq = yield self.nextBuild(self.bldr, breqs) if nextBreq not in breqs: nextBreq = None except Exception: log.err(Failure(), "from _getNextUnclaimedBuildRequest for builder '%s'" % (self.bldr,)) nextBreq = None else: # otherwise just return the first build brdict = self.unclaimedBrdicts[0] nextBreq = yield self._getBuildRequestForBrdict(brdict) return nextBreq @defer.inlineCallbacks def _popNextWorker(self, buildrequest): # use 'preferred' workers first, if we have some ready if self.preferredWorkers: worker = self.preferredWorkers.pop(0) return worker while self.workerpool: try: worker = yield self.nextWorker(self.bldr, self.workerpool, buildrequest) except Exception: log.err(Failure(), "from nextWorker for builder '%s'" % (self.bldr,)) worker = None if not worker or worker not in self.workerpool: # bad worker or no worker returned break self.workerpool.remove(worker) return worker return None def _unpopWorkers(self, workers): # push the workers back to the front self.preferredWorkers[:0] = workers def canStartBuild(self, worker, breq): return self.bldr.canStartBuild(worker, breq) class BuildRequestDistributor(service.AsyncMultiService): """ Special-purpose class to handle distributing build requests to builders by calling their C{maybeStartBuild} method. This takes account of the C{prioritizeBuilders} configuration, and is highly re-entrant; that is, if a new build request arrives while builders are still working on the previous build request, then this class will correctly re-prioritize invocations of builders' C{maybeStartBuild} methods. """ BuildChooser = BasicBuildChooser def __init__(self, botmaster): super().__init__() self.botmaster = botmaster # lock to ensure builders are only sorted once at any time self.pending_builders_lock = defer.DeferredLock() # sorted list of names of builders that need their maybeStartBuild # method invoked. self._pending_builders = [] self.activity_lock = defer.DeferredLock() self.active = False self._pendingMSBOCalls = [] self._activity_loop_deferred = None @defer.inlineCallbacks def stopService(self): # Lots of stuff happens asynchronously here, so we need to let it all # quiesce. First, let the parent stopService succeed between # activities; then the loop will stop calling itself, since # self.running is false. yield self.activity_lock.run(service.AsyncService.stopService, self) # now let any outstanding calls to maybeStartBuildsOn to finish, so # they don't get interrupted in mid-stride. This tends to be # particularly painful because it can occur when a generator is gc'd. # TEST-TODO: this behavior is not asserted in any way. if self._pendingMSBOCalls: yield defer.DeferredList(self._pendingMSBOCalls) @defer.inlineCallbacks def maybeStartBuildsOn(self, new_builders): """ Try to start any builds that can be started right now. This function returns immediately, and promises to trigger those builders eventually. @param new_builders: names of new builders that should be given the opportunity to check for new requests. 
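# A hedged sketch of the two hooks consumed by BasicBuildChooser above:
# nextWorker(builder, workers, buildrequest) picks among the available
# WorkerForBuilder objects and nextBuild(builder, requests) picks the request
# to run next.  The "fast-" naming convention, builder layout and empty
# factory are illustrative.
import random

from buildbot.plugins import util

def prefer_fast_workers(builder, workers, buildrequest):
    fast = [w for w in workers if w.worker.workername.startswith("fast-")]
    return random.choice(fast or workers)

def oldest_first(builder, requests):
    return sorted(requests, key=lambda r: r.getSubmitTime() or 0)[0]

runtests = util.BuilderConfig(
    name="runtests", workernames=["fast-1", "slow-1"], factory=util.BuildFactory(),
    nextWorker=prefer_fast_workers, nextBuild=oldest_first)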
""" if not self.running: return d = self._maybeStartBuildsOn(new_builders) self._pendingMSBOCalls.append(d) try: yield d except Exception as e: # pragma: no cover log.err(e, "while starting builds on {0}".format(new_builders)) finally: self._pendingMSBOCalls.remove(d) @defer.inlineCallbacks def _maybeStartBuildsOn(self, new_builders): new_builders = set(new_builders) existing_pending = set(self._pending_builders) # if we won't add any builders, there's nothing to do if new_builders < existing_pending: return None # reset the list of pending builders @defer.inlineCallbacks def resetPendingBuildersList(new_builders): try: # re-fetch existing_pending, in case it has changed # while acquiring the lock existing_pending = set(self._pending_builders) # then sort the new, expanded set of builders self._pending_builders = \ yield self._sortBuilders( list(existing_pending | new_builders)) # start the activity loop, if we aren't already # working on that. if not self.active: self._activity_loop_deferred = self._activityLoop() except Exception: # pragma: no cover log.err(Failure(), "while attempting to start builds on %s" % self.name) yield self.pending_builders_lock.run( resetPendingBuildersList, new_builders) @defer.inlineCallbacks def _defaultSorter(self, master, builders): timer = metrics.Timer("BuildRequestDistributor._defaultSorter()") timer.start() # perform an asynchronous schwarzian transform, transforming None # into sys.maxint so that it sorts to the end def xform(bldr): d = defer.maybeDeferred(bldr.getOldestRequestTime) d.addCallback(lambda time: (((time is None) and None or time), bldr)) return d xformed = yield defer.gatherResults( [xform(bldr) for bldr in builders]) # sort the transformed list synchronously, comparing None to the end of # the list def xformedKey(a): """ Key function can be used to sort a list where each list element is a tuple: (datetime.datetime, Builder) @return: a tuple of (date, builder name) """ (date, builder) = a if date is None: # Choose a really big date, so that any # date set to 'None' will appear at the # end of the list during comparisons. date = datetime.max # Need to set the timezone on the date, in order # to perform comparisons with other dates which # have the time zone set. 
date = date.replace(tzinfo=tzutc()) return (date, builder.name) xformed.sort(key=xformedKey) # and reverse the transform rv = [xf[1] for xf in xformed] timer.stop() return rv @defer.inlineCallbacks def _sortBuilders(self, buildernames): timer = metrics.Timer("BuildRequestDistributor._sortBuilders()") timer.start() # note that this takes and returns a list of builder names # convert builder names to builders builders_dict = self.botmaster.builders builders = [builders_dict.get(n) for n in buildernames if n in builders_dict] # find a sorting function sorter = self.master.config.prioritizeBuilders if not sorter: sorter = self._defaultSorter # run it try: builders = yield defer.maybeDeferred(sorter, self.master, builders) except Exception: log.err(Failure(), "prioritizing builders; order unspecified") # and return the names rv = [b.name for b in builders] timer.stop() return rv @defer.inlineCallbacks def _activityLoop(self): self.active = True timer = metrics.Timer('BuildRequestDistributor._activityLoop()') timer.start() pending_builders = [] while True: yield self.activity_lock.acquire() if not self.running: self.activity_lock.release() break if not pending_builders: # lock pending_builders, pop an element from it, and release yield self.pending_builders_lock.acquire() # bail out if we shouldn't keep looping if not self._pending_builders: self.pending_builders_lock.release() self.activity_lock.release() break # take that builder list, and run it until the end # we make a copy of it, as it could be modified meanwhile pending_builders = copy.copy(self._pending_builders) self._pending_builders = [] self.pending_builders_lock.release() bldr_name = pending_builders.pop(0) # get the actual builder object bldr = self.botmaster.builders.get(bldr_name) try: if bldr: yield self._maybeStartBuildsOnBuilder(bldr) except Exception: log.err(Failure(), "from maybeStartBuild for builder '%s'" % (bldr_name,)) self.activity_lock.release() timer.stop() self.active = False @defer.inlineCallbacks def _maybeStartBuildsOnBuilder(self, bldr): # create a chooser to give us our next builds # this object is temporary and will go away when we're done bc = self.createBuildChooser(bldr, self.master) while True: worker, breqs = yield bc.chooseNextBuild() if not worker or not breqs: break # claim brid's brids = [br.id for br in breqs] claimed_at_epoch = self.master.reactor.seconds() claimed_at = epoch2datetime(claimed_at_epoch) if not (yield self.master.data.updates.claimBuildRequests( brids, claimed_at=claimed_at)): # some brids were already claimed, so start over bc = self.createBuildChooser(bldr, self.master) continue buildStarted = yield bldr.maybeStartBuild(worker, breqs) if not buildStarted: yield self.master.data.updates.unclaimBuildRequests(brids) # try starting builds again. If we still have a working worker, # then this may re-claim the same buildrequests self.botmaster.maybeStartBuildsForBuilder(self.name) def createBuildChooser(self, bldr, master): # just instantiate the build chooser requested return self.BuildChooser(bldr, master) @defer.inlineCallbacks def _waitForFinish(self): if self._activity_loop_deferred is not None: yield self._activity_loop_deferred buildbot-2.6.0/master/buildbot/process/buildstep.py000066400000000000000000001335201361162603000224470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
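# --- illustrative sketch, not part of the original buildrequestdistributor.py ---
# The BuildRequestDistributor above consults master.config.prioritizeBuilders when
# sorting pending builders; _sortBuilders invokes it as sorter(master, builders) and
# accepts either a plain list or a Deferred via defer.maybeDeferred.  A minimal
# synchronous sorter usable from master.cfg might look like the following; the
# "release-" naming convention is purely an assumption for the example.
def example_prioritizeBuilders(buildmaster, builders):
    # favour builders whose name starts with "release-", order the rest by name;
    # returning a plain list is fine because the caller wraps the call in
    # defer.maybeDeferred()
    return sorted(builders, key=lambda b: (not b.name.startswith('release-'), b.name))
# in master.cfg:  c['prioritizeBuilders'] = example_prioritizeBuilders
# --------------------------------------------------------------------------------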
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect import re import sys from twisted.internet import defer from twisted.internet import error from twisted.python import components from twisted.python import deprecate from twisted.python import failure from twisted.python import log from twisted.python import util as twutil from twisted.python import versions from twisted.python.compat import NativeStringIO from twisted.python.failure import Failure from twisted.python.reflect import accumulateClassList from twisted.web.util import formatFailure from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.interfaces import IRenderable from buildbot.interfaces import WorkerTooOldError from buildbot.process import log as plog from buildbot.process import logobserver from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process import results # (WithProperties used to be available in this module) from buildbot.process.properties import WithProperties from buildbot.process.results import ALL_RESULTS from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.process.results import worst_status from buildbot.util import bytes2unicode from buildbot.util import debounce from buildbot.util import flatten class BuildStepFailed(Exception): pass class BuildStepCancelled(Exception): # used internally for signalling pass class CallableAttributeError(Exception): # attribute error raised from a callable run inside a property pass # old import paths for these classes RemoteCommand = remotecommand.RemoteCommand LoggedRemoteCommand = remotecommand.LoggedRemoteCommand RemoteShellCommand = remotecommand.RemoteShellCommand LogObserver = logobserver.LogObserver LogLineObserver = logobserver.LogLineObserver OutputProgressObserver = logobserver.OutputProgressObserver _hush_pyflakes = [ RemoteCommand, LoggedRemoteCommand, RemoteShellCommand, LogObserver, LogLineObserver, OutputProgressObserver] @implementer(interfaces.IBuildStepFactory) class _BuildStepFactory(util.ComparableMixin): """ This is a wrapper to record the arguments passed to as BuildStep subclass. We use an instance of this class, rather than a closure mostly to make it easier to test that the right factories are getting created. 
""" compare_attrs = ('factory', 'args', 'kwargs') def __init__(self, factory, *args, **kwargs): self.factory = factory self.args = args self.kwargs = kwargs def buildStep(self): try: return self.factory(*self.args, **self.kwargs) except Exception: log.msg("error while creating step, factory=%s, args=%s, kwargs=%s" % (self.factory, self.args, self.kwargs)) raise def _maybeUnhandled(fn): def wrap(self, *args, **kwargs): d = fn(self, *args, **kwargs) if self._start_unhandled_deferreds is not None: self._start_unhandled_deferreds.append(d) return d wrap.__wrapped__ = fn twutil.mergeFunctionMetadata(fn, wrap) return wrap class SyncLogFileWrapper(logobserver.LogObserver): # A temporary wrapper around process.log.Log to emulate *synchronous* # writes to the logfile by handling the Deferred from each add* operation # as part of the step's _start_unhandled_deferreds. This has to handle # the tricky case of adding data to a log *before* addLog has returned! # this also adds the read-only methods such as getText # old constants from the status API HEADER = 0 STDERR = 1 STDOUT = 2 def __init__(self, step, name, addLogDeferred): self.step = step self.name = name self.delayedOperations = [] self.asyncLogfile = None self.chunks = [] self.finished = False self.finishDeferreds = [] self.step._sync_addlog_deferreds.append(addLogDeferred) @addLogDeferred.addCallback def gotAsync(log): self.asyncLogfile = log self._catchup() return log # run _catchup even if there's an error; it will helpfully generate # a whole bunch more! @addLogDeferred.addErrback def problem(f): self._catchup() return f def _catchup(self): if not self.asyncLogfile or not self.delayedOperations: return op = self.delayedOperations.pop(0) try: d = defer.maybeDeferred(op) except Exception: d = defer.fail(failure.Failure()) @d.addBoth def next(x): self._catchup() return x self.step._start_unhandled_deferreds.append(d) def _delay(self, op): self.delayedOperations.append(op) if len(self.delayedOperations) == 1: self._catchup() def _maybeFinished(self): if self.finished and self.finishDeferreds: pending = self.finishDeferreds self.finishDeferreds = [] for d in pending: d.callback(self) # write methods def addStdout(self, data): data = bytes2unicode(data) self.chunks.append((self.STDOUT, data)) self._delay(lambda: self.asyncLogfile.addStdout(data)) def addStderr(self, data): data = bytes2unicode(data) self.chunks.append((self.STDERR, data)) self._delay(lambda: self.asyncLogfile.addStderr(data)) def addHeader(self, data): data = bytes2unicode(data) self.chunks.append((self.HEADER, data)) self._delay(lambda: self.asyncLogfile.addHeader(data)) def finish(self): self.finished = True self._maybeFinished() # pylint: disable=unnecessary-lambda self._delay(lambda: self.asyncLogfile.finish()) def unwrap(self): d = defer.Deferred() self._delay(lambda: d.callback(self.asyncLogfile)) return d # read-only methods def getName(self): return self.name def getText(self): return "".join(self.getChunks([self.STDOUT, self.STDERR], onlyText=True)) def readlines(self): alltext = "".join(self.getChunks([self.STDOUT], onlyText=True)) io = NativeStringIO(alltext) return io.readlines() def getChunks(self, channels=None, onlyText=False): chunks = self.chunks if channels: channels = set(channels) chunks = ((c, t) for (c, t) in chunks if c in channels) if onlyText: chunks = (t for (c, t) in chunks) return chunks def isFinished(self): return self.finished def waitUntilFinished(self): d = defer.Deferred() self.finishDeferreds.append(d) self._maybeFinished() class 
BuildStepStatus: # used only for old-style steps pass @implementer(interfaces.IBuildStep) class BuildStep(results.ResultComputingConfigMixin, properties.PropertiesMixin, util.ComparableMixin): alwaysRun = False doStepIf = True hideStepIf = False compare_attrs = ("_factory",) # properties set on a build step are, by nature, always runtime properties set_runtime_properties = True renderables = results.ResultComputingConfigMixin.resultConfig + [ 'alwaysRun', 'description', 'descriptionDone', 'descriptionSuffix', 'doStepIf', 'hideStepIf', 'workdir', ] # 'parms' holds a list of all the parameters we care about, to allow # users to instantiate a subclass of BuildStep with a mixture of # arguments, some of which are for us, some of which are for the subclass # (or a delegate of the subclass, like how ShellCommand delivers many # arguments to the RemoteShellCommand that it creates). Such delegating # subclasses will use this list to figure out which arguments are meant # for us and which should be given to someone else. parms = [ 'alwaysRun', 'description', 'descriptionDone', 'descriptionSuffix', 'doStepIf', 'flunkOnFailure', 'flunkOnWarnings', 'haltOnFailure', 'updateBuildSummaryPolicy', 'hideStepIf', 'locks', 'logEncoding', 'name', 'progressMetrics', 'useProgress', 'warnOnFailure', 'warnOnWarnings', 'workdir', ] name = "generic" description = None # set this to a list of short strings to override descriptionDone = None # alternate description when the step is complete descriptionSuffix = None # extra information to append to suffix updateBuildSummaryPolicy = None locks = [] progressMetrics = () # 'time' is implicit useProgress = True # set to False if step is really unpredictable build = None step_status = None progress = None logEncoding = None cmd = None rendered = False # true if attributes are rendered _workdir = None _waitingForLocks = False def _run_finished_hook(self): return None # override in tests def __init__(self, **kwargs): self.worker = None for p in self.__class__.parms: if p in kwargs: setattr(self, p, kwargs.pop(p)) if kwargs: config.error("%s.__init__ got unexpected keyword argument(s) %s" % (self.__class__, list(kwargs))) self._pendingLogObservers = [] if not isinstance(self.name, str) and not IRenderable.providedBy(self.name): config.error("BuildStep name must be a string or a renderable object: " "%r" % (self.name,)) if isinstance(self.description, str): self.description = [self.description] if isinstance(self.descriptionDone, str): self.descriptionDone = [self.descriptionDone] if isinstance(self.descriptionSuffix, str): self.descriptionSuffix = [self.descriptionSuffix] if self.updateBuildSummaryPolicy is None: # compute default value for updateBuildSummaryPolicy self.updateBuildSummaryPolicy = [EXCEPTION, RETRY, CANCELLED] if self.flunkOnFailure or self.haltOnFailure or self.warnOnFailure: self.updateBuildSummaryPolicy.append(FAILURE) if self.warnOnWarnings or self.flunkOnWarnings: self.updateBuildSummaryPolicy.append(WARNINGS) if self.updateBuildSummaryPolicy is False: self.updateBuildSummaryPolicy = [] if self.updateBuildSummaryPolicy is True: self.updateBuildSummaryPolicy = ALL_RESULTS if not isinstance(self.updateBuildSummaryPolicy, list): config.error("BuildStep updateBuildSummaryPolicy must be " "a list of result ids or boolean but it is %r" % (self.updateBuildSummaryPolicy,)) self._acquiringLocks = [] self.stopped = False self.master = None self.statistics = {} self.logs = {} self._running = False self.stepid = None self.results = None self._start_unhandled_deferreds 
= None def __new__(klass, *args, **kwargs): self = object.__new__(klass) self._factory = _BuildStepFactory(klass, *args, **kwargs) return self def __str__(self): args = [repr(x) for x in self._factory.args] args.extend([str(k) + "=" + repr(v) for k, v in self._factory.kwargs.items()]) return "{}({})".format( self.__class__.__name__, ", ".join(args)) __repr__ = __str__ def setBuild(self, build): self.build = build self.master = self.build.master def setWorker(self, worker): self.worker = worker @deprecate.deprecated(versions.Version("buildbot", 0, 9, 0)) def setDefaultWorkdir(self, workdir): if self._workdir is None: self._workdir = workdir @property def workdir(self): # default the workdir appropriately if self._workdir is not None or self.build is None: return self._workdir else: # see :ref:`Factory-Workdir-Functions` for details on how to # customize this if callable(self.build.workdir): try: return self.build.workdir(self.build.sources) except AttributeError as e: # if the callable raises an AttributeError # python thinks it is actually workdir that is not existing. # python will then swallow the attribute error and call # __getattr__ from worker_transition _, _, traceback = sys.exc_info() raise CallableAttributeError(e).with_traceback(traceback) # we re-raise the original exception by changing its type, # but keeping its stacktrace else: return self.build.workdir @workdir.setter def workdir(self, workdir): self._workdir = workdir def addFactoryArguments(self, **kwargs): # this is here for backwards compatibility pass def _getStepFactory(self): return self._factory def setupProgress(self): # this function temporarily does nothing pass def setProgress(self, metric, value): # this function temporarily does nothing pass def getCurrentSummary(self): if self.description is not None: stepsumm = util.join_list(self.description) if self.descriptionSuffix: stepsumm += ' ' + util.join_list(self.descriptionSuffix) else: stepsumm = 'running' return {'step': stepsumm} def getResultSummary(self): if self.descriptionDone is not None or self.description is not None: stepsumm = util.join_list(self.descriptionDone or self.description) if self.descriptionSuffix: stepsumm += ' ' + util.join_list(self.descriptionSuffix) else: stepsumm = 'finished' if self.results != SUCCESS: stepsumm += ' (%s)' % Results[self.results] return {'step': stepsumm} @defer.inlineCallbacks def getBuildResultSummary(self): summary = yield self.getResultSummary() if self.results in self.updateBuildSummaryPolicy and 'build' not in summary and 'step' in summary: summary['build'] = summary['step'] return summary @debounce.method(wait=1) @defer.inlineCallbacks def updateSummary(self): def methodInfo(m): lines = inspect.getsourcelines(m) return "\nat %s:%s:\n %s" % ( inspect.getsourcefile(m), lines[1], "\n".join(lines[0])) if not self._running: summary = yield self.getResultSummary() if not isinstance(summary, dict): raise TypeError('getResultSummary must return a dictionary: ' + methodInfo(self.getResultSummary)) else: summary = yield self.getCurrentSummary() if not isinstance(summary, dict): raise TypeError('getCurrentSummary must return a dictionary: ' + methodInfo(self.getCurrentSummary)) stepResult = summary.get('step', 'finished') if not isinstance(stepResult, str): raise TypeError("step result string must be unicode (got %r)" % (stepResult,)) if self.stepid is not None: stepResult = self.build.properties.cleanupTextFromSecrets( stepResult) yield self.master.data.updates.setStepStateString(self.stepid, stepResult) if not 
self._running: buildResult = summary.get('build', None) if buildResult and not isinstance(buildResult, str): raise TypeError("build result string must be unicode") # updateSummary gets patched out for old-style steps, so keep a copy we can # call internally for such steps realUpdateSummary = updateSummary @defer.inlineCallbacks def addStep(self): # create and start the step, noting that the name may be altered to # ensure uniqueness self.name = yield self.build.render(self.name) self.stepid, self.number, self.name = yield self.master.data.updates.addStep( buildid=self.build.buildid, name=util.bytes2unicode(self.name)) yield self.master.data.updates.startStep(self.stepid) @defer.inlineCallbacks def startStep(self, remote): self.remote = remote yield self.addStep() self.locks = yield self.build.render(self.locks) # convert all locks into their real form botmaster = self.build.builder.botmaster self.locks = yield botmaster.getLockFromLockAccesses(self.locks, self.build.config_version) # then narrow WorkerLocks down to the worker that this build is being # run on self.locks = [(l.getLockForWorker(self.build.workerforbuilder.worker), la) for l, la in self.locks] for l, la in self.locks: if l in self.build.locks: log.msg("Hey, lock %s is claimed by both a Step (%s) and the" " parent Build (%s)" % (l, self, self.build)) raise RuntimeError("lock claimed by both Step and Build") try: # set up locks yield self.acquireLocks() if self.stopped: raise BuildStepCancelled # render renderables in parallel renderables = [] accumulateClassList(self.__class__, 'renderables', renderables) def setRenderable(res, attr): setattr(self, attr, res) dl = [] for renderable in renderables: d = self.build.render(getattr(self, renderable)) d.addCallback(setRenderable, renderable) dl.append(d) yield defer.gatherResults(dl) self.rendered = True # we describe ourselves only when renderables are interpolated self.realUpdateSummary() # check doStepIf (after rendering) if isinstance(self.doStepIf, bool): doStep = self.doStepIf else: doStep = yield self.doStepIf(self) # run -- or skip -- the step if doStep: try: self._running = True self.results = yield self.run() finally: self._running = False else: self.results = SKIPPED # NOTE: all of these `except` blocks must set self.results immediately! except BuildStepCancelled: self.results = CANCELLED except BuildStepFailed: self.results = FAILURE except error.ConnectionLost: self.results = RETRY except Exception: self.results = EXCEPTION why = Failure() log.err(why, "BuildStep.failed; traceback follows") yield self.addLogWithFailure(why) if self.stopped and self.results != RETRY: # We handle this specially because we don't care about # the return code of an interrupted command; we know # that this should just be exception due to interrupt # At the same time we must respect RETRY status because it's used # to retry interrupted build due to some other issues for example # due to worker lost if self.results != CANCELLED: self.results = EXCEPTION # update the summary one last time, make sure that completes, # and then don't update it any more. 
self.realUpdateSummary() yield self.realUpdateSummary.stop() # determine whether we should hide this step hidden = self.hideStepIf if callable(hidden): try: hidden = hidden(self.results, self) except Exception: why = Failure() log.err(why, "hidden callback failed; traceback follows") yield self.addLogWithFailure(why) self.results = EXCEPTION hidden = False yield self.master.data.updates.finishStep(self.stepid, self.results, hidden) # finish unfinished logs all_finished = yield self.finishUnfinishedLogs() if not all_finished: self.results = EXCEPTION self.releaseLocks() return self.results @defer.inlineCallbacks def finishUnfinishedLogs(self): ok = True not_finished_logs = [v for (k, v) in self.logs.items() if not v.finished] finish_logs = yield defer.DeferredList([v.finish() for v in not_finished_logs], consumeErrors=True) for success, res in finish_logs: if not success: log.err(res, "when trying to finish a log") ok = False return ok def acquireLocks(self, res=None): if not self.locks: return defer.succeed(None) if self.stopped: return defer.succeed(None) log.msg("acquireLocks(step %s, locks %s)" % (self, self.locks)) for lock, access in self.locks: for waited_lock, _, _ in self._acquiringLocks: if lock is waited_lock: continue if not lock.isAvailable(self, access): self._waitingForLocks = True log.msg("step %s waiting for lock %s" % (self, lock)) d = lock.waitUntilMaybeAvailable(self, access) self._acquiringLocks.append((lock, access, d)) d.addCallback(self.acquireLocks) return d # all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) self._acquiringLocks = [] self._waitingForLocks = False return defer.succeed(None) @defer.inlineCallbacks def run(self): self._start_deferred = defer.Deferred() unhandled = self._start_unhandled_deferreds = [] self._sync_addlog_deferreds = [] try: # here's where we set things up for backward compatibility for # old-style steps, using monkey patches so that new-style steps # aren't bothered by any of this equipment # monkey-patch self.step_status.{setText,setText2} back into # existence for old steps, signalling an update to the summary self.step_status = BuildStepStatus() self.step_status.setText = lambda text: self.realUpdateSummary() self.step_status.setText2 = lambda text: self.realUpdateSummary() # monkey-patch in support for old statistics functions self.step_status.setStatistic = self.setStatistic self.step_status.getStatistic = self.getStatistic self.step_status.hasStatistic = self.hasStatistic # monkey-patch an addLog that returns an write-only, sync log self.addLog = self.addLog_oldStyle self._logFileWrappers = {} # and a getLog that returns a read-only, sync log, captured by # LogObservers installed by addLog_oldStyle self.getLog = self.getLog_oldStyle # old-style steps shouldn't be calling updateSummary def updateSummary(): assert 0, 'updateSummary is only valid on new-style steps' self.updateSummary = updateSummary results = yield self.start() if results is not None: self._start_deferred.callback(results) results = yield self._start_deferred finally: # hook for tests # assert so that it is only run in non optimized mode assert self._run_finished_hook() is None # wait until all the sync logs have been actually created before # finishing yield defer.DeferredList(self._sync_addlog_deferreds, consumeErrors=True) self._start_deferred = None unhandled = self._start_unhandled_deferreds self.realUpdateSummary() # Wait for any possibly-unhandled deferreds. If any fail, change the # result to EXCEPTION and log. 
while unhandled: self._start_unhandled_deferreds = [] unhandled_results = yield defer.DeferredList(unhandled, consumeErrors=True) for success, res in unhandled_results: if not success: log.err( res, "from an asynchronous method executed in an old-style step") results = EXCEPTION unhandled = self._start_unhandled_deferreds return results def finished(self, results): assert self._start_deferred, \ "finished() can only be called from old steps implementing start()" self._start_deferred.callback(results) def failed(self, why): assert self._start_deferred, \ "failed() can only be called from old steps implementing start()" self._start_deferred.errback(why) def isNewStyle(self): # **temporary** method until new-style steps are the only supported style return self.run.__func__ is not BuildStep.run def start(self): # New-style classes implement 'run'. # Old-style classes implemented 'start'. Advise them to do 'run' # instead. raise NotImplementedError("your subclass must implement run()") def interrupt(self, reason): self.stopped = True if self._acquiringLocks: for (lock, access, d) in self._acquiringLocks: lock.stopWaitingUntilAvailable(self, access, d) self._acquiringLocks = [] if self._waitingForLocks: self.addCompleteLog( 'cancelled while waiting for locks', str(reason)) else: self.addCompleteLog('cancelled', str(reason)) if self.cmd: d = self.cmd.interrupt(reason) d.addErrback(log.err, 'while cancelling command') def releaseLocks(self): log.msg("releaseLocks(%s): %s" % (self, self.locks)) for lock, access in self.locks: if lock.isOwner(self, access): lock.release(self, access) else: # This should only happen if we've been interrupted assert self.stopped # utility methods that BuildSteps may find useful def workerVersion(self, command, oldversion=None): return self.build.getWorkerCommandVersion(command, oldversion) def workerVersionIsOlderThan(self, command, minversion): sv = self.build.getWorkerCommandVersion(command, None) if sv is None: return True if [int(s) for s in sv.split(".")] < [int(m) for m in minversion.split(".")]: return True return False def checkWorkerHasCommand(self, command): if not self.workerVersion(command): message = "worker is too old, does not know about %s" % command raise WorkerTooOldError(message) def getWorkerName(self): return self.build.getWorkerName() def addLog(self, name, type='s', logEncoding=None): if self.stepid is None: raise BuildStepCancelled d = self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), str(type)) @d.addCallback def newLog(logid): return self._newLog(name, type, logid, logEncoding) return d addLog_newStyle = addLog def addLog_oldStyle(self, name, type='s', logEncoding=None): # create a logfile instance that acts like old-style status logfiles # begin to create a new-style logfile loog_d = self.addLog_newStyle(name, type, logEncoding) self._start_unhandled_deferreds.append(loog_d) # and wrap the deferred that will eventually fire with that logfile # into a write-only logfile instance wrapper = SyncLogFileWrapper(self, name, loog_d) self._logFileWrappers[name] = wrapper return wrapper def getLog(self, name): return self.logs[name] def getLog_oldStyle(self, name): return self._logFileWrappers[name] @_maybeUnhandled @defer.inlineCallbacks def addCompleteLog(self, name, text): if self.stepid is None: raise BuildStepCancelled logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 't') _log = self._newLog(name, 't', logid) yield _log.addContent(text) yield _log.finish() @_maybeUnhandled 
@defer.inlineCallbacks def addHTMLLog(self, name, html): if self.stepid is None: raise BuildStepCancelled logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 'h') _log = self._newLog(name, 'h', logid) html = bytes2unicode(html) yield _log.addContent(html) yield _log.finish() @defer.inlineCallbacks def addLogWithFailure(self, why, logprefix=""): # helper for showing exceptions to the users try: yield self.addCompleteLog(logprefix + "err.text", why.getTraceback()) yield self.addHTMLLog(logprefix + "err.html", formatFailure(why)) except Exception: log.err(Failure(), "error while formatting exceptions") def addLogWithException(self, why, logprefix=""): return self.addLogWithFailure(Failure(why), logprefix) def addLogObserver(self, logname, observer): assert interfaces.ILogObserver.providedBy(observer) observer.setStep(self) self._pendingLogObservers.append((logname, observer)) self._connectPendingLogObservers() def _newLog(self, name, type, logid, logEncoding=None): if not logEncoding: logEncoding = self.logEncoding if not logEncoding: logEncoding = self.master.config.logEncoding log = plog.Log.new(self.master, name, type, logid, logEncoding) self.logs[name] = log self._connectPendingLogObservers() return log def _connectPendingLogObservers(self): for logname, observer in self._pendingLogObservers[:]: if logname in self.logs: observer.setLog(self.logs[logname]) self._pendingLogObservers.remove((logname, observer)) @_maybeUnhandled @defer.inlineCallbacks def addURL(self, name, url): yield self.master.data.updates.addStepURL(self.stepid, str(name), str(url)) return None @defer.inlineCallbacks def runCommand(self, command): self.cmd = command command.worker = self.worker try: res = yield command.run(self, self.remote, self.build.builder.name) finally: self.cmd = None return res def hasStatistic(self, name): return name in self.statistics def getStatistic(self, name, default=None): return self.statistics.get(name, default) def getStatistics(self): return self.statistics.copy() def setStatistic(self, name, value): self.statistics[name] = value def _describe(self, done=False): # old-style steps expect this function to exist assert not self.isNewStyle() return [] def describe(self, done=False): # old-style steps expect this function to exist assert not self.isNewStyle() desc = self._describe(done) if not desc: return [] if self.descriptionSuffix: desc += self.descriptionSuffix return desc components.registerAdapter( BuildStep._getStepFactory, BuildStep, interfaces.IBuildStepFactory) components.registerAdapter( lambda step: interfaces.IProperties(step.build), BuildStep, interfaces.IProperties) class LoggingBuildStep(BuildStep): progressMetrics = ('output',) logfiles = {} parms = BuildStep.parms + ['logfiles', 'lazylogfiles', 'log_eval_func'] cmd = None renderables = ['logfiles', 'lazylogfiles'] def __init__(self, logfiles=None, lazylogfiles=False, log_eval_func=None, *args, **kwargs): super().__init__(*args, **kwargs) if logfiles is None: logfiles = {} if logfiles and not isinstance(logfiles, dict): config.error( "the ShellCommand 'logfiles' parameter must be a dictionary") # merge a class-level 'logfiles' attribute with one passed in as an # argument self.logfiles = self.logfiles.copy() self.logfiles.update(logfiles) self.lazylogfiles = lazylogfiles if log_eval_func and not callable(log_eval_func): config.error( "the 'log_eval_func' parameter must be a callable") self.log_eval_func = log_eval_func self.addLogObserver('stdio', OutputProgressObserver("output")) def 
isNewStyle(self): # LoggingBuildStep subclasses are never new-style return False def addLogFile(self, logname, filename): self.logfiles[logname] = filename def buildCommandKwargs(self): kwargs = dict() kwargs['logfiles'] = self.logfiles return kwargs def startCommand(self, cmd, errorMessages=None): if errorMessages is None: errorMessages = [] log.msg("ShellCommand.startCommand(cmd=%s)" % (cmd,)) log.msg(" cmd.args = %r" % (cmd.args)) self.cmd = cmd # so we can interrupt it # stdio is the first log self.stdio_log = stdio_log = self.addLog("stdio") cmd.useLog(stdio_log, closeWhenFinished=True) for em in errorMessages: stdio_log.addHeader(em) # TODO: consider setting up self.stdio_log earlier, and have the # code that passes in errorMessages instead call # self.stdio_log.addHeader() directly. # there might be other logs self.setupLogfiles(cmd, self.logfiles) d = self.runCommand(cmd) # might raise ConnectionLost d.addCallback(lambda res: self.commandComplete(cmd)) # TODO: when the status.LogFile object no longer exists, then this # method will a synthetic logfile for old-style steps, and to be called # without the `logs` parameter for new-style steps. Unfortunately, # lots of createSummary methods exist, but don't look at the log, so # it's difficult to optimize when the synthetic logfile is needed. d.addCallback(lambda res: self.createSummary(cmd.logs['stdio'])) d.addCallback(lambda res: self.evaluateCommand(cmd)) # returns results @d.addCallback def _gotResults(results): self.setStatus(cmd, results) return results d.addCallback(self.finished) d.addErrback(self.failed) def setupLogfiles(self, cmd, logfiles): for logname, remotefilename in logfiles.items(): if self.lazylogfiles: # Ask RemoteCommand to watch a logfile, but only add # it when/if we see any data. # # The dummy default argument local_logname is a work-around for # Python name binding; default values are bound by value, but # captured variables in the body are bound by name. def callback(cmd_arg, local_logname=logname): return self.addLog(local_logname) cmd.useLogDelayed(logname, callback, True) else: # add a LogFile newlog = self.addLog(logname) # and tell the RemoteCommand to feed it cmd.useLog(newlog, True) def checkDisconnect(self, f): # this is now handled by self.failed log.msg("WARNING: step %s uses deprecated checkDisconnect method") return f def commandComplete(self, cmd): pass def createSummary(self, stdio): pass def evaluateCommand(self, cmd): # NOTE: log_eval_func is undocumented, and will die with # LoggingBuildStep/ShellCOmmand if self.log_eval_func: # self.step_status probably doesn't have the desired behaviors, but # those were never well-defined.. 
return self.log_eval_func(cmd, self.step_status) return cmd.results() # TODO: delete def getText(self, cmd, results): if results == SUCCESS: return self.describe(True) elif results == WARNINGS: return self.describe(True) + ["warnings"] elif results == EXCEPTION: return self.describe(True) + ["exception"] elif results == CANCELLED: return self.describe(True) + ["cancelled"] return self.describe(True) + ["failed"] # TODO: delete def getText2(self, cmd, results): return [self.name] # TODO: delete def maybeGetText2(self, cmd, results): if results == SUCCESS: # successful steps do not add anything to the build's text pass elif results == WARNINGS: if (self.flunkOnWarnings or self.warnOnWarnings): # we're affecting the overall build, so tell them why return self.getText2(cmd, results) else: if (self.haltOnFailure or self.flunkOnFailure or self.warnOnFailure): # we're affecting the overall build, so tell them why return self.getText2(cmd, results) return [] def setStatus(self, cmd, results): self.realUpdateSummary() return defer.succeed(None) class CommandMixin: @defer.inlineCallbacks def _runRemoteCommand(self, cmd, abandonOnFailure, args, makeResult=None): cmd = remotecommand.RemoteCommand(cmd, args) try: log = self.getLog('stdio') except Exception: log = yield self.addLog('stdio') cmd.useLog(log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): raise BuildStepFailed() if makeResult: return makeResult(cmd) else: return not cmd.didFail() def runRmdir(self, dir, log=None, abandonOnFailure=True): return self._runRemoteCommand('rmdir', abandonOnFailure, {'dir': dir, 'logEnviron': False}) def pathExists(self, path, log=None): return self._runRemoteCommand('stat', False, {'file': path, 'logEnviron': False}) def runMkdir(self, dir, log=None, abandonOnFailure=True): return self._runRemoteCommand('mkdir', abandonOnFailure, {'dir': dir, 'logEnviron': False}) def runGlob(self, path): return self._runRemoteCommand( 'glob', True, {'path': path, 'logEnviron': False}, makeResult=lambda cmd: cmd.updates['files'][0]) class ShellMixin: command = None env = {} want_stdout = True want_stderr = True usePTY = None logfiles = {} lazylogfiles = {} timeout = 1200 maxTime = None logEnviron = True interruptSignal = 'KILL' sigtermTime = None initialStdin = None decodeRC = {0: SUCCESS} _shellMixinArgs = [ 'command', 'workdir', 'env', 'want_stdout', 'want_stderr', 'usePTY', 'logfiles', 'lazylogfiles', 'timeout', 'maxTime', 'logEnviron', 'interruptSignal', 'sigtermTime', 'initialStdin', 'decodeRC', ] renderables = _shellMixinArgs def setupShellMixin(self, constructorArgs, prohibitArgs=None): assert self.isNewStyle( ), "ShellMixin is only compatible with new-style steps" constructorArgs = constructorArgs.copy() if prohibitArgs is None: prohibitArgs = [] def bad(arg): config.error("invalid %s argument %s" % (self.__class__.__name__, arg)) for arg in self._shellMixinArgs: if arg not in constructorArgs: continue if arg in prohibitArgs: bad(arg) else: setattr(self, arg, constructorArgs[arg]) del constructorArgs[arg] for arg in list(constructorArgs): if arg not in BuildStep.parms: bad(arg) del constructorArgs[arg] return constructorArgs @defer.inlineCallbacks def makeRemoteShellCommand(self, collectStdout=False, collectStderr=False, stdioLogName='stdio', **overrides): kwargs = {arg: getattr(self, arg) for arg in self._shellMixinArgs} kwargs.update(overrides) stdio = None if stdioLogName is not None: # Reuse an existing log if possible; otherwise, create one. 
try: stdio = yield self.getLog(stdioLogName) except KeyError: stdio = yield self.addLog(stdioLogName) kwargs['command'] = flatten(kwargs['command'], (list, tuple)) # store command away for display self.command = kwargs['command'] # check for the usePTY flag if kwargs['usePTY'] is not None: if self.workerVersionIsOlderThan("shell", "2.7"): if stdio is not None: yield stdio.addHeader( "NOTE: worker does not allow master to override usePTY\n") del kwargs['usePTY'] # check for the interruptSignal flag if kwargs["interruptSignal"] and self.workerVersionIsOlderThan("shell", "2.15"): if stdio is not None: yield stdio.addHeader( "NOTE: worker does not allow master to specify interruptSignal\n") del kwargs['interruptSignal'] # lazylogfiles are handled below del kwargs['lazylogfiles'] # merge the builder's environment with that supplied here builderEnv = self.build.builder.config.env kwargs['env'] = yield self.build.render(builderEnv) kwargs['env'].update(self.env) kwargs['stdioLogName'] = stdioLogName # default the workdir appropriately if not kwargs.get('workdir') and not self.workdir: if callable(self.build.workdir): kwargs['workdir'] = self.build.workdir(self.build.sources) else: kwargs['workdir'] = self.build.workdir # the rest of the args go to RemoteShellCommand cmd = remotecommand.RemoteShellCommand( collectStdout=collectStdout, collectStderr=collectStderr, **kwargs ) # set up logging if stdio is not None: cmd.useLog(stdio, False) for logname, remotefilename in self.logfiles.items(): if self.lazylogfiles: # it's OK if this does, or does not, return a Deferred def callback(cmd_arg, local_logname=logname): return self.addLog(local_logname) cmd.useLogDelayed(logname, callback, True) else: # add a LogFile newlog = yield self.addLog(logname) # and tell the RemoteCommand to feed it cmd.useLog(newlog, False) return cmd def getResultSummary(self): summary = util.command_to_string(self.command) if not summary: return super(ShellMixin, self).getResultSummary() return {'step': summary} # Parses the logs for a list of regexs. Meant to be invoked like: # regexes = ((re.compile(...), FAILURE), (re.compile(...), WARNINGS)) # self.addStep(ShellCommand, # command=..., # ..., # log_eval_func=lambda c,s: regex_log_evaluator(c, s, regexs) # ) # NOTE: log_eval_func is undocumented, and will die with # LoggingBuildStep/ShellCOmmand def regex_log_evaluator(cmd, _, regexes): worst = cmd.results() for err, possible_status in regexes: # worst_status returns the worse of the two status' passed to it. # we won't be changing "worst" unless possible_status is worse than it, # so we don't even need to check the log if that's the case if worst_status(worst, possible_status) == possible_status: if isinstance(err, str): err = re.compile(".*%s.*" % err, re.DOTALL) for l in cmd.logs.values(): if err.search(l.getText()): worst = possible_status return worst _hush_pyflakes = [WithProperties] del _hush_pyflakes buildbot-2.6.0/master/buildbot/process/cache.py000066400000000000000000000055151361162603000215210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
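# --- illustrative sketch, not part of the original buildstep.py ---
# A typical new-style step built on the ShellMixin defined above: the mixin strips
# its own constructor arguments in setupShellMixin(), and run() obtains a
# RemoteShellCommand from makeRemoteShellCommand(), runs it, and returns its result
# code.  The class name and the 'make check' command are assumptions for the example.
from twisted.internet import defer
from buildbot.process import buildstep

class ExampleRunTests(buildstep.ShellMixin, buildstep.BuildStep):
    def __init__(self, **kwargs):
        # keep only the arguments that are not consumed by ShellMixin
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand(command=['make', 'check'])
        yield self.runCommand(cmd)
        return cmd.results()
# -------------------------------------------------------------------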
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.util import lru from buildbot.util import service class CacheManager(service.ReconfigurableServiceMixin, service.AsyncService): """ A manager for a collection of caches, each for different types of objects and with potentially-overlapping key spaces. There is generally only one instance of this class, available at C{master.caches}. """ # a cache of length one still has many benefits: it collects objects that # remain referenced elsewhere; it collapses simultaneous misses into one # miss function; and it will optimize repeated fetches of the same object. DEFAULT_CACHE_SIZE = 1 def __init__(self): self.setName('caches') self.config = {} self._caches = {} def get_cache(self, cache_name, miss_fn): """ Get an L{AsyncLRUCache} object with the given name. If such an object does not exist, it will be created. Since the cache is permanent, this method can be called only once, e.g., in C{startService}, and it value stored indefinitely. @param cache_name: name of the cache (usually the name of the type of object it stores) @param miss_fn: miss function for the cache; see L{AsyncLRUCache} constructor. @returns: L{AsyncLRUCache} instance """ try: return self._caches[cache_name] except KeyError: max_size = self.config.get(cache_name, self.DEFAULT_CACHE_SIZE) assert max_size >= 1 c = self._caches[cache_name] = lru.AsyncLRUCache(miss_fn, max_size) return c def reconfigServiceWithBuildbotConfig(self, new_config): self.config = new_config.caches for name, cache in self._caches.items(): cache.set_max_size(new_config.caches.get(name, self.DEFAULT_CACHE_SIZE)) return super().reconfigServiceWithBuildbotConfig(new_config) def get_metrics(self): return { n: {'hits': c.hits, 'refhits': c.refhits, 'misses': c.misses, 'max_size': c.max_size} for n, c in self._caches.items()} buildbot-2.6.0/master/buildbot/process/debug.py000066400000000000000000000033731361162603000215440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
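# --- illustrative sketch, not part of the original cache.py ---
# Typical use of the CacheManager above: a component asks master.caches for a named
# cache once (e.g. in startService), keeps the returned AsyncLRUCache, and fetches
# objects through it so that simultaneous misses are collapsed.  The cache name and
# the miss function below are assumptions for the example; AsyncLRUCache.get()
# returns a Deferred.
from twisted.internet import defer

class ExampleCacheUser:
    def __init__(self, master):
        # get_cache() is meant to be called once per name; keep the cache around
        self.cache = master.caches.get_cache("example_objects", self._miss_fn)

    def _miss_fn(self, key):
        # called only on a cache miss; may return a plain value or a Deferred
        return defer.succeed({"key": key})

    @defer.inlineCallbacks
    def get_object(self, key):
        obj = yield self.cache.get(key)
        return obj
# ----------------------------------------------------------------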
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class DebugServices(service.ReconfigurableServiceMixin, service.AsyncMultiService): name = 'debug_services' def __init__(self): super().__init__() self.debug_port = None self.debug_password = None self.debug_registration = None self.manhole = None @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if new_config.manhole != self.manhole: if self.manhole: yield self.manhole.disownServiceParent() self.manhole = None if new_config.manhole: self.manhole = new_config.manhole yield self.manhole.setServiceParent(self) # chain up yield super().reconfigServiceWithBuildbotConfig(new_config) @defer.inlineCallbacks def stopService(self): # manhole will get stopped as a sub-service yield super().stopService() # clean up if self.manhole: self.manhole = None buildbot-2.6.0/master/buildbot/process/factory.py000066400000000000000000000232661361162603000221300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from contextlib import contextmanager from twisted.python import deprecate from twisted.python import versions from buildbot import interfaces from buildbot import util from buildbot.process.build import Build from buildbot.process.buildstep import BuildStep from buildbot.steps.download_secret_to_worker import DownloadSecretsToWorker from buildbot.steps.download_secret_to_worker import RemoveWorkerFileSecret from buildbot.steps.shell import Compile from buildbot.steps.shell import Configure from buildbot.steps.shell import PerlModuleTest from buildbot.steps.shell import ShellCommand from buildbot.steps.shell import Test from buildbot.steps.source.cvs import CVS from buildbot.steps.source.svn import SVN # deprecated, use BuildFactory.addStep @deprecate.deprecated(versions.Version("buildbot", 0, 8, 6)) def s(steptype, **kwargs): # convenience function for master.cfg files, to create step # specification tuples return interfaces.IBuildStepFactory(steptype(**kwargs)) class BuildFactory(util.ComparableMixin): """ @cvar buildClass: class to use when creating builds @type buildClass: L{buildbot.process.build.Build} """ buildClass = Build useProgress = 1 workdir = "build" compare_attrs = ('buildClass', 'steps', 'useProgress', 'workdir') def __init__(self, steps=None): self.steps = [] if steps: self.addSteps(steps) def newBuild(self, requests): """Create a new Build instance. @param requests: a list of buildrequest dictionaries describing what is to be built """ b = self.buildClass(requests) b.useProgress = self.useProgress b.workdir = self.workdir b.setStepFactories(self.steps) return b def addStep(self, step, **kwargs): if kwargs or (isinstance(step, type(BuildStep)) and issubclass(step, BuildStep)): warnings.warn( "Passing a BuildStep subclass to factory.addStep is " "deprecated. 
Please pass a BuildStep instance instead.", DeprecationWarning, stacklevel=2) step = step(**kwargs) self.steps.append(interfaces.IBuildStepFactory(step)) def addSteps(self, steps, withSecrets=None): if withSecrets is None: withSecrets = [] if withSecrets: self.addStep(DownloadSecretsToWorker(withSecrets)) for s in steps: self.addStep(s) if withSecrets: self.addStep(RemoveWorkerFileSecret(withSecrets)) @contextmanager def withSecrets(self, secrets): self.addStep(DownloadSecretsToWorker(secrets)) yield self self.addStep(RemoveWorkerFileSecret(secrets)) # BuildFactory subclasses for common build tools class _DefaultCommand: # Used to indicate a default command to the step. pass class GNUAutoconf(BuildFactory): def __init__(self, source, configure="./configure", configureEnv=None, configureFlags=None, reconf=None, compile=_DefaultCommand, test=_DefaultCommand, distcheck=_DefaultCommand): if configureEnv is None: configureEnv = {} if configureFlags is None: configureFlags = [] if compile is _DefaultCommand: compile = ["make", "all"] if test is _DefaultCommand: test = ["make", "check"] if distcheck is _DefaultCommand: distcheck = ["make", "distcheck"] super().__init__([source]) if reconf is True: reconf = ["autoreconf", "-si"] if reconf is not None: self.addStep( ShellCommand(name="autoreconf", command=reconf, env=configureEnv)) if configure is not None: # we either need to wind up with a string (which will be # space-split), or with a list of strings (which will not). The # list of strings is the preferred form. if isinstance(configure, str): if configureFlags: assert " " not in configure # please use list instead command = [configure] + configureFlags else: command = configure else: assert isinstance(configure, (list, tuple)) command = configure + configureFlags self.addStep(Configure(command=command, env=configureEnv)) if compile is not None: self.addStep(Compile(command=compile, env=configureEnv)) if test is not None: self.addStep(Test(command=test, env=configureEnv)) if distcheck is not None: self.addStep(Test(command=distcheck, env=configureEnv)) class CPAN(BuildFactory): def __init__(self, source, perl="perl"): super().__init__([source]) self.addStep(Configure(command=[perl, "Makefile.PL"])) self.addStep(Compile(command=["make"])) self.addStep(PerlModuleTest(command=["make", "test"])) class Distutils(BuildFactory): def __init__(self, source, python="python", test=None): super().__init__([source]) self.addStep(Compile(command=[python, "./setup.py", "build"])) if test is not None: self.addStep(Test(command=test)) class Trial(BuildFactory): """Build a python module that uses distutils and trial. Set 'tests' to the module in which the tests can be found, or set useTestCaseNames=True to always have trial figure out which tests to run (based upon which files have been changed). See docs/factories.xhtml for usage samples. Not all of the Trial BuildStep options are available here, only the most commonly used ones. 
To get complete access, you will need to create a custom BuildFactory.""" trial = "trial" randomly = False recurse = False def __init__(self, source, buildpython=None, trialpython=None, trial=None, testpath=".", randomly=None, recurse=None, tests=None, useTestCaseNames=False, env=None): super().__init__([source]) assert tests or useTestCaseNames, "must use one or the other" if buildpython is None: buildpython = ["python"] if trialpython is None: trialpython = [] if trial is not None: self.trial = trial if randomly is not None: self.randomly = randomly if recurse is not None: self.recurse = recurse from buildbot.steps.python_twisted import Trial buildcommand = buildpython + ["./setup.py", "build"] self.addStep(Compile(command=buildcommand, env=env)) self.addStep(Trial( python=trialpython, trial=self.trial, testpath=testpath, tests=tests, testChanges=useTestCaseNames, randomly=self.randomly, recurse=self.recurse, env=env, )) # compatibility classes, will go away. Note that these only offer # compatibility at the constructor level: if you have subclassed these # factories, your subclasses are unlikely to still work correctly. ConfigurableBuildFactory = BuildFactory class BasicBuildFactory(GNUAutoconf): # really a "GNU Autoconf-created tarball -in-CVS tree" builder def __init__(self, cvsroot, cvsmodule, configure=None, configureEnv=None, compile="make all", test="make check", cvsCopy=False): if configureEnv is None: configureEnv = {} mode = "full" method = "clobber" if cvsCopy: method = "copy" source = CVS( cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode, method=method) super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) class QuickBuildFactory(BasicBuildFactory): useProgress = False def __init__(self, cvsroot, cvsmodule, configure=None, configureEnv=None, compile="make all", test="make check", cvsCopy=False): if configureEnv is None: configureEnv = {} mode = "incremental" source = CVS(cvsroot=cvsroot, cvsmodule=cvsmodule, mode=mode) super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) class BasicSVN(GNUAutoconf): def __init__(self, svnurl, configure=None, configureEnv=None, compile="make all", test="make check"): if configureEnv is None: configureEnv = {} source = SVN(svnurl=svnurl, mode="incremental") super().__init__(source, configure=configure, configureEnv=configureEnv, compile=compile, test=test) buildbot-2.6.0/master/buildbot/process/log.py000066400000000000000000000130641361162603000212350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
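# --- illustrative sketch, not part of the original factory.py ---
# Assembling a BuildFactory as defined above for use in a builder configuration.
# Steps are passed as instances (addStep() warns that passing a BuildStep subclass
# is deprecated).  The repository URL, commands, worker path and secret name are
# placeholders assumed for the example.
from buildbot.plugins import steps, util

f = util.BuildFactory()
f.addStep(steps.Git(repourl='https://example.com/repo.git', mode='incremental'))
f.addSteps([
    steps.ShellCommand(name='build', command=['make', 'all']),
    steps.ShellCommand(name='test', command=['make', 'check']),
])
# steps that need a secret on the worker can be wrapped with the withSecrets()
# context manager shown above, which downloads the secret before the wrapped
# steps run and removes it afterwards
with f.withSecrets([('/tmp/example-token', util.Interpolate('%(secret:example-token)s'))]):
    f.addStep(steps.ShellCommand(name='deploy', command=['./deploy.sh']))
# -----------------------------------------------------------------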
# # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.util import lineboundaries class Log: _byType = {} def __init__(self, master, name, type, logid, decoder): self.type = type self.logid = logid self.master = master self.name = name self.subPoint = util.subscription.SubscriptionPoint("%r log" % (name,)) self.subscriptions = {} self.finished = False self.finishWaiters = [] self.lock = defer.DeferredLock() self.decoder = decoder @staticmethod def _decoderFromString(cfg): """ Return a decoder function. If cfg is a string such as 'latin-1' or u'latin-1', then we return a new lambda, s.decode(). If cfg is already a lambda or function, then we return that. """ if isinstance(cfg, (bytes, str)): return lambda s: s.decode(cfg, 'replace') return cfg @classmethod def new(cls, master, name, type, logid, logEncoding): type = str(type) try: subcls = cls._byType[type] except KeyError: raise RuntimeError("Invalid log type %r" % (type,)) decoder = Log._decoderFromString(logEncoding) return subcls(master, name, type, logid, decoder) def getName(self): return self.name # subscriptions def subscribe(self, callback): return self.subPoint.subscribe(callback) # adding lines @defer.inlineCallbacks def addRawLines(self, lines): # used by subclasses to add lines that are already appropriately # formatted for the log type, and newline-terminated assert lines[-1] == '\n' assert not self.finished yield self.lock.run(lambda: self.master.data.updates.appendLog(self.logid, lines)) # completion def isFinished(self): return self.finished def waitUntilFinished(self): d = defer.Deferred() if self.finished: d.succeed(None) else: self.finishWaiters.append(d) return d @defer.inlineCallbacks def finish(self): assert not self.finished def fToRun(): self.finished = True return self.master.data.updates.finishLog(self.logid) yield self.lock.run(fToRun) # notify subscribers *after* finishing the log self.subPoint.deliver(None, None) # notify those waiting for finish for d in self.finishWaiters: d.callback(None) # start a compressLog call but don't make our caller wait for # it to complete d = self.master.data.updates.compressLog(self.logid) d.addErrback( log.err, "while compressing log %d (ignored)" % self.logid) class PlainLog(Log): def __init__(self, master, name, type, logid, decoder): super(PlainLog, self).__init__(master, name, type, logid, decoder) def wholeLines(lines): if not isinstance(lines, str): lines = self.decoder(lines) self.subPoint.deliver(None, lines) return self.addRawLines(lines) self.lbf = lineboundaries.LineBoundaryFinder(wholeLines) def addContent(self, text): # add some text in the log's default stream return self.lbf.append(text) @defer.inlineCallbacks def finish(self): yield self.lbf.flush() yield super(PlainLog, self).finish() class TextLog(PlainLog): pass Log._byType['t'] = TextLog class HtmlLog(PlainLog): pass Log._byType['h'] = HtmlLog class StreamLog(Log): pat = re.compile('^', re.M) def __init__(self, step, name, type, logid, decoder): super(StreamLog, self).__init__(step, name, type, logid, decoder) self.lbfs = {} def _getLbf(self, stream): try: return self.lbfs[stream] except KeyError: def wholeLines(lines): if not isinstance(lines, str): lines = self.decoder(lines) # deliver the un-annotated version to subscribers self.subPoint.deliver(stream, lines) # strip the last character, as the regexp will add a # prefix character after the trailing newline return self.addRawLines(self.pat.sub(stream, 
lines)[:-1]) lbf = self.lbfs[stream] = \ lineboundaries.LineBoundaryFinder(wholeLines) return lbf def addStdout(self, text): return self._getLbf('o').append(text) def addStderr(self, text): return self._getLbf('e').append(text) def addHeader(self, text): return self._getLbf('h').append(text) @defer.inlineCallbacks def finish(self): for lbf in self.lbfs.values(): yield lbf.flush() yield super(StreamLog, self).finish() Log._byType['s'] = StreamLog buildbot-2.6.0/master/buildbot/process/logobserver.py000066400000000000000000000114101361162603000227760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from zope.interface import implementer from buildbot import interfaces @implementer(interfaces.ILogObserver) class LogObserver: def setStep(self, step): self.step = step def setLog(self, loog): loog.subscribe(self.gotData) def gotData(self, stream, data): if data is None: self.finishReceived() elif stream is None or stream == 'o': self.outReceived(data) elif stream == 'e': self.errReceived(data) elif stream == 'h': self.headerReceived(data) def finishReceived(self): pass def outReceived(self, data): pass def errReceived(self, data): pass def headerReceived(self, data): pass class LogLineObserver(LogObserver): stdoutDelimiter = "\n" stderrDelimiter = "\n" headerDelimiter = "\n" def __init__(self): super().__init__() self.max_length = 16384 def setMaxLineLength(self, max_length): """ Set the maximum line length: lines longer than max_length are dropped. Default is 16384 bytes. Use sys.maxint for effective infinity. """ self.max_length = max_length def _lineReceived(self, data, delimiter, funcReceived): for line in data.rstrip().split(delimiter): if len(line) > self.max_length: continue funcReceived(line) def outReceived(self, data): self._lineReceived(data, self.stdoutDelimiter, self.outLineReceived) def errReceived(self, data): self._lineReceived(data, self.stderrDelimiter, self.errLineReceived) def headerReceived(self, data): self._lineReceived(data, self.headerDelimiter, self.headerLineReceived) def outLineReceived(self, line): """This will be called with complete stdout lines (not including the delimiter). Override this in your observer.""" def errLineReceived(self, line): """This will be called with complete lines of stderr (not including the delimiter). Override this in your observer.""" def headerLineReceived(self, line): """This will be called with complete lines of stderr (not including the delimiter). Override this in your observer.""" class LineConsumerLogObserver(LogLineObserver): def __init__(self, consumerFunction): super().__init__() self.generator = None self.consumerFunction = consumerFunction def feed(self, input): # note that we defer starting the generator until the first bit of # data, since the observer may be instantiated during configuration as # well as for each execution of the step. 
self.generator = self.consumerFunction() next(self.generator) # shortcut all remaining feed operations self.feed = self.generator.send self.feed(input) def outLineReceived(self, line): self.feed(('o', line)) def errLineReceived(self, line): self.feed(('e', line)) def headerLineReceived(self, line): self.feed(('h', line)) def finishReceived(self): if self.generator: self.generator.close() class OutputProgressObserver(LogObserver): length = 0 def __init__(self, name): self.name = name def gotData(self, stream, data): if data: self.length += len(data) self.step.setProgress(self.name, self.length) class BufferLogObserver(LogObserver): def __init__(self, wantStdout=True, wantStderr=False): super().__init__() self.stdout = [] if wantStdout else None self.stderr = [] if wantStderr else None def outReceived(self, data): if self.stdout is not None: self.stdout.append(data) def errReceived(self, data): if self.stderr is not None: self.stderr.append(data) def _get(self, chunks): if chunks is None or not chunks: return '' return ''.join(chunks) def getStdout(self): return self._get(self.stdout) def getStderr(self): return self._get(self.stderr) buildbot-2.6.0/master/buildbot/process/measured_service.py000066400000000000000000000026261361162603000240030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import metrics from buildbot.util.service import BuildbotServiceManager class MeasuredBuildbotServiceManager(BuildbotServiceManager): managed_services_name = "services" @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): timer = metrics.Timer( "{0}.reconfigServiceWithBuildbotConfig".format(self.name)) timer.start() yield super(MeasuredBuildbotServiceManager, self).reconfigServiceWithBuildbotConfig(new_config) metrics.MetricCountEvent.log("num_{0}".format(self.managed_services_name), len(list(self)), absolute=True) timer.stop() buildbot-2.6.0/master/buildbot/process/metrics.py000066400000000000000000000332111361162603000221160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members r""" Buildbot metrics module Keeps track of counts and timings of various internal buildbot activities. 
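A minimal configuration sketch (the values shown are the defaults read by
MetricLogObserver below; adjust as needed)::

    c['metrics'] = dict(log_interval=60, periodic_interval=10)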
Basic architecture: MetricEvent.log(...) || \/ MetricLogObserver || \/ MetricHandler || \/ MetricWatcher """ import gc import os import sys from collections import defaultdict from collections import deque from twisted.application import service from twisted.internet import reactor from twisted.internet.task import LoopingCall from twisted.python import log from buildbot import util from buildbot.util import service as util_service # Make use of the resource module if we can try: import resource assert resource except ImportError: resource = None class MetricEvent: @classmethod def log(cls, *args, **kwargs): log.msg(metric=cls(*args, **kwargs)) class MetricCountEvent(MetricEvent): def __init__(self, counter, count=1, absolute=False): self.counter = counter self.count = count self.absolute = absolute class MetricTimeEvent(MetricEvent): def __init__(self, timer, elapsed): self.timer = timer self.elapsed = elapsed ALARM_OK, ALARM_WARN, ALARM_CRIT = list(range(3)) ALARM_TEXT = ["OK", "WARN", "CRIT"] class MetricAlarmEvent(MetricEvent): def __init__(self, alarm, msg=None, level=ALARM_OK): self.alarm = alarm self.level = level self.msg = msg def countMethod(counter): def decorator(func): def wrapper(*args, **kwargs): MetricCountEvent.log(counter=counter) return func(*args, **kwargs) return wrapper return decorator class Timer: # For testing _reactor = None def __init__(self, name): self.name = name self.started = None def startTimer(self, func): def wrapper(*args, **kwargs): self.start() return func(*args, **kwargs) return wrapper def stopTimer(self, func): def wrapper(*args, **kwargs): try: return func(*args, **kwargs) finally: self.stop() return wrapper def start(self): self.started = util.now(self._reactor) def stop(self): if self.started is not None: elapsed = util.now(self._reactor) - self.started MetricTimeEvent.log(timer=self.name, elapsed=elapsed) self.started = None def timeMethod(name, _reactor=None): def decorator(func): t = Timer(name) t._reactor = _reactor def wrapper(*args, **kwargs): t.start() try: return func(*args, **kwargs) finally: t.stop() return wrapper return decorator class FiniteList(deque): def __init__(self, maxlen=10): self._maxlen = maxlen super().__init__() def append(self, o): deque.append(self, o) if len(self) > self._maxlen: self.popleft() class AveragingFiniteList(FiniteList): def __init__(self, maxlen=10): super().__init__(maxlen) self.average = 0 def append(self, o): super().append(o) self._calc() def _calc(self): if not self: self.average = 0 else: self.average = float(sum(self)) / len(self) return self.average class MetricHandler: def __init__(self, metrics): self.metrics = metrics self.watchers = [] self.reset() def addWatcher(self, watcher): self.watchers.append(watcher) def removeWatcher(self, watcher): self.watchers.remove(watcher) # For subclasses to define def reset(self): raise NotImplementedError def handle(self, eventDict, metric): raise NotImplementedError def get(self, metric): raise NotImplementedError def keys(self): raise NotImplementedError def report(self): raise NotImplementedError def asDict(self): raise NotImplementedError class MetricCountHandler(MetricHandler): _counters = None def reset(self): self._counters = defaultdict(int) def handle(self, eventDict, metric): if metric.absolute: self._counters[metric.counter] = metric.count else: self._counters[metric.counter] += metric.count def keys(self): return list(self._counters) def get(self, counter): return self._counters[counter] def report(self): retval = [] for counter in 
sorted(self.keys()): retval.append("Counter %s: %i" % (counter, self.get(counter))) return "\n".join(retval) def asDict(self): retval = {} for counter in sorted(self.keys()): retval[counter] = self.get(counter) return dict(counters=retval) class MetricTimeHandler(MetricHandler): _timers = None def reset(self): self._timers = defaultdict(AveragingFiniteList) def handle(self, eventDict, metric): self._timers[metric.timer].append(metric.elapsed) def keys(self): return list(self._timers) def get(self, timer): return self._timers[timer].average def report(self): retval = [] for timer in sorted(self.keys()): retval.append("Timer %s: %.3g" % (timer, self.get(timer))) return "\n".join(retval) def asDict(self): retval = {} for timer in sorted(self.keys()): retval[timer] = self.get(timer) return dict(timers=retval) class MetricAlarmHandler(MetricHandler): _alarms = None def reset(self): self._alarms = defaultdict(lambda x: ALARM_OK) def handle(self, eventDict, metric): self._alarms[metric.alarm] = (metric.level, metric.msg) def report(self): retval = [] for alarm, (level, msg) in sorted(self._alarms.items()): if msg: retval.append("%s %s: %s" % (ALARM_TEXT[level], alarm, msg)) else: retval.append("%s %s" % (ALARM_TEXT[level], alarm)) return "\n".join(retval) def asDict(self): retval = {} for alarm, (level, msg) in sorted(self._alarms.items()): retval[alarm] = (ALARM_TEXT[level], msg) return dict(alarms=retval) class AttachedWorkersWatcher: def __init__(self, metrics): self.metrics = metrics def run(self): # Check if 'BotMaster.attached_workers' equals # 'AbstractWorker.attached_workers' h = self.metrics.getHandler(MetricCountEvent) if not h: log.msg("Couldn't get MetricCountEvent handler") MetricAlarmEvent.log('AttachedWorkersWatcher', msg="Coudln't get MetricCountEvent handler", level=ALARM_WARN) return botmaster_count = h.get('BotMaster.attached_workers') worker_count = h.get('AbstractWorker.attached_workers') # We let these be off by one since they're counted at slightly # different times if abs(botmaster_count - worker_count) > 1: level = ALARM_WARN else: level = ALARM_OK MetricAlarmEvent.log('attached_workers', msg='%s %s' % (botmaster_count, worker_count), level=level) def _get_rss(): if sys.platform == 'linux': try: with open("/proc/%i/statm" % os.getpid()) as f: return int(f.read().split()[1]) except Exception: return 0 return 0 def periodicCheck(_reactor=reactor): try: # Measure how much garbage we have garbage_count = len(gc.garbage) MetricCountEvent.log('gc.garbage', garbage_count, absolute=True) if garbage_count == 0: level = ALARM_OK else: level = ALARM_WARN MetricAlarmEvent.log('gc.garbage', level=level) if resource: r = resource.getrusage(resource.RUSAGE_SELF) attrs = ['ru_utime', 'ru_stime', 'ru_maxrss', 'ru_ixrss', 'ru_idrss', 'ru_isrss', 'ru_minflt', 'ru_majflt', 'ru_nswap', 'ru_inblock', 'ru_oublock', 'ru_msgsnd', 'ru_msgrcv', 'ru_nsignals', 'ru_nvcsw', 'ru_nivcsw'] for i, a in enumerate(attrs): # Linux versions prior to 2.6.32 didn't report this value, but we # can calculate it from /proc//statm v = r[i] if a == 'ru_maxrss' and v == 0: v = _get_rss() * resource.getpagesize() / 1024 MetricCountEvent.log('resource.%s' % a, v, absolute=True) MetricCountEvent.log( 'resource.pagesize', resource.getpagesize(), absolute=True) # Measure the reactor delay then = util.now(_reactor) dt = 0.1 def cb(): now = util.now(_reactor) delay = (now - then) - dt MetricTimeEvent.log("reactorDelay", delay) _reactor.callLater(dt, cb) except Exception: log.err(None, "while collecting VM metrics") class 
MetricLogObserver(util_service.ReconfigurableServiceMixin, service.MultiService): _reactor = reactor def __init__(self): super().__init__() self.setName('metrics') self.enabled = False self.periodic_task = None self.periodic_interval = None self.log_task = None self.log_interval = None # Mapping of metric type to handlers for that type self.handlers = {} # Register our default handlers self.registerHandler(MetricCountEvent, MetricCountHandler(self)) self.registerHandler(MetricTimeEvent, MetricTimeHandler(self)) self.registerHandler(MetricAlarmEvent, MetricAlarmHandler(self)) self.getHandler(MetricCountEvent).addWatcher( AttachedWorkersWatcher(self)) def reconfigServiceWithBuildbotConfig(self, new_config): # first, enable or disable if new_config.metrics is None: self.disable() else: self.enable() metrics_config = new_config.metrics # Start up periodic logging log_interval = metrics_config.get('log_interval', 60) if log_interval != self.log_interval: if self.log_task: self.log_task.stop() self.log_task = None if log_interval: self.log_task = LoopingCall(self.report) self.log_task.clock = self._reactor self.log_task.start(log_interval) # same for the periodic task periodic_interval = metrics_config.get('periodic_interval', 10) if periodic_interval != self.periodic_interval: if self.periodic_task: self.periodic_task.stop() self.periodic_task = None if periodic_interval: self.periodic_task = LoopingCall(periodicCheck, self._reactor) self.periodic_task.clock = self._reactor self.periodic_task.start(periodic_interval) # upcall return super().reconfigServiceWithBuildbotConfig(new_config) def stopService(self): self.disable() super().stopService() def enable(self): if self.enabled: return log.addObserver(self.emit) self.enabled = True def disable(self): if not self.enabled: return if self.periodic_task: self.periodic_task.stop() self.periodic_task = None if self.log_task: self.log_task.stop() self.log_task = None log.removeObserver(self.emit) self.enabled = False def registerHandler(self, interface, handler): old = self.getHandler(interface) self.handlers[interface] = handler return old def getHandler(self, interface): return self.handlers.get(interface) def emit(self, eventDict): # Ignore non-statistic events metric = eventDict.get('metric') if not metric or not isinstance(metric, MetricEvent): return if metric.__class__ not in self.handlers: return h = self.handlers[metric.__class__] h.handle(eventDict, metric) for w in h.watchers: w.run() def asDict(self): retval = {} for interface, handler in self.handlers.items(): retval.update(handler.asDict()) return retval def report(self): try: for interface, handler in self.handlers.items(): report = handler.report() if not report: continue for line in report.split("\n"): log.msg(line) except Exception: log.err(None, "generating metric report") buildbot-2.6.0/master/buildbot/process/properties.py000066400000000000000000000736721361162603000226630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import collections import json import re import weakref from twisted.internet import defer from twisted.python.components import registerAdapter from zope.interface import implementer from buildbot import config from buildbot import util from buildbot.interfaces import IProperties from buildbot.interfaces import IRenderable from buildbot.util import flatten @implementer(IProperties) class Properties(util.ComparableMixin): """ I represent a set of properties that can be interpolated into various strings in buildsteps. @ivar properties: dictionary mapping property values to tuples (value, source), where source is a string identifying the source of the property. Objects of this class can be read like a dictionary -- in this case, only the property value is returned. As a special case, a property value of None is returned as an empty string when used as a mapping. """ compare_attrs = ('properties',) def __init__(self, **kwargs): """ @param kwargs: initial property values (for testing) """ self.properties = {} # Track keys which are 'runtime', and should not be # persisted if a build is rebuilt self.runtime = set() self.build = None # will be set by the Build when starting self._used_secrets = {} if kwargs: self.update(kwargs, "TEST") self._master = None self._sourcestamps = None self._changes = None @property def master(self): if self.build is not None: return self.build.master return self._master @master.setter def master(self, value): self._master = value @property def sourcestamps(self): if self.build is not None: return [b.asDict() for b in self.build.getAllSourceStamps()] elif self._sourcestamps is not None: return self._sourcestamps raise AttributeError('neither build nor _sourcestamps are set') @sourcestamps.setter def sourcestamps(self, value): self._sourcestamps = value def getSourceStamp(self, codebase=''): for source in self.sourcestamps: if source['codebase'] == codebase: return source return None @property def changes(self): if self.build is not None: return [c.asChDict() for c in self.build.allChanges()] elif self._changes is not None: return self._changes raise AttributeError('neither build nor _changes are set') @changes.setter def changes(self, value): self._changes = value @property def files(self): if self.build is not None: return self.build.allFiles() files = [] # self.changes, not self._changes to raise AttributeError if unset for chdict in self.changes: files.extend(chdict['files']) return files @classmethod def fromDict(cls, propDict): properties = cls() for name, (value, source) in propDict.items(): properties.setProperty(name, value, source) return properties def __getstate__(self): d = self.__dict__.copy() d['build'] = None return d def __setstate__(self, d): self.__dict__ = d if not hasattr(self, 'runtime'): self.runtime = set() def __contains__(self, name): return name in self.properties def __getitem__(self, name): """Just get the value for this property.""" rv = self.properties[name][0] return rv def __bool__(self): return bool(self.properties) def getPropertySource(self, name): return self.properties[name][1] def asList(self): """Return the properties as a sorted list of (name, value, source)""" ret = sorted([(k, v[0], v[1]) for k, v in self.properties.items()]) return ret def asDict(self): """Return the properties as 
a simple key:value dictionary, properly unicoded""" return self.properties.copy() def __repr__(self): return ('Properties(**' + repr(dict((k, v[0]) for k, v in self.properties.items())) + ')') def update(self, dict, source, runtime=False): """Update this object from a dictionary, with an explicit source specified.""" for k, v in dict.items(): self.setProperty(k, v, source, runtime=runtime) def updateFromProperties(self, other): """Update this object based on another object; the other object's """ self.properties.update(other.properties) self.runtime.update(other.runtime) def updateFromPropertiesNoRuntime(self, other): """Update this object based on another object, but don't include properties that were marked as runtime.""" for k, v in other.properties.items(): if k not in other.runtime: self.properties[k] = v # IProperties methods def getProperty(self, name, default=None): return self.properties.get(name, (default,))[0] def hasProperty(self, name): return name in self.properties has_key = hasProperty def setProperty(self, name, value, source, runtime=False): name = util.bytes2unicode(name) if not IRenderable.providedBy(value): json.dumps(value) # Let the exception propagate ... source = util.bytes2unicode(source) self.properties[name] = (value, source) if runtime: self.runtime.add(name) def getProperties(self): return self def getBuild(self): return self.build def render(self, value): renderable = IRenderable(value) return defer.maybeDeferred(renderable.getRenderingFor, self) # as the secrets are used in the renderable, they can pretty much arrive anywhere # in the log of state strings # so we have the renderable record here which secrets are used that we must remove def useSecret(self, secret_value, secret_name): self._used_secrets[secret_value] = "<" + secret_name + ">" # This method shall then be called to remove secrets from any text that could be logged somewhere # and that could contain secrets def cleanupTextFromSecrets(self, text): # Better be correct and inefficient than efficient and wrong for k, v in self._used_secrets.items(): text = text.replace(k, v) return text class PropertiesMixin: """ A mixin to add L{IProperties} methods to a class which does not implement the interface, but which can be coerced to the interface via an adapter. This is useful because L{IProperties} methods are often called on L{Build} and L{BuildStatus} objects without first coercing them. @ivar set_runtime_properties: the default value for the C{runtime} parameter of L{setProperty}. """ set_runtime_properties = False def getProperty(self, propname, default=None): props = IProperties(self) return props.getProperty(propname, default) def hasProperty(self, propname): props = IProperties(self) return props.hasProperty(propname) has_key = hasProperty def setProperty(self, propname, value, source='Unknown', runtime=None): # source is not optional in IProperties, but is optional here to avoid # breaking user-supplied code that fails to specify a source props = IProperties(self) if runtime is None: runtime = self.set_runtime_properties props.setProperty(propname, value, source, runtime=runtime) def getProperties(self): return IProperties(self) def render(self, value): props = IProperties(self) return props.render(value) class _PropertyMap: """ Privately-used mapping object to implement WithProperties' substitutions, including the rendering of None as ''. 
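    For example (property names purely illustrative), a format string handled
    through this mapping might look like ``%(revision:-unknown)s`` (use the
    ``revision`` property, otherwise ``unknown``), ``%(branch:~trunk)s`` (use
    the property only if it is true, otherwise ``trunk``), or
    ``%(got_revision:+built)s`` (substitute ``built`` only when the property
    exists), matching the ``colon_minus``/``colon_tilde``/``colon_plus``
    handlers defined below.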
""" colon_minus_re = re.compile(r"(.*):-(.*)") colon_tilde_re = re.compile(r"(.*):~(.*)") colon_plus_re = re.compile(r"(.*):\+(.*)") def __init__(self, properties): # use weakref here to avoid a reference loop self.properties = weakref.ref(properties) self.temp_vals = {} def __getitem__(self, key): properties = self.properties() assert properties is not None def colon_minus(mo): # %(prop:-repl)s # if prop exists, use it; otherwise, use repl prop, repl = mo.group(1, 2) if prop in self.temp_vals: return self.temp_vals[prop] elif prop in properties: return properties[prop] return repl def colon_tilde(mo): # %(prop:~repl)s # if prop exists and is true (nonempty), use it; otherwise, use # repl prop, repl = mo.group(1, 2) if prop in self.temp_vals and self.temp_vals[prop]: return self.temp_vals[prop] elif prop in properties and properties[prop]: return properties[prop] return repl def colon_plus(mo): # %(prop:+repl)s # if prop exists, use repl; otherwise, an empty string prop, repl = mo.group(1, 2) if prop in properties or prop in self.temp_vals: return repl return '' for regexp, fn in [ (self.colon_minus_re, colon_minus), (self.colon_tilde_re, colon_tilde), (self.colon_plus_re, colon_plus), ]: mo = regexp.match(key) if mo: rv = fn(mo) break else: # If explicitly passed as a kwarg, use that, # otherwise, use the property value. if key in self.temp_vals: rv = self.temp_vals[key] else: rv = properties[key] # translate 'None' to an empty string if rv is None: rv = '' return rv def add_temporary_value(self, key, val): 'Add a temporary value (to support keyword arguments to WithProperties)' self.temp_vals[key] = val @implementer(IRenderable) class WithProperties(util.ComparableMixin): """ This is a marker class, used fairly widely to indicate that we want to interpolate build properties. """ compare_attrs = ('fmtstring', 'args', 'lambda_subs') def __init__(self, fmtstring, *args, **lambda_subs): self.fmtstring = fmtstring self.args = args if not self.args: self.lambda_subs = lambda_subs for key, val in self.lambda_subs.items(): if not callable(val): raise ValueError( 'Value for lambda substitution "%s" must be callable.' % key) elif lambda_subs: raise ValueError( 'WithProperties takes either positional or keyword substitutions, not both.') def getRenderingFor(self, build): pmap = _PropertyMap(build.getProperties()) if self.args: strings = [] for name in self.args: strings.append(pmap[name]) s = self.fmtstring % tuple(strings) else: for k, v in self.lambda_subs.items(): pmap.add_temporary_value(k, v(build)) s = self.fmtstring % pmap return s class _NotHasKey(util.ComparableMixin): """A marker for missing ``hasKey`` parameter. To withstand ``deepcopy``, ``reload`` and pickle serialization round trips, check it with ``==`` or ``!=``. 
""" compare_attrs = () # any instance of _NotHasKey would do, yet we don't want to create and delete # them all the time _notHasKey = _NotHasKey() @implementer(IRenderable) class _Lookup(util.ComparableMixin): compare_attrs = ( 'value', 'index', 'default', 'defaultWhenFalse', 'hasKey', 'elideNoneAs') def __init__(self, value, index, default=None, defaultWhenFalse=True, hasKey=_notHasKey, elideNoneAs=None): self.value = value self.index = index self.default = default self.defaultWhenFalse = defaultWhenFalse self.hasKey = hasKey self.elideNoneAs = elideNoneAs def __repr__(self): return '_Lookup(%r, %r%s%s%s%s)' % ( self.value, self.index, ', default=%r' % (self.default,) if self.default is not None else '', ', defaultWhenFalse=False' if not self.defaultWhenFalse else '', ', hasKey=%r' % (self.hasKey,) if self.hasKey != _notHasKey else '', ', elideNoneAs=%r' % (self.elideNoneAs,) if self.elideNoneAs is not None else '') @defer.inlineCallbacks def getRenderingFor(self, build): value = build.render(self.value) index = build.render(self.index) value, index = yield defer.gatherResults([value, index]) if index not in value: rv = yield build.render(self.default) else: if self.defaultWhenFalse: rv = yield build.render(value[index]) if not rv: rv = yield build.render(self.default) elif self.hasKey != _notHasKey: rv = yield build.render(self.hasKey) elif self.hasKey != _notHasKey: rv = yield build.render(self.hasKey) else: rv = yield build.render(value[index]) if rv is None: rv = yield build.render(self.elideNoneAs) return rv def _getInterpolationList(fmtstring): # TODO: Verify that no positional substitutions are requested dd = collections.defaultdict(str) fmtstring % dd return list(dd) @implementer(IRenderable) class _PropertyDict: def getRenderingFor(self, build): return build.getProperties() _thePropertyDict = _PropertyDict() @implementer(IRenderable) class _WorkerPropertyDict: def getRenderingFor(self, build): return build.getBuild().getWorkerInfo() _theWorkerPropertyDict = _WorkerPropertyDict() @implementer(IRenderable) class _SecretRenderer: def __init__(self, secret_name): self.secret_name = secret_name @defer.inlineCallbacks def getRenderingFor(self, properties): secretsSrv = properties.master.namedServices.get("secrets") if not secretsSrv: error_message = "secrets service not started, need to configure" \ " SecretManager in c['services'] to use 'secrets'" \ "in Interpolate" raise KeyError(error_message) credsservice = properties.master.namedServices['secrets'] secret_detail = yield credsservice.get(self.secret_name) if secret_detail is None: raise KeyError("secret key %s is not found in any provider" % self.secret_name) properties.useSecret(secret_detail.value, self.secret_name) return secret_detail.value class Secret(_SecretRenderer): def __repr__(self): return "Secret({0})".format(self.secret_name) class _SecretIndexer: def __contains__(self, password): return True def __getitem__(self, password): return _SecretRenderer(password) @implementer(IRenderable) class _SourceStampDict(util.ComparableMixin): compare_attrs = ('codebase',) def __init__(self, codebase): self.codebase = codebase def getRenderingFor(self, props): ss = props.getSourceStamp(self.codebase) if ss: return ss return {} @implementer(IRenderable) class _Lazy(util.ComparableMixin): compare_attrs = ('value',) def __init__(self, value): self.value = value def getRenderingFor(self, build): return self.value def __repr__(self): return '_Lazy(%r)' % self.value @implementer(IRenderable) class Interpolate(util.ComparableMixin): """ 
This is a marker class, used fairly widely to indicate that we want to interpolate build properties. """ compare_attrs = ('fmtstring', 'args', 'kwargs') identifier_re = re.compile(r'^[\w._-]*$') def __init__(self, fmtstring, *args, **kwargs): self.fmtstring = fmtstring self.args = args self.kwargs = kwargs if self.args and self.kwargs: config.error("Interpolate takes either positional or keyword " "substitutions, not both.") if not self.args: self.interpolations = {} self._parse(fmtstring) # TODO: add case below for when there's no args or kwargs.. def __repr__(self): if self.args: return 'Interpolate(%r, *%r)' % (self.fmtstring, self.args) elif self.kwargs: return 'Interpolate(%r, **%r)' % (self.fmtstring, self.kwargs) return 'Interpolate(%r)' % (self.fmtstring,) @staticmethod def _parse_prop(arg): try: prop, repl = arg.split(":", 1) except ValueError: prop, repl = arg, None if not Interpolate.identifier_re.match(prop): config.error( "Property name must be alphanumeric for prop Interpolation '%s'" % arg) prop = repl = None return _thePropertyDict, prop, repl @staticmethod def _parse_secret(arg): try: secret, repl = arg.split(":", 1) except ValueError: secret, repl = arg, None return _SecretIndexer(), secret, repl @staticmethod def _parse_src(arg): # TODO: Handle changes try: codebase, attr, repl = arg.split(":", 2) except ValueError: try: codebase, attr = arg.split(":", 1) repl = None except ValueError: config.error( "Must specify both codebase and attribute for src Interpolation '%s'" % arg) return {}, None, None if not Interpolate.identifier_re.match(codebase): config.error( "Codebase must be alphanumeric for src Interpolation '%s'" % arg) codebase = attr = repl = None if not Interpolate.identifier_re.match(attr): config.error( "Attribute must be alphanumeric for src Interpolation '%s'" % arg) codebase = attr = repl = None return _SourceStampDict(codebase), attr, repl def _parse_worker(self, arg): try: prop, repl = arg.split(":", 1) except ValueError: prop, repl = arg, None return _theWorkerPropertyDict, prop, repl def _parse_kw(self, arg): try: kw, repl = arg.split(":", 1) except ValueError: kw, repl = arg, None if not Interpolate.identifier_re.match(kw): config.error( "Keyword must be alphanumeric for kw Interpolation '%s'" % arg) kw = repl = None return _Lazy(self.kwargs), kw, repl def _parseSubstitution(self, fmt): try: key, arg = fmt.split(":", 1) except ValueError: config.error( "invalid Interpolate substitution without selector '%s'" % fmt) return fn = getattr(self, "_parse_" + key, None) if not fn: config.error("invalid Interpolate selector '%s'" % key) return None return fn(arg) @staticmethod def _splitBalancedParen(delim, arg): parenCount = 0 for i, val in enumerate(arg): if arg[i] == "(": parenCount += 1 if arg[i] == ")": parenCount -= 1 if parenCount < 0: raise ValueError if parenCount == 0 and arg[i] == delim: return arg[0:i], arg[i + 1:] return arg def _parseColon_minus(self, d, kw, repl): return _Lookup(d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=False, elideNoneAs='') def _parseColon_tilde(self, d, kw, repl): return _Lookup(d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=True, elideNoneAs='') def _parseColon_plus(self, d, kw, repl): return _Lookup(d, kw, hasKey=Interpolate(repl, **self.kwargs), default='', defaultWhenFalse=False, elideNoneAs='') def _parseColon_ternary(self, d, kw, repl, defaultWhenFalse=False): delim = repl[0] if delim == '(': config.error("invalid Interpolate ternary delimiter '('") return None try: truePart, 
falsePart = self._splitBalancedParen(delim, repl[1:]) except ValueError: config.error("invalid Interpolate ternary expression '%s' with delimiter '%s'" % ( repl[1:], repl[0])) return None return _Lookup(d, kw, hasKey=Interpolate(truePart, **self.kwargs), default=Interpolate(falsePart, **self.kwargs), defaultWhenFalse=defaultWhenFalse, elideNoneAs='') def _parseColon_ternary_hash(self, d, kw, repl): return self._parseColon_ternary(d, kw, repl, defaultWhenFalse=True) def _parse(self, fmtstring): keys = _getInterpolationList(fmtstring) for key in keys: if key not in self.interpolations: d, kw, repl = self._parseSubstitution(key) if repl is None: repl = '-' for pattern, fn in [ ("-", self._parseColon_minus), ("~", self._parseColon_tilde), ("+", self._parseColon_plus), ("?", self._parseColon_ternary), ("#?", self._parseColon_ternary_hash) ]: junk, matches, tail = repl.partition(pattern) if not junk and matches: self.interpolations[key] = fn(d, kw, tail) break if key not in self.interpolations: config.error( "invalid Interpolate default type '%s'" % repl[0]) def getRenderingFor(self, build): props = build.getProperties() if self.args: d = props.render(self.args) d.addCallback(lambda args: self.fmtstring % tuple(args)) else: d = props.render(self.interpolations) d.addCallback(lambda res: self.fmtstring % res) return d @implementer(IRenderable) class _ComparisonRenderer(util.ComparableMixin): """ An instance of this class renders a comparison given by a comparator function with v1 and v2 """ compare_attrs = ('fn',) def __init__(self, v1, v2, cstr, comparator): self.v1, self.v2, self.comparator, self.cstr = v1, v2, comparator, cstr @defer.inlineCallbacks def getRenderingFor(self, props): v1 = yield props.render(self.v1) v2 = yield props.render(self.v2) return self.comparator(v1, v2) def __repr__(self): return '%r %r %r' % (self.v1, self.cstr, self.v2) @implementer(IRenderable) class Property(util.ComparableMixin): """ An instance of this class renders a property of a build. """ compare_attrs = ('key', 'default', 'defaultWhenFalse') def __init__(self, key, default=None, defaultWhenFalse=True): """ @param key: Property to render. @param default: Value to use if property isn't set. @param defaultWhenFalse: When true (default), use default value if property evaluates to False. Otherwise, use default value only when property isn't set. 
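        For example (property name illustrative only), ``Property('branch',
        default='master')`` renders the value of the ``branch`` property and
        falls back to ``'master'`` when the property is unset, or set but
        false unless ``defaultWhenFalse=False`` is passed.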
""" self.key = key self.default = default self.defaultWhenFalse = defaultWhenFalse def __eq__(self, other): return _ComparisonRenderer(self, other, "==", lambda v1, v2: v1 == v2) def __ne__(self, other): return _ComparisonRenderer(self, other, "!=", lambda v1, v2: v1 != v2) def __lt__(self, other): return _ComparisonRenderer(self, other, "<", lambda v1, v2: v1 < v2) def __le__(self, other): return _ComparisonRenderer(self, other, "<=", lambda v1, v2: v1 <= v2) def __gt__(self, other): return _ComparisonRenderer(self, other, ">", lambda v1, v2: v1 > v2) def __ge__(self, other): return _ComparisonRenderer(self, other, ">=", lambda v1, v2: v1 >= v2) def __repr__(self): return "Property({0})".format(self.key) def getRenderingFor(self, props): if self.defaultWhenFalse: d = props.render(props.getProperty(self.key)) @d.addCallback def checkDefault(rv): if rv: return rv return props.render(self.default) return d if props.hasProperty(self.key): return props.render(props.getProperty(self.key)) return props.render(self.default) @implementer(IRenderable) class FlattenList(util.ComparableMixin): """ An instance of this class flattens all nested lists in a list """ compare_attrs = ('nestedlist') def __init__(self, nestedlist, types=(list, tuple)): """ @param nestedlist: a list of values to render @param types: only flatten these types. defaults to (list, tuple) """ self.nestedlist = nestedlist self.types = types def getRenderingFor(self, props): d = props.render(self.nestedlist) @d.addCallback def flat(r): return flatten(r, self.types) return d def __add__(self, b): if isinstance(b, FlattenList): b = b.nestedlist return FlattenList(self.nestedlist + b, self.types) @implementer(IRenderable) class _Renderer(util.ComparableMixin): compare_attrs = ('fn',) def __init__(self, fn): self.fn = fn self.args = [] self.kwargs = {} def withArgs(self, *args, **kwargs): new_renderer = _Renderer(self.fn) new_renderer.args = self.args + list(args) new_renderer.kwargs = dict(self.kwargs) new_renderer.kwargs.update(kwargs) return new_renderer @defer.inlineCallbacks def getRenderingFor(self, props): args = yield props.render(self.args) kwargs = yield props.render(self.kwargs) # We allow the renderer fn to return a renderable for convenience result = yield self.fn(props, *args, **kwargs) result = yield props.render(result) return result def __repr__(self): if self.args or self.kwargs: return 'renderer(%r, args=%r, kwargs=%r)' % (self.fn, self.args, self.kwargs) return 'renderer(%r)' % (self.fn,) def renderer(fn): return _Renderer(fn) @implementer(IRenderable) class _DefaultRenderer: """ Default IRenderable adaptor. Calls .getRenderingFor if available, otherwise returns argument unchanged. """ def __init__(self, value): try: self.renderer = value.getRenderingFor except AttributeError: self.renderer = lambda _: value def getRenderingFor(self, build): return self.renderer(build) registerAdapter(_DefaultRenderer, object, IRenderable) @implementer(IRenderable) class _ListRenderer: """ List IRenderable adaptor. Maps Build.render over the list. """ def __init__(self, value): self.value = value def getRenderingFor(self, build): return defer.gatherResults([build.render(e) for e in self.value]) registerAdapter(_ListRenderer, list, IRenderable) @implementer(IRenderable) class _TupleRenderer: """ Tuple IRenderable adaptor. Maps Build.render over the tuple. 
""" def __init__(self, value): self.value = value def getRenderingFor(self, build): d = defer.gatherResults([build.render(e) for e in self.value]) d.addCallback(tuple) return d registerAdapter(_TupleRenderer, tuple, IRenderable) @implementer(IRenderable) class _DictRenderer: """ Dict IRenderable adaptor. Maps Build.render over the keys and values in the dict. """ def __init__(self, value): self.value = _ListRenderer( [_TupleRenderer((k, v)) for k, v in value.items()]) def getRenderingFor(self, build): d = self.value.getRenderingFor(build) d.addCallback(dict) return d registerAdapter(_DictRenderer, dict, IRenderable) @implementer(IRenderable) class Transform: """ A renderable that combines other renderables' results using an arbitrary function. """ def __init__(self, function, *args, **kwargs): if not callable(function) and not IRenderable.providedBy(function): config.error( "function given to Transform neither callable nor renderable") self._function = function self._args = args self._kwargs = kwargs @defer.inlineCallbacks def getRenderingFor(self, iprops): rfunction = yield iprops.render(self._function) rargs = yield iprops.render(self._args) rkwargs = yield iprops.render(self._kwargs) return rfunction(*rargs, **rkwargs) buildbot-2.6.0/master/buildbot/process/remotecommand.py000066400000000000000000000413211361162603000233030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import error from twisted.python import log from twisted.python.failure import Failure from twisted.spread import pb from buildbot import util from buildbot.pbutil import decode from buildbot.process import metrics from buildbot.process.results import CANCELLED from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.util.eventual import eventually from buildbot.worker.protocols import base class RemoteException(Exception): pass class RemoteCommand(base.RemoteCommandImpl): # class-level unique identifier generator for command ids _commandCounter = 0 active = False rc = None debug = False def __init__(self, remote_command, args, ignore_updates=False, collectStdout=False, collectStderr=False, decodeRC=None, stdioLogName='stdio'): if decodeRC is None: decodeRC = {0: SUCCESS} self.logs = {} self.delayedLogs = {} self._closeWhenFinished = {} self.collectStdout = collectStdout self.collectStderr = collectStderr self.stdout = '' self.stderr = '' self.updates = {} self.stdioLogName = stdioLogName self._startTime = None self._remoteElapsed = None self.remote_command = remote_command self.args = args self.ignore_updates = ignore_updates self.decodeRC = decodeRC self.conn = None self.worker = None self.step = None self.builder_name = None self.commandID = None self.deferred = None self.interrupted = False # a lock to make sure that only one log-handling method runs at a time. # This is really only a problem with old-style steps, which do not # wait for the Deferred from one method before invoking the next. self.loglock = defer.DeferredLock() def __repr__(self): return "" % (self.remote_command, id(self)) def run(self, step, conn, builder_name): self.active = True self.step = step self.conn = conn self.builder_name = builder_name # generate a new command id cmd_id = RemoteCommand._commandCounter RemoteCommand._commandCounter += 1 self.commandID = "%d" % cmd_id log.msg("%s: RemoteCommand.run [%s]" % (self, self.commandID)) self.deferred = defer.Deferred() d = defer.maybeDeferred(self._start) # _finished is called with an error for unknown commands, errors # that occur while the command is starting (including OSErrors in # exec()), StaleBroker (when the connection was lost before we # started), and pb.PBConnectionLost (when the worker isn't responding # over this connection, perhaps it had a power failure, or NAT # weirdness). If this happens, self.deferred is fired right away. d.addErrback(self._finished) # Connections which are lost while the command is running are caught # when our parent Step calls our .lostRemote() method. return self.deferred def useLog(self, log_, closeWhenFinished=False, logfileName=None): # NOTE: log may be a SyngLogFileWrapper or a Log instance, depending on # the step if not logfileName: logfileName = log_.getName() assert logfileName not in self.logs assert logfileName not in self.delayedLogs self.logs[logfileName] = log_ self._closeWhenFinished[logfileName] = closeWhenFinished def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False): assert logfileName not in self.logs assert logfileName not in self.delayedLogs self.delayedLogs[logfileName] = (activateCallBack, closeWhenFinished) def _start(self): self._startTime = util.now() # This method only initiates the remote command. # We will receive remote_update messages as the command runs. # We will get a single remote_complete when it finishes. 
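        # As a usage sketch (step-side, names illustrative): a build step
        # typically constructs a command and hands it to BuildStep.runCommand,
        # which drives this run()/_start() machinery, e.g.
        #
        #     cmd = remotecommand.RemoteShellCommand(workdir="build",
        #                                            command=["make", "all"])
        #     cmd.useLog(stdio_log)
        #     yield self.runCommand(cmd)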
# We should fire self.deferred when the command is done. d = self.conn.remoteStartCommand(self, self.builder_name, self.commandID, self.remote_command, self.args) return d @defer.inlineCallbacks def _finished(self, failure=None): self.active = False # the rc is send asynchronously and there is a chance it is still in the callback queue # when finished is received, we have to workaround in the master because worker might be older timeout = 10 while self.rc is None and timeout > 0: yield util.asyncSleep(.1) timeout -= 1 # call .remoteComplete. If it raises an exception, or returns the # Failure that we gave it, our self.deferred will be errbacked. If # it does not (either it ate the Failure or there the step finished # normally and it didn't raise a new exception), self.deferred will # be callbacked. d = defer.maybeDeferred(self.remoteComplete, failure) # arrange for the callback to get this RemoteCommand instance # instead of just None d.addCallback(lambda r: self) # this fires the original deferred we returned from .run(), # with self as the result, or a failure d.addBoth(self.deferred.callback) def interrupt(self, why): log.msg("RemoteCommand.interrupt", self, why) if not self.active or self.interrupted: log.msg(" but this RemoteCommand is already inactive") return defer.succeed(None) if not self.conn: log.msg(" but our .conn went away") return defer.succeed(None) if isinstance(why, Failure) and why.check(error.ConnectionLost): log.msg("RemoteCommand.disconnect: lost worker") self.conn = None self._finished(why) return defer.succeed(None) self.interrupted = True # tell the remote command to halt. Returns a Deferred that will fire # when the interrupt command has been delivered. d = self.conn.remoteInterruptCommand(self.builder_name, self.commandID, str(why)) # the worker may not have remote_interruptCommand d.addErrback(self._interruptFailed) return d def _interruptFailed(self, why): log.msg("RemoteCommand._interruptFailed", self) # TODO: forcibly stop the Command now, since we can't stop it # cleanly return None def remote_update(self, updates): """ I am called by the worker's L{buildbot_worker.base.WorkerForBuilderBase.sendUpdate} so I can receive updates from the running remote command. @type updates: list of [object, int] @param updates: list of updates from the remote command """ updates = decode(updates) self.worker.messageReceivedFromWorker() max_updatenum = 0 for (update, num) in updates: # log.msg("update[%d]:" % num) try: if self.active and not self.ignore_updates: self.remoteUpdate(update) except Exception: # log failure, terminate build, let worker retire the update self._finished(Failure()) # TODO: what if multiple updates arrive? should # skip the rest but ack them all if num > max_updatenum: max_updatenum = num return max_updatenum def remote_complete(self, failure=None): """ Called by the worker's L{buildbot_worker.base.WorkerForBuilderBase.commandComplete} to notify me the remote command has finished. @type failure: L{twisted.python.failure.Failure} or None @rtype: None """ self.worker.messageReceivedFromWorker() # call the real remoteComplete a moment later, but first return an # acknowledgement so the worker can retire the completion message. 
if self.active: eventually(self._finished, failure) return None def _unwrap(self, log): from buildbot.process import buildstep if isinstance(log, buildstep.SyncLogFileWrapper): return log.unwrap() return log @util.deferredLocked('loglock') @defer.inlineCallbacks def addStdout(self, data): if self.collectStdout: self.stdout += data if self.stdioLogName is not None and self.stdioLogName in self.logs: log_ = yield self._unwrap(self.logs[self.stdioLogName]) log_.addStdout(data) @util.deferredLocked('loglock') @defer.inlineCallbacks def addStderr(self, data): if self.collectStderr: self.stderr += data if self.stdioLogName is not None and self.stdioLogName in self.logs: log_ = yield self._unwrap(self.logs[self.stdioLogName]) log_.addStderr(data) @util.deferredLocked('loglock') @defer.inlineCallbacks def addHeader(self, data): if self.stdioLogName is not None and self.stdioLogName in self.logs: log_ = yield self._unwrap(self.logs[self.stdioLogName]) log_.addHeader(data) @util.deferredLocked('loglock') @defer.inlineCallbacks def addToLog(self, logname, data): # Activate delayed logs on first data. if logname in self.delayedLogs: (activateCallBack, closeWhenFinished) = self.delayedLogs[logname] del self.delayedLogs[logname] loog = yield activateCallBack(self) loog = yield self._unwrap(loog) self.logs[logname] = loog self._closeWhenFinished[logname] = closeWhenFinished if logname in self.logs: log_ = yield self._unwrap(self.logs[logname]) yield log_.addStdout(data) else: log.msg("%s.addToLog: no such log %s" % (self, logname)) @metrics.countMethod('RemoteCommand.remoteUpdate()') @defer.inlineCallbacks def remoteUpdate(self, update): def cleanup(data): if self.step is None: return data return self.step.build.properties.cleanupTextFromSecrets(data) if self.debug: for k, v in update.items(): log.msg("Update[%s]: %s" % (k, v)) if "stdout" in update: # 'stdout': data yield self.addStdout(cleanup(update['stdout'])) if "stderr" in update: # 'stderr': data yield self.addStderr(cleanup(update['stderr'])) if "header" in update: # 'header': data yield self.addHeader(cleanup(update['header'])) if "log" in update: # 'log': (logname, data) logname, data = update['log'] yield self.addToLog(logname, cleanup(data)) if "rc" in update: rc = self.rc = update['rc'] log.msg("%s rc=%s" % (self, rc)) yield self.addHeader("program finished with exit code %d\n" % rc) if "elapsed" in update: self._remoteElapsed = update['elapsed'] # TODO: these should be handled at the RemoteCommand level for k in update: if k not in ('stdout', 'stderr', 'header', 'rc'): if k not in self.updates: self.updates[k] = [] self.updates[k].append(update[k]) @util.deferredLocked('loglock') @defer.inlineCallbacks def remoteComplete(self, maybeFailure): if self._startTime and self._remoteElapsed: delta = (util.now() - self._startTime) - self._remoteElapsed metrics.MetricTimeEvent.log("RemoteCommand.overhead", delta) for name, loog in self.logs.items(): if self._closeWhenFinished[name]: if maybeFailure: loog = yield self._unwrap(loog) yield loog.addHeader("\nremoteFailed: %s" % maybeFailure) else: log.msg("closing log %s" % loog) loog.finish() if maybeFailure: # workaround http://twistedmatrix.com/trac/ticket/5507 # CopiedFailure cannot be raised back, this make debug difficult if isinstance(maybeFailure, pb.CopiedFailure): maybeFailure.value = RemoteException("%s: %s\n%s" % ( maybeFailure.type, maybeFailure.value, maybeFailure.traceback)) maybeFailure.type = RemoteException maybeFailure.raiseException() def results(self): if self.interrupted: return 
CANCELLED if self.rc in self.decodeRC: return self.decodeRC[self.rc] return FAILURE def didFail(self): return self.results() == FAILURE LoggedRemoteCommand = RemoteCommand class RemoteShellCommand(RemoteCommand): def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, timeout=20 * 60, maxTime=None, sigtermTime=None, logfiles=None, usePTY=None, logEnviron=True, collectStdout=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio'): if logfiles is None: logfiles = {} if decodeRC is None: decodeRC = {0: SUCCESS} self.command = command # stash .command, set it later if isinstance(self.command, (str, bytes)): # Single string command doesn't support obfuscation. self.fake_command = command else: # Try to obfuscate command. def obfuscate(arg): if isinstance(arg, tuple) and len(arg) == 3 and arg[0] == 'obfuscated': return arg[2] return arg self.fake_command = [obfuscate(c) for c in self.command] if env is not None: # avoid mutating the original master.cfg dictionary. Each # ShellCommand gets its own copy, any start() methods won't be # able to modify the original. env = env.copy() args = {'workdir': workdir, 'env': env, 'want_stdout': want_stdout, 'want_stderr': want_stderr, 'logfiles': logfiles, 'timeout': timeout, 'maxTime': maxTime, 'sigtermTime': sigtermTime, 'usePTY': usePTY, 'logEnviron': logEnviron, 'initial_stdin': initialStdin } if interruptSignal is not None: args['interruptSignal'] = interruptSignal super().__init__("shell", args, collectStdout=collectStdout, collectStderr=collectStderr, decodeRC=decodeRC, stdioLogName=stdioLogName) def _start(self): if self.args['usePTY'] is None: if self.step.workerVersionIsOlderThan("shell", "3.0"): # Old worker default of usePTY is to use worker-configuration. self.args['usePTY'] = "slave-config" else: # buildbot-worker doesn't support worker-configured usePTY, # and usePTY defaults to False. self.args['usePTY'] = False self.args['command'] = self.command if self.remote_command == "shell": # non-ShellCommand worker commands are responsible for doing this # fixup themselves if self.step.workerVersion("shell", "old") == "old": self.args['dir'] = self.args['workdir'] if self.step.workerVersionIsOlderThan("shell", "2.16"): self.args.pop('sigtermTime', None) what = "command '%s' in dir '%s'" % (self.fake_command, self.args['workdir']) log.msg(what) return super()._start() def __repr__(self): return "" % repr(self.fake_command) buildbot-2.6.0/master/buildbot/process/remotetransfer.py000066400000000000000000000132721361162603000235150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import tarfile import tempfile from io import BytesIO from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.worker.protocols import base """ module for regrouping all FileWriterImpl and FileReaderImpl away from steps """ class FileWriter(base.FileWriterImpl): """ Helper class that acts as a file-object with write access """ def __init__(self, destfile, maxsize, mode): # Create missing directories. destfile = os.path.abspath(destfile) dirname = os.path.dirname(destfile) if not os.path.exists(dirname): os.makedirs(dirname) self.destfile = destfile self.mode = mode fd, self.tmpname = tempfile.mkstemp(dir=dirname) self.fp = os.fdopen(fd, 'wb') self.remaining = maxsize def remote_write(self, data): """ Called from remote worker to write L{data} to L{fp} within boundaries of L{maxsize} @type data: C{string} @param data: String of data to write """ data = unicode2bytes(data) if self.remaining is not None: if len(data) > self.remaining: data = data[:self.remaining] self.fp.write(data) self.remaining = self.remaining - len(data) else: self.fp.write(data) def remote_utime(self, accessed_modified): os.utime(self.destfile, accessed_modified) def remote_close(self): """ Called by remote worker to state that no more data will be transferred """ self.fp.close() self.fp = None # on windows, os.rename does not automatically unlink, so do it # manually if os.path.exists(self.destfile): os.unlink(self.destfile) os.rename(self.tmpname, self.destfile) self.tmpname = None if self.mode is not None: os.chmod(self.destfile, self.mode) def cancel(self): # unclean shutdown, the file is probably truncated, so delete it # altogether rather than deliver a corrupted file fp = getattr(self, "fp", None) if fp: fp.close() if self.destfile and os.path.exists(self.destfile): os.unlink(self.destfile) if self.tmpname and os.path.exists(self.tmpname): os.unlink(self.tmpname) class DirectoryWriter(FileWriter): """ A DirectoryWriter is implemented as a FileWriter, with an added post-processing step to unpack the archive, once the transfer has completed. 
""" def __init__(self, destroot, maxsize, compress, mode): self.destroot = destroot self.compress = compress self.fd, self.tarname = tempfile.mkstemp() os.close(self.fd) super().__init__(self.tarname, maxsize, mode) def remote_unpack(self): """ Called by remote worker to state that no more data will be transferred """ # Make sure remote_close is called, otherwise atomic rename won't happen self.remote_close() # Map configured compression to a TarFile setting if self.compress == 'bz2': mode = 'r|bz2' elif self.compress == 'gz': mode = 'r|gz' else: mode = 'r' # Unpack archive and clean up after self archive = tarfile.open(name=self.tarname, mode=mode) archive.extractall(path=self.destroot) archive.close() os.remove(self.tarname) class FileReader(base.FileReaderImpl): """ Helper class that acts as a file-object with read access """ def __init__(self, fp): self.fp = fp def remote_read(self, maxlength): """ Called from remote worker to read at most L{maxlength} bytes of data @type maxlength: C{integer} @param maxlength: Maximum number of data bytes that can be returned @return: Data read from L{fp} @rtype: C{string} of bytes read from file """ if self.fp is None: return '' data = self.fp.read(maxlength) return data def remote_close(self): """ Called by remote worker to state that no more data will be transferred """ if self.fp is not None: self.fp.close() self.fp = None class StringFileWriter(base.FileWriterImpl): """ FileWriter class that just puts received data into a buffer. Used to upload a file from worker for inline processing rather than writing into a file on master. """ def __init__(self): self.buffer = "" def remote_write(self, data): self.buffer += bytes2unicode(data) def remote_close(self): pass class StringFileReader(FileReader): """ FileWriter class that just buid send data from a string. Used to download a file to worker from local string rather than first writing into a file on master. """ def __init__(self, s): s = unicode2bytes(s) super().__init__(BytesIO(s)) buildbot-2.6.0/master/buildbot/process/results.py000066400000000000000000000050021361162603000221460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members ALL_RESULTS = list(range(7)) SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, CANCELLED = ALL_RESULTS Results = ["success", "warnings", "failure", "skipped", "exception", "retry", "cancelled"] def statusToString(status): if status is None: return "not finished" if status < 0 or status >= len(Results): return "Invalid status" return Results[status] def worst_status(a, b): # SKIPPED > SUCCESS > WARNINGS > FAILURE > EXCEPTION > RETRY > CANCELLED # CANCELLED needs to be considered the worst. 
for s in (CANCELLED, RETRY, EXCEPTION, FAILURE, WARNINGS, SUCCESS, SKIPPED): if s in (a, b): return s def computeResultAndTermination(obj, result, previousResult): possible_overall_result = result terminate = False if result == FAILURE: if not obj.flunkOnFailure: possible_overall_result = SUCCESS if obj.warnOnFailure: possible_overall_result = WARNINGS if obj.flunkOnFailure: possible_overall_result = FAILURE if obj.haltOnFailure: terminate = True elif result == WARNINGS: if not obj.warnOnWarnings: possible_overall_result = SUCCESS else: possible_overall_result = WARNINGS if obj.flunkOnWarnings: possible_overall_result = FAILURE elif result in (EXCEPTION, RETRY, CANCELLED): terminate = True result = worst_status(previousResult, possible_overall_result) return result, terminate class ResultComputingConfigMixin: haltOnFailure = False flunkOnWarnings = False flunkOnFailure = True warnOnWarnings = False warnOnFailure = False resultConfig = [ "haltOnFailure", "flunkOnWarnings", "flunkOnFailure", "warnOnWarnings", "warnOnFailure", ] buildbot-2.6.0/master/buildbot/process/subunitlogobserver.py000066400000000000000000000015541361162603000244200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this used to be referenced here, so we keep a link for old time's sake import buildbot.steps.subunit SubunitShellCommand = buildbot.steps.subunit.SubunitShellCommand buildbot-2.6.0/master/buildbot/process/users/000077500000000000000000000000001361162603000212375ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/process/users/__init__.py000066400000000000000000000000001361162603000233360ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/process/users/manager.py000066400000000000000000000032461361162603000232300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.application import service from twisted.internet import defer from buildbot.util import service as util_service class UserManagerManager(util_service.ReconfigurableServiceMixin, service.MultiService): # this class manages a fleet of user managers; hence the name.. 
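    # The individual managers are supplied by the master configuration (the
    # user_managers list read from new_config in
    # reconfigServiceWithBuildbotConfig below) and are swapped out wholesale
    # whenever the master is reconfigured.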
def __init__(self, master): super().__init__() self.setName('user_manager_manager') self.master = master @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # this is easy - kick out all of the old managers, and add the # new ones. # pylint: disable=cell-var-from-loop for mgr in list(self): yield defer.maybeDeferred(mgr.disownServiceParent) for mgr in new_config.user_managers: yield mgr.setServiceParent(self) # reconfig any newly-added change sources, as well as existing yield super().reconfigServiceWithBuildbotConfig(new_config) buildbot-2.6.0/master/buildbot/process/users/manual.py000066400000000000000000000213001361162603000230620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import pbutil from buildbot.util import service # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. class CommandlineUserManagerPerspective(pbutil.NewCredPerspective): """ Perspective registered in buildbot.pbmanager and contains the real workings of `buildbot user` by working with the database when perspective_commandline is called. """ def __init__(self, master): self.master = master def formatResults(self, op, results): """ This formats the results of the database operations for printing back to the caller @param op: operation to perform (add, remove, update, get) @type op: string @param results: results from db queries in perspective_commandline @type results: list @returns: string containing formatted results """ formatted_results = "" if op == 'add': # list, alternating ident, uid formatted_results += "user(s) added:\n" for user in results: if isinstance(user, str): formatted_results += "identifier: %s\n" % user else: formatted_results += "uid: %d\n\n" % user elif op == 'remove': # list of dictionaries formatted_results += "user(s) removed:\n" for user in results: if user: formatted_results += "identifier: %s\n" % (user) elif op == 'update': # list, alternating ident, None formatted_results += "user(s) updated:\n" for user in results: if user: formatted_results += "identifier: %s\n" % (user) elif op == 'get': # list of dictionaries formatted_results += "user(s) found:\n" for user in results: if user: for key in sorted(user.keys()): if key != 'bb_password': formatted_results += "%s: %s\n" % (key, user[key]) formatted_results += "\n" else: formatted_results += "no match found\n" return formatted_results @defer.inlineCallbacks def perspective_commandline(self, op, bb_username, bb_password, ids, info): """ This performs the requested operations from the `buildbot user` call by calling the proper buildbot.db.users methods based on the operation. It yields a deferred instance with the results from the database methods. 
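        For example, an C{add} operation might arrive here as C{op='add'},
        C{ids=None} and an C{info} list holding one dictionary such as
        {'identifier': 'jdoe', 'git': 'Jane Doe <jane@example.com>'}
        (illustrative values): each info dictionary carries the identifier
        plus the attribute type/value pairs to store for that user.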
@param op: operation to perform (add, remove, update, get) @type op: string @param bb_username: username portion of auth credentials @type bb_username: string @param bb_password: hashed password portion of auth credentials @type bb_password: hashed string @param ids: user identifiers used to find existing users @type ids: list of strings or None @param info: type/value pairs for each user that will be added or updated in the database @type info: list of dictionaries or None @returns: results from db.users methods via deferred """ log.msg("perspective_commandline called") results = [] # pylint: disable=too-many-nested-blocks if ids: for user in ids: # get identifier, guaranteed to be in user from checks # done in C{scripts.runner} uid = yield self.master.db.users.identifierToUid( identifier=user) result = None if op == 'remove': if uid: yield self.master.db.users.removeUser(uid) result = user else: log.msg("Unable to find uid for identifier %s" % user) elif op == 'get': if uid: result = yield self.master.db.users.getUser(uid) else: log.msg("Unable to find uid for identifier %s" % user) results.append(result) else: for user in info: # get identifier, guaranteed to be in user from checks # done in C{scripts.runner} ident = user.pop('identifier') uid = yield self.master.db.users.identifierToUid( identifier=ident) # if only an identifier was in user, we're updating only # the bb_username and bb_password. if not user: if uid: result = yield self.master.db.users.updateUser( uid=uid, identifier=ident, bb_username=bb_username, bb_password=bb_password) results.append(ident) else: log.msg("Unable to find uid for identifier %s" % user) else: # when adding, we update the user after the first attr once_through = False for attr in user: result = None if op == 'update' or once_through: if uid: result = yield self.master.db.users.updateUser( uid=uid, identifier=ident, bb_username=bb_username, bb_password=bb_password, attr_type=attr, attr_data=user[attr]) else: log.msg("Unable to find uid for identifier %s" % user) elif op == 'add': result = yield self.master.db.users.findUserByAttr( identifier=ident, attr_type=attr, attr_data=user[attr]) once_through = True results.append(ident) # result is None from updateUser calls if result: results.append(result) uid = result results = self.formatResults(op, results) return results class CommandlineUserManager(service.AsyncMultiService): """ Service that runs to set up and register CommandlineUserManagerPerspective so `buildbot user` calls get to perspective_commandline. """ def __init__(self, username=None, passwd=None, port=None): super().__init__() assert username and passwd, ("A username and password pair must be given " "to connect and use `buildbot user`") self.username = username self.passwd = passwd assert port, "A port must be specified for a PB connection" self.port = port self.registration = None @defer.inlineCallbacks def startService(self): # set up factory and register with buildbot.pbmanager def factory(mind, username): return CommandlineUserManagerPerspective(self.master) self.registration = yield self.master.pbmanager.register(self.port, self.username, self.passwd, factory) yield super().startService() def stopService(self): d = defer.maybeDeferred(service.AsyncMultiService.stopService, self) @d.addCallback def unreg(_): if self.registration: return self.registration.unregister() return d buildbot-2.6.0/master/buildbot/process/users/users.py000066400000000000000000000133241361162603000227550ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from binascii import hexlify from hashlib import sha1 from twisted.internet import defer from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import flatten from buildbot.util import unicode2bytes srcs = ['git', 'svn', 'hg', 'cvs', 'darcs', 'bzr'] salt_len = 8 def createUserObject(master, author, src=None): """ Take a Change author and source and translate them into a User Object, storing the user in master.db, or returning None if the src is not specified. @param master: link to Buildmaster for database operations @type master: master.Buildmaster instance @param authors: Change author if string or Authz instance @type authors: string or www.authz instance @param src: source from which the User Object will be created @type src: string """ if not src: log.msg("No vcs information found, unable to create User Object") return defer.succeed(None) if src in srcs: usdict = dict(identifier=author, attr_type=src, attr_data=author) else: log.msg("Unrecognized source argument: %s" % src) return defer.succeed(None) return master.db.users.findUserByAttr( identifier=usdict['identifier'], attr_type=usdict['attr_type'], attr_data=usdict['attr_data']) def _extractContact(usdict, contact_types, uid): if usdict: for type in contact_types: contact = usdict.get(type) if contact: break else: contact = None if contact is None: log.msg(format="Unable to find any of %(contact_types)r for uid: %(uid)r", contact_types=contact_types, uid=uid) return contact def getUserContact(master, contact_types, uid): """ This is a simple getter function that returns a user attribute that matches the contact_types argument, or returns None if no uid/match is found. 
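    For example, C{getUserContact(master, ['email', 'nick'], uid)} returns
    (via deferred) the user's C{email} attribute when present, falls back to
    C{nick}, and returns C{None} (after logging) when neither attribute is
    stored for that uid.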
@param master: BuildMaster used to query the database @type master: BuildMaster instance @param contact_types: list of contact attributes to look for in in a given user, such as 'email' or 'nick' @type contact_types: list of strings @param uid: user that is searched for the contact_types match @type uid: integer @returns: string of contact information or None via deferred """ d = master.db.users.getUser(uid) d.addCallback(_extractContact, contact_types, uid) return d def _filter(contacts): def notNone(c): return c is not None return filter(notNone, contacts) def getUsersContacts(master, contact_types, uids): d = defer.gatherResults( [getUserContact(master, contact_types, uid) for uid in uids]) d.addCallback(_filter) return d def getChangeContacts(master, change, contact_types): d = master.db.changes.getChangeUids(change.number) d.addCallback(lambda uids: getUsersContacts(master, contact_types, uids)) return d def getSourceStampContacts(master, ss, contact_types): dl = [getChangeContacts(master, change, contact_types) for change in ss.changes] if False and ss.patch_info: d = master.db.users.getUserByUsername(ss.patch_into[0]) d.addCallback(_extractContact, contact_types, ss.patch_info[0]) d.addCallback(lambda contact: filter(None, [contact])) dl.append(d) d = defer.gatherResults(dl) d.addCallback(flatten) return d def getBuildContacts(master, build, contact_types): dl = [] ss_list = build.getSourceStamps() for ss in ss_list: dl.append(getSourceStampContacts(master, ss, contact_types)) d = defer.gatherResults(dl) d.addCallback(flatten) @d.addCallback def addOwners(recipients): dl = [] for owner in build.getInterestedUsers(): d = master.db.users.getUserByUsername(owner) d.addCallback(_extractContact, contact_types, owner) dl.append(d) d = defer.gatherResults(dl) d.addCallback(_filter) d.addCallback(lambda owners: recipients + owners) return d return d def encrypt(passwd): """ Encrypts the incoming password after adding some salt to store it in the database. @param passwd: password portion of user credentials @type passwd: string @returns: encrypted/salted string """ m = sha1() salt = hexlify(os.urandom(salt_len)) m.update(unicode2bytes(passwd) + salt) crypted = bytes2unicode(salt) + m.hexdigest() return crypted def check_passwd(guess, passwd): """ Tests to see if the guess, after salting and hashing, matches the passwd from the database. @param guess: incoming password trying to be used for authentication @param passwd: already encrypted password from the database @returns: boolean """ m = sha1() salt = passwd[:salt_len * 2] # salt_len * 2 due to encode('hex_codec') m.update(unicode2bytes(guess) + unicode2bytes(salt)) crypted_guess = bytes2unicode(salt) + m.hexdigest() return (crypted_guess == bytes2unicode(passwd)) buildbot-2.6.0/master/buildbot/process/workerforbuilder.py000066400000000000000000000176531361162603000240530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.python.constants import NamedConstant from twisted.python.constants import Names class States(Names): # The worker isn't attached, or is in the process of attaching. DETACHED = NamedConstant() # The worker is available to build: either attached, or a latent worker. AVAILABLE = NamedConstant() # The worker is building. BUILDING = NamedConstant() class AbstractWorkerForBuilder: def __init__(self): self.ping_watchers = [] self.state = None # set in subclass self.worker = None self.builder_name = None self.locks = None def __repr__(self): r = ["<", self.__class__.__name__] if self.builder_name: r.extend([" builder=", repr(self.builder_name)]) if self.worker: r.extend([" worker=", repr(self.worker.workername)]) r.extend([" state=", self.state.name, ">"]) return ''.join(r) def setBuilder(self, b): self.builder = b self.builder_name = b.name def getWorkerCommandVersion(self, command, oldversion=None): if self.remoteCommands is None: # the worker is 0.5.0 or earlier return oldversion return self.remoteCommands.get(command) def isAvailable(self): # if this WorkerForBuilder is busy, then it's definitely not available if self.isBusy(): return False # otherwise, check in with the Worker if self.worker: return self.worker.canStartBuild() # no worker? not very available. return False def isBusy(self): return self.state != States.AVAILABLE def buildStarted(self): self.state = States.BUILDING # AbstractWorker doesn't always have a buildStarted method # so only call it if it is available. try: worker_buildStarted = self.worker.buildStarted except AttributeError: pass else: worker_buildStarted(self) def buildFinished(self): self.state = States.AVAILABLE if self.worker: self.worker.buildFinished(self) @defer.inlineCallbacks def attached(self, worker, commands): """ @type worker: L{buildbot.worker.Worker} @param worker: the Worker that represents the worker as a whole @type commands: dict: string -> string, or None @param commands: provides the worker's version of each RemoteCommand """ self.remoteCommands = commands # maps command name to version if self.worker is None: self.worker = worker self.worker.addWorkerForBuilder(self) else: assert self.worker == worker log.msg("Worker %s attached to %s" % (worker.workername, self.builder_name)) yield self.worker.conn.remotePrint(message="attached") return self def prepare(self, build): if not self.worker or not self.worker.acquireLocks(): return defer.succeed(False) return defer.succeed(True) def ping(self, status=None): """Ping the worker to make sure it is still there. Returns a Deferred that fires with True if it is. @param status: if you point this at a BuilderStatus, a 'pinging' event will be pushed. 
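        Note that concurrent callers share a single ping: only the first
        caller actually sends the remote message, and every waiter's Deferred
        is fired from _pong() when the reply arrives.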
""" newping = not self.ping_watchers d = defer.Deferred() self.ping_watchers.append(d) if newping: Ping().ping(self.worker.conn).addBoth(self._pong) return d def abortPingIfAny(self): watchers, self.ping_watchers = self.ping_watchers, [] for d in watchers: d.errback(PingException('aborted ping')) def _pong(self, res): watchers, self.ping_watchers = self.ping_watchers, [] for d in watchers: d.callback(res) def detached(self): log.msg("Worker %s detached from %s" % (self.worker.workername, self.builder_name)) if self.worker: self.worker.removeWorkerForBuilder(self) self.worker = None self.remoteCommands = None class PingException(Exception): pass class Ping: running = False def ping(self, conn): assert not self.running if not conn: # clearly the ping must fail return defer.fail(PingException("Worker not connected?")) self.running = True log.msg("sending ping") self.d = defer.Deferred() # TODO: add a distinct 'ping' command on the worker.. using 'print' # for this purpose is kind of silly. conn.remotePrint(message="ping").addCallbacks(self._pong, self._ping_failed, errbackArgs=(conn,)) return self.d def _pong(self, res): log.msg("ping finished: success") self.d.callback(True) def _ping_failed(self, res, conn): log.msg("ping finished: failure") # the worker has some sort of internal error, disconnect them. If we # don't, we'll requeue a build and ping them again right away, # creating a nasty loop. conn.loseConnection() self.d.errback(res) class WorkerForBuilder(AbstractWorkerForBuilder): def __init__(self): super().__init__() self.state = States.DETACHED @defer.inlineCallbacks def attached(self, worker, commands): wfb = yield super().attached(worker, commands) # Only set available on non-latent workers, since latent workers # only attach while a build is in progress. self.state = States.AVAILABLE return wfb def detached(self): super().detached() if self.worker: self.worker.removeWorkerForBuilder(self) self.worker = None self.state = States.DETACHED class LatentWorkerForBuilder(AbstractWorkerForBuilder): def __init__(self, worker, builder): super().__init__() self.worker = worker self.state = States.AVAILABLE self.setBuilder(builder) self.worker.addWorkerForBuilder(self) log.msg("Latent worker %s attached to %s" % (worker.workername, self.builder_name)) def prepare(self, build): # If we can't lock, then don't bother trying to substantiate if not self.worker or not self.worker.acquireLocks(): return defer.succeed(False) self.state = States.DETACHED d = self.substantiate(build) return d def attached(self, worker, commands): # When a latent worker is attached, it is actually because it prepared for a build # thus building and not available like for normal worker if self.state == States.DETACHED: self.state = States.BUILDING return super().attached(worker, commands) def substantiate(self, build): return self.worker.substantiate(self, build) def ping(self, status=None): if not self.worker.substantiated: return defer.fail(PingException("worker is not substantiated")) return super().ping(status) buildbot-2.6.0/master/buildbot/reporters/000077500000000000000000000000001361162603000204455ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/reporters/__init__.py000066400000000000000000000000001361162603000225440ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/reporters/bitbucket.py000066400000000000000000000112101361162603000227660ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from urllib.parse import urlparse from twisted.internet import defer from buildbot.process.results import SUCCESS from buildbot.reporters import http from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() # Magic words understood by Butbucket REST API BITBUCKET_INPROGRESS = 'INPROGRESS' BITBUCKET_SUCCESSFUL = 'SUCCESSFUL' BITBUCKET_FAILED = 'FAILED' _BASE_URL = 'https://api.bitbucket.org/2.0/repositories' _OAUTH_URL = 'https://bitbucket.org/site/oauth2/access_token' _GET_TOKEN_DATA = { 'grant_type': 'client_credentials' } class BitbucketStatusPush(http.HttpStatusPushBase): name = "BitbucketStatusPush" @defer.inlineCallbacks def reconfigService(self, oauth_key, oauth_secret, base_url=_BASE_URL, oauth_url=_OAUTH_URL, **kwargs): oauth_key, oauth_secret = yield self.renderSecrets(oauth_key, oauth_secret) yield super().reconfigService(**kwargs) if base_url.endswith('/'): base_url = base_url[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, debug=self.debug, verify=self.verify) self.oauthhttp = yield httpclientservice.HTTPClientService.getService( self.master, oauth_url, auth=(oauth_key, oauth_secret), debug=self.debug, verify=self.verify) @defer.inlineCallbacks def send(self, build): results = build['results'] oauth_request = yield self.oauthhttp.post("", data=_GET_TOKEN_DATA) if oauth_request.code == 200: content_json = yield oauth_request.json() token = content_json['access_token'] else: content = yield oauth_request.content() log.error("{code}: unable to authenticate to Bitbucket {content}", code=oauth_request.code, content=content) return if build['complete']: status = BITBUCKET_SUCCESSFUL if results == SUCCESS else BITBUCKET_FAILED else: status = BITBUCKET_INPROGRESS for sourcestamp in build['buildset']['sourcestamps']: sha = sourcestamp['revision'] body = { 'state': status, 'key': build['builder']['name'], 'name': build['builder']['name'], 'url': build['url'] } owner, repo = self.get_owner_and_repo(sourcestamp['repository']) self._http.updateHeaders({'Authorization': 'Bearer ' + token}) bitbucket_uri = '/' + \ '/'.join([owner, repo, 'commit', sha, 'statuses', 'build']) response = yield self._http.post(bitbucket_uri, json=body) if response.code != 201: content = yield response.content() log.error("{code}: unable to upload Bitbucket status {content}", code=response.code, content=content) @staticmethod def get_owner_and_repo(repourl): """ Takes a git repository URL from Bitbucket and tries to determine the owner and repository name :param repourl: Bitbucket git repo in the form of git@bitbucket.com:OWNER/REPONAME.git https://bitbucket.com/OWNER/REPONAME.git ssh://git@bitbucket.com/OWNER/REPONAME.git :return: owner, repo: The owner of the repository and the repository name """ parsed = urlparse(repourl) if parsed.scheme: path = parsed.path[1:] 
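            # https:// and ssh:// style URLs parse with a scheme; strip the
            # leading '/' so the remaining path is OWNER/REPONAME(.git)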
else: # we assume git@host:owner/repo.git here path = parsed.path.split(':', 1)[-1] if path.endswith('.git'): path = path[:-4] while path.endswith('/'): path = path[:-1] parts = path.split('/') assert len(parts) == 2, 'OWNER/REPONAME is expected' return parts buildbot-2.6.0/master/buildbot/reporters/bitbucketserver.py000066400000000000000000000164511361162603000242310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from urllib.parse import urlparse from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import SUCCESS from buildbot.reporters import http from buildbot.reporters import notifier from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import unicode2bytes # Magic words understood by Bitbucket Server REST API INPROGRESS = 'INPROGRESS' SUCCESSFUL = 'SUCCESSFUL' FAILED = 'FAILED' STATUS_API_URL = '/rest/build-status/1.0/commits/{sha}' COMMENT_API_URL = '/rest/api/1.0{path}/comments' HTTP_PROCESSED = 204 HTTP_CREATED = 201 class BitbucketServerStatusPush(http.HttpStatusPushBase): name = "BitbucketServerStatusPush" @defer.inlineCallbacks def reconfigService(self, base_url, user, password, key=None, statusName=None, startDescription=None, endDescription=None, verbose=False, **kwargs): user, password = yield self.renderSecrets(user, password) yield super().reconfigService(wantProperties=True, **kwargs) self.key = key or Interpolate('%(prop:buildername)s') self.context = statusName self.endDescription = endDescription or 'Build done.' self.startDescription = startDescription or 'Build started.' 
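        # key and statusName (stored as self.context) may be renderables such
        # as Interpolate; send() below renders them against the build's
        # properties before posting the status.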
self.verbose = verbose self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, auth=(user, password), debug=self.debug, verify=self.verify) def createStatus(self, sha, state, url, key, description=None, context=None): payload = { 'state': state, 'url': url, 'key': key, } if description: payload['description'] = description if context: payload['name'] = context return self._http.post(STATUS_API_URL.format(sha=sha), json=payload) @defer.inlineCallbacks def send(self, build): props = Properties.fromDict(build['properties']) props.master = self.master results = build['results'] if build['complete']: state = SUCCESSFUL if results == SUCCESS else FAILED description = self.endDescription else: state = INPROGRESS description = self.startDescription key = yield props.render(self.key) description = yield props.render(description) if description else None context = yield props.render(self.context) if self.context else None sourcestamps = build['buildset']['sourcestamps'] for sourcestamp in sourcestamps: try: sha = sourcestamp['revision'] if sha is None: log.msg("Unable to get the commit hash") continue url = build['url'] res = yield self.createStatus( sha=sha, state=state, url=url, key=key, description=description, context=context ) if res.code not in (HTTP_PROCESSED,): content = yield res.content() log.msg("{code}: Unable to send Bitbucket Server status: " "{content}".format(code=res.code, content=content)) elif self.verbose: log.msg('Status "{state}" sent for {sha}.'.format( state=state, sha=sha)) except Exception as e: log.err( e, 'Failed to send status "{state}" for ' '{repo} at {sha}'.format( state=state, repo=sourcestamp['repository'], sha=sha )) class BitbucketServerPRCommentPush(notifier.NotifierBase): name = "BitbucketServerPRCommentPush" @defer.inlineCallbacks def reconfigService(self, base_url, user, password, messageFormatter=None, verbose=False, debug=None, verify=None, **kwargs): user, password = yield self.renderSecrets(user, password) yield super().reconfigService( messageFormatter=messageFormatter, watchedWorkers=None, messageFormatterMissingWorker=None, subject='', addLogs=False, addPatch=False, **kwargs) self.verbose = verbose self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url, auth=(user, password), debug=debug, verify=verify) def checkConfig(self, base_url, user, password, messageFormatter=None, verbose=False, debug=None, verify=None, **kwargs): super().checkConfig(messageFormatter=messageFormatter, watchedWorkers=None, messageFormatterMissingWorker=None, subject='', addLogs=False, addPatch=False, **kwargs) def isMessageNeeded(self, build): if 'pullrequesturl' in build['properties']: return super().isMessageNeeded(build) return False def workerMissing(self, key, worker): # a comment is always associated to a change pass def sendComment(self, pr_url, text): path = urlparse(unicode2bytes(pr_url)).path payload = {'text': text} return self._http.post(COMMENT_API_URL.format( path=bytes2unicode(path)), json=payload) @defer.inlineCallbacks def sendMessage(self, body, subject=None, type=None, builderName=None, results=None, builds=None, users=None, patches=None, logs=None, worker=None): pr_urls = set() for build in builds: props = Properties.fromDict(build['properties']) pr_urls.add(props.getProperty("pullrequesturl")) for pr_url in pr_urls: try: res = yield self.sendComment( pr_url=pr_url, text=body ) if res.code not in (HTTP_CREATED,): content = yield res.content() log.msg("{code}: Unable to send a comment: " 
"{content}".format(code=res.code, content=content)) elif self.verbose: log.msg('Comment sent to {url}'.format(url=pr_url)) except Exception as e: log.err(e, 'Failed to send a comment to "{}"'.format(pr_url)) buildbot-2.6.0/master/buildbot/reporters/gerrit.py000066400000000000000000000370111361162603000223150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Push events to Gerrit """ import time import warnings from distutils.version import LooseVersion from twisted.internet import defer from twisted.internet import reactor from twisted.internet.protocol import ProcessProtocol from twisted.python import log from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.reporters import utils from buildbot.util import bytes2unicode from buildbot.util import service # Cache the version that the gerrit server is running for this many seconds GERRIT_VERSION_CACHE_TIMEOUT = 600 GERRIT_LABEL_VERIFIED = 'Verified' GERRIT_LABEL_REVIEWED = 'Code-Review' def makeReviewResult(message, *labels): """ helper to produce a review result """ return dict(message=message, labels=dict(labels)) def _handleLegacyResult(result): """ make sure the result is backward compatible """ if not isinstance(result, dict): warnings.warn('The Gerrit status callback uses the old way to ' 'communicate results. The outcome might be not what is ' 'expected.') message, verified, reviewed = result result = makeReviewResult(message, (GERRIT_LABEL_VERIFIED, verified), (GERRIT_LABEL_REVIEWED, reviewed)) return result def _old_add_label(label, value): if label == GERRIT_LABEL_VERIFIED: return ["--verified %d" % int(value)] elif label == GERRIT_LABEL_REVIEWED: return ["--code-review %d" % int(value)] warnings.warn('Gerrit older than 2.6 does not support custom labels. ' 'Setting %s is ignored.' % label) return [] def _new_add_label(label, value): return ["--label %s=%d" % (label, int(value))] def defaultReviewCB(builderName, build, result, master, arg): if result == RETRY: return makeReviewResult(None) message = "Buildbot finished compiling your patchset\n" message += "on configuration: %s\n" % builderName message += "The result is: %s\n" % Results[result].upper() return makeReviewResult(message, (GERRIT_LABEL_VERIFIED, result == SUCCESS or -1)) def defaultSummaryCB(buildInfoList, results, master, arg): success = False failure = False msgs = [] for buildInfo in buildInfoList: msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo link = buildInfo.get('url', None) if link: msg += " - " + link else: msg += "." 
msgs.append(msg) if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if success and not failure: verified = 1 else: verified = -1 return makeReviewResult('\n\n'.join(msgs), (GERRIT_LABEL_VERIFIED, verified)) # These are just sentinel values for GerritStatusPush.__init__ args class DEFAULT_REVIEW: pass class DEFAULT_SUMMARY: pass class GerritStatusPush(service.BuildbotService): """Event streamer to a gerrit ssh server.""" name = "GerritStatusPush" gerrit_server = None gerrit_username = None gerrit_port = None gerrit_version_time = None gerrit_version = None gerrit_identity_file = None reviewCB = None reviewArg = None startCB = None startArg = None summaryCB = None summaryArg = None wantSteps = False wantLogs = False _gerrit_notify = None def reconfigService(self, server, username, reviewCB=DEFAULT_REVIEW, startCB=None, port=29418, reviewArg=None, startArg=None, summaryCB=DEFAULT_SUMMARY, summaryArg=None, identity_file=None, builders=None, notify=None, wantSteps=False, wantLogs=False): # If neither reviewCB nor summaryCB were specified, default to sending # out "summary" reviews. But if we were given a reviewCB and only a # reviewCB, disable the "summary" reviews, so we don't send out both # by default. if reviewCB is DEFAULT_REVIEW and summaryCB is DEFAULT_SUMMARY: reviewCB = None summaryCB = defaultSummaryCB if reviewCB is DEFAULT_REVIEW: reviewCB = None if summaryCB is DEFAULT_SUMMARY: summaryCB = None # Parameters. self.gerrit_server = server self.gerrit_username = username self.gerrit_port = port self.gerrit_version = None self.gerrit_version_time = 0 self.gerrit_identity_file = identity_file self.reviewCB = reviewCB self.reviewArg = reviewArg self.startCB = startCB self.startArg = startArg self.summaryCB = summaryCB self.summaryArg = summaryArg self.builders = builders self._gerrit_notify = notify self.wantSteps = wantSteps self.wantLogs = wantLogs def _gerritCmd(self, *args): '''Construct a command as a list of strings suitable for :func:`subprocess.call`. 
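        For example (illustrative host and user), with
        gerrit_username='buildbot', gerrit_server='gerrit.example.com',
        gerrit_port=29418 and no identity file configured,
        self._gerritCmd('version') returns
        ['ssh', 'buildbot@gerrit.example.com', '-p', '29418', 'gerrit', 'version'].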
''' if self.gerrit_identity_file is not None: options = ['-i', self.gerrit_identity_file] else: options = [] return ['ssh'] + options + [ '@'.join((self.gerrit_username, self.gerrit_server)), '-p', str(self.gerrit_port), 'gerrit' ] + list(args) class VersionPP(ProcessProtocol): def __init__(self, func): self.func = func self.gerrit_version = None def outReceived(self, data): vstr = b"gerrit version " if not data.startswith(vstr): log.msg(b"Error: Cannot interpret gerrit version info: " + data) return vers = data[len(vstr):].strip() log.msg(b"gerrit version: " + vers) self.gerrit_version = LooseVersion(bytes2unicode(vers)) def errReceived(self, data): log.msg(b"gerriterr: " + data) def processEnded(self, status_object): if status_object.value.exitCode: log.msg("gerrit version status: ERROR:", status_object) return if self.gerrit_version: self.func(self.gerrit_version) def getCachedVersion(self): if self.gerrit_version is None: return None if time.time() - self.gerrit_version_time > GERRIT_VERSION_CACHE_TIMEOUT: # cached version has expired self.gerrit_version = None return self.gerrit_version def processVersion(self, gerrit_version, func): self.gerrit_version = gerrit_version self.gerrit_version_time = time.time() func() def callWithVersion(self, func): command = self._gerritCmd("version") def callback(gerrit_version): return self.processVersion(gerrit_version, func) self.spawnProcess(self.VersionPP(callback), command[0], command, env=None) class LocalPP(ProcessProtocol): def __init__(self, status): self.status = status def outReceived(self, data): log.msg("gerritout:", data) def errReceived(self, data): log.msg("gerriterr:", data) def processEnded(self, status_object): if status_object.value.exitCode: log.msg("gerrit status: ERROR:", status_object) else: log.msg("gerrit status: OK") @defer.inlineCallbacks def startService(self): yield super().startService() startConsuming = self.master.mq.startConsuming self._buildsetCompleteConsumer = yield startConsuming( self.buildsetComplete, ('buildsets', None, 'complete')) self._buildCompleteConsumer = yield startConsuming( self.buildComplete, ('builds', None, 'finished')) self._buildStartedConsumer = yield startConsuming( self.buildStarted, ('builds', None, 'new')) def stopService(self): self._buildsetCompleteConsumer.stopConsuming() self._buildCompleteConsumer.stopConsuming() self._buildStartedConsumer.stopConsuming() @defer.inlineCallbacks def buildStarted(self, key, build): if self.startCB is None: return yield self.getBuildDetails(build) if self.isBuildReported(build): result = yield self.startCB(build['builder']['name'], build, self.startArg) self.sendCodeReviews(build, result) @defer.inlineCallbacks def buildComplete(self, key, build): if self.reviewCB is None: return yield self.getBuildDetails(build) if self.isBuildReported(build): result = yield self.reviewCB(build['builder']['name'], build, build['results'], self.master, self.reviewArg) result = _handleLegacyResult(result) self.sendCodeReviews(build, result) @defer.inlineCallbacks def getBuildDetails(self, build): br = yield self.master.data.get(("buildrequests", build['buildrequestid'])) buildset = yield self.master.data.get(("buildsets", br['buildsetid'])) yield utils.getDetailsForBuilds(self.master, buildset, [build], wantProperties=True, wantSteps=self.wantSteps) def isBuildReported(self, build): return self.builders is None or build['builder']['name'] in self.builders @defer.inlineCallbacks def buildsetComplete(self, key, msg): if not self.summaryCB: return bsid = msg['bsid'] res = 
yield utils.getDetailsForBuildset( self.master, bsid, wantProperties=True, wantSteps=self.wantSteps, wantLogs=self.wantLogs) builds = res['builds'] buildset = res['buildset'] self.sendBuildSetSummary(buildset, builds) @defer.inlineCallbacks def sendBuildSetSummary(self, buildset, builds): builds = [build for build in builds if self.isBuildReported(build)] if builds and self.summaryCB: def getBuildInfo(build): result = build['results'] resultText = { SUCCESS: "succeeded", FAILURE: "failed", WARNINGS: "completed with warnings", EXCEPTION: "encountered an exception", }.get(result, "completed with unknown result %d" % result) return {'name': build['builder']['name'], 'result': result, 'resultText': resultText, 'text': build['state_string'], 'url': utils.getURLForBuild(self.master, build['builder']['builderid'], build['number']), 'build': build } buildInfoList = sorted( [getBuildInfo(build) for build in builds], key=lambda bi: bi['name']) result = yield self.summaryCB(buildInfoList, Results[buildset['results']], self.master, self.summaryArg) result = _handleLegacyResult(result) self.sendCodeReviews(builds[0], result) def sendCodeReviews(self, build, result): message = result.get('message', None) if message is None: return def getProperty(build, name): return build['properties'].get(name, [None])[0] # Gerrit + Repo downloads = getProperty(build, "repo_downloads") downloaded = getProperty(build, "repo_downloaded") if downloads is not None and downloaded is not None: downloaded = downloaded.split(" ") if downloads and 2 * len(downloads) == len(downloaded): for i, download in enumerate(downloads): try: project, change1 = download.split(" ") except ValueError: return # something is wrong, abort change2 = downloaded[2 * i] revision = downloaded[2 * i + 1] if change1 == change2: self.sendCodeReview(project, revision, result) else: return # something is wrong, abort return # Gerrit + Git # used only to verify Gerrit source if getProperty(build, "event.change.id") is not None: project = getProperty(build, "event.change.project") codebase = getProperty(build, "codebase") revision = (getProperty(build, "event.patchSet.revision") or getProperty(build, "got_revision") or getProperty(build, "revision")) if isinstance(revision, dict): # in case of the revision is a codebase revision, we just take # the revisionfor current codebase if codebase is not None: revision = revision[codebase] else: revision = None if project is not None and revision is not None: self.sendCodeReview(project, revision, result) return def sendCodeReview(self, project, revision, result): gerrit_version = self.getCachedVersion() if gerrit_version is None: self.callWithVersion( lambda: self.sendCodeReview(project, revision, result)) return assert gerrit_version command = self._gerritCmd("review", "--project %s" % (project,)) if gerrit_version >= LooseVersion("2.13"): command.append('--tag autogenerated:buildbot') if self._gerrit_notify is not None: command.append('--notify %s' % str(self._gerrit_notify)) message = result.get('message', None) if message: command.append("--message '%s'" % message.replace("'", "\"")) labels = result.get('labels', None) if labels: if gerrit_version < LooseVersion("2.6"): add_label = _old_add_label else: add_label = _new_add_label for label, value in labels.items(): command.extend(add_label(label, value)) command.append(revision) command = [str(s) for s in command] self.spawnProcess(self.LocalPP(self), command[0], command, env=None) def spawnProcess(self, *arg, **kw): reactor.spawnProcess(*arg, **kw) 
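# Illustrative usage sketch (not part of the original module): one way a
# master.cfg could register GerritStatusPush with a custom per-build review
# callback.  The server name, username and port are placeholder values; the
# callback follows the same voting convention as defaultReviewCB above.
from buildbot.process.results import RETRY, SUCCESS, Results
from buildbot.reporters.gerrit import (GERRIT_LABEL_VERIFIED, GerritStatusPush,
                                       makeReviewResult)


def exampleReviewCB(builderName, build, result, master, arg):
    # A RETRY result produces no message and no vote.
    if result == RETRY:
        return makeReviewResult(None)
    message = "Builder %s finished: %s\n" % (builderName, Results[result].upper())
    # +1 on the Verified label for success, -1 otherwise.
    return makeReviewResult(
        message, (GERRIT_LABEL_VERIFIED, 1 if result == SUCCESS else -1))


# In master.cfg this would typically be hooked up as:
#   c['services'].append(GerritStatusPush(server='gerrit.example.com',
#                                         username='buildbot',
#                                         port=29418,
#                                         reviewCB=exampleReviewCB))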
buildbot-2.6.0/master/buildbot/reporters/gerrit_verify_status.py000066400000000000000000000205731361162603000253110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import http from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() class GerritVerifyStatusPush(http.HttpStatusPushBase): name = "GerritVerifyStatusPush" neededDetails = dict(wantProperties=True) # overridable constants RESULTS_TABLE = { SUCCESS: 1, WARNINGS: 1, FAILURE: -1, SKIPPED: 0, EXCEPTION: 0, RETRY: 0, CANCELLED: 0 } DEFAULT_RESULT = -1 @defer.inlineCallbacks def reconfigService(self, baseURL, auth, startDescription=None, endDescription=None, verification_name=None, abstain=False, category=None, reporter=None, verbose=False, **kwargs): auth = yield self.renderSecrets(auth) yield super().reconfigService(**kwargs) if baseURL.endswith('/'): baseURL = baseURL[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, auth=auth, debug=self.debug, verify=self.verify) self._verification_name = verification_name or Interpolate( '%(prop:buildername)s') self._reporter = reporter or "buildbot" self._abstain = abstain self._category = category self._startDescription = startDescription or 'Build started.' self._endDescription = endDescription or 'Build done.' self._verbose = verbose def createStatus(self, change_id, revision_id, name, value, abstain=None, rerun=None, comment=None, url=None, reporter=None, category=None, duration=None): """ Abstract the POST REST api documented here: https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md :param change_id: The change_id for the change tested (can be in the long form e.g: myProject~master~I8473b95934b5732ac55d26311a706c9c2bde9940 or in the short integer form). :param revision_id: the revision_id tested can be the patchset number or the commit id (short or long). :param name: The name of the job. 
        :param value: The pass/fail result for this job:
            -1: fail, 0: unstable, 1: succeed
        :param abstain: Whether the value counts as a vote
            (defaults to false)
        :param rerun: Whether this result is from a re-test on the same patchset
        :param comment: A short comment about this job
        :param url: The url link to more info about this job
        :param reporter: The user that verified this job
        :param category: A category for this job
        :param duration: The time it took to run this job
        :return: A deferred with the result from Gerrit.
        """
        payload = {'name': name, 'value': value}
        if abstain is not None:
            payload['abstain'] = abstain
        if rerun is not None:
            payload['rerun'] = rerun
        if comment is not None:
            payload['comment'] = comment
        if url is not None:
            payload['url'] = url
        if reporter is not None:
            payload['reporter'] = reporter
        if category is not None:
            payload['category'] = category
        if duration is not None:
            payload['duration'] = duration
        if self._verbose:
            log.debug(
                'Sending Gerrit status for {change_id}/{revision_id}: data={data}',
                change_id=change_id,
                revision_id=revision_id,
                data=payload)
        return self._http.post(
            '/'.join([
                '/a/changes',
                str(change_id),
                'revisions',
                str(revision_id),
                'verify-status~verifications'
            ]),
            json=payload)

    def formatDuration(self, duration):
        """Format the duration.

        This method could be overridden if really needed, as the duration
        format in gerrit is an arbitrary string.

        :param duration: duration in timedelta
        """
        days = duration.days
        hours, remainder = divmod(duration.seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        if days:
            return '{} day{} {}h {}m {}s'.format(days, "s" if days > 1 else "",
                                                 hours, minutes, seconds)
        elif hours:
            return '{}h {}m {}s'.format(hours, minutes, seconds)
        return '{}m {}s'.format(minutes, seconds)

    @staticmethod
    def getGerritChanges(props):
        """
        Get the gerrit changes

        This method could be overridden if really needed, to accommodate
        custom steps that use another method for fetching gerrit changes.
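        For example, a build triggered from a Gerrit event typically carries
        event.change.number and event.patchSet.number properties, which are
        returned here as [{'change_id': ..., 'revision_id': ...}]; a build
        that already defines a gerrit_changes property has that value
        returned unchanged.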
:param props: an IProperty :return: (optionally via deferred) a list of dictionary with at list change_id, and revision_id, which format is the one accepted by the gerrit REST API as of /changes/:change_id/revision/:revision_id paths (see gerrit doc) """ if 'gerrit_changes' in props: return props.getProperty('gerrit_changes') if 'event.change.number' in props: return [{ 'change_id': props.getProperty('event.change.number'), 'revision_id': props.getProperty('event.patchSet.number') }] return [] @defer.inlineCallbacks def send(self, build): props = Properties.fromDict(build['properties']) if build['complete']: value = self.RESULTS_TABLE.get(build['results'], self.DEFAULT_RESULT) comment = yield props.render(self._endDescription) duration = self.formatDuration(build['complete_at'] - build[ 'started_at']) else: value = 0 comment = yield props.render(self._startDescription) duration = 'pending' name = yield props.render(self._verification_name) reporter = yield props.render(self._reporter) category = yield props.render(self._category) abstain = yield props.render(self._abstain) # TODO: find reliable way to find out whether its a rebuild rerun = None changes = yield self.getGerritChanges(props) for change in changes: try: yield self.createStatus( change['change_id'], change['revision_id'], name, value, abstain=abstain, rerun=rerun, comment=comment, url=build['url'], reporter=reporter, category=category, duration=duration) except Exception: log.failure( 'Failed to send status!', failure=failure.Failure()) buildbot-2.6.0/master/buildbot/reporters/github.py000066400000000000000000000205421361162603000223040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import http from buildbot.util import httpclientservice from buildbot.util.giturlparse import giturlparse HOSTED_BASE_URL = 'https://api.github.com' class GitHubStatusPush(http.HttpStatusPushBase): name = "GitHubStatusPush" neededDetails = dict(wantProperties=True) @defer.inlineCallbacks def reconfigService(self, token, startDescription=None, endDescription=None, context=None, baseURL=None, verbose=False, **kwargs): token = yield self.renderSecrets(token) yield super().reconfigService(**kwargs) self.setDefaults(context, startDescription, endDescription) if baseURL is None: baseURL = HOSTED_BASE_URL if baseURL.endswith('/'): baseURL = baseURL[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, headers={ 'Authorization': 'token ' + token, 'User-Agent': 'Buildbot' }, debug=self.debug, verify=self.verify) self.verbose = verbose def setDefaults(self, context, startDescription, endDescription): self.context = context or Interpolate('buildbot/%(prop:buildername)s') self.startDescription = startDescription or 'Build started.' self.endDescription = endDescription or 'Build done.' def createStatus(self, repo_user, repo_name, sha, state, target_url=None, context=None, issue=None, description=None): """ :param repo_user: GitHub user or organization :param repo_name: Name of the repository :param sha: Full sha to create the status for. :param state: one of the following 'pending', 'success', 'error' or 'failure'. :param target_url: Target url to associate with this status. :param description: Short description of the status. :param context: Build context :return: A deferred with the result from GitHub. This code comes from txgithub by @tomprince. 
txgithub is based on twisted's webclient agent, which is much less reliable and featureful as txrequest (support for proxy, connection pool, keep alive, retry, etc) """ payload = {'state': state} if description is not None: payload['description'] = description if target_url is not None: payload['target_url'] = target_url if context is not None: payload['context'] = context return self._http.post( '/'.join(['/repos', repo_user, repo_name, 'statuses', sha]), json=payload) @defer.inlineCallbacks def send(self, build): props = Properties.fromDict(build['properties']) props.master = self.master if build['complete']: state = { SUCCESS: 'success', WARNINGS: 'success', FAILURE: 'failure', SKIPPED: 'success', EXCEPTION: 'error', RETRY: 'pending', CANCELLED: 'error' }.get(build['results'], 'error') description = yield props.render(self.endDescription) elif self.startDescription: state = 'pending' description = yield props.render(self.startDescription) else: return context = yield props.render(self.context) sourcestamps = build['buildset'].get('sourcestamps') if not sourcestamps or not sourcestamps[0]: return project = sourcestamps[0]['project'] branch = props['branch'] m = re.search(r"refs/pull/([0-9]*)/merge", branch) if m: issue = m.group(1) else: issue = None if "/" in project: repoOwner, repoName = project.split('/') else: giturl = giturlparse(sourcestamps[0]['repository']) repoOwner = giturl.owner repoName = giturl.repo if self.verbose: log.msg("Updating github status: repoOwner={repoOwner}, repoName={repoName}".format( repoOwner=repoOwner, repoName=repoName)) for sourcestamp in sourcestamps: sha = sourcestamp['revision'] response = None try: repo_user = repoOwner repo_name = repoName target_url = build['url'] response = yield self.createStatus( repo_user=repo_user, repo_name=repo_name, sha=sha, state=state, target_url=target_url, context=context, issue=issue, description=description ) if not self.isStatus2XX(response.code): raise Exception() if self.verbose: log.msg( 'Updated status with "{state}" for {repoOwner}/{repoName} ' 'at {sha}, context "{context}", issue {issue}.'.format( state=state, repoOwner=repoOwner, repoName=repoName, sha=sha, issue=issue, context=context)) except Exception as e: if response: content = yield response.content() code = response.code else: content = code = "n/a" log.err( e, 'Failed to update "{state}" for {repoOwner}/{repoName} ' 'at {sha}, context "{context}", issue {issue}. ' 'http {code}, {content}'.format( state=state, repoOwner=repoOwner, repoName=repoName, sha=sha, issue=issue, context=context, code=code, content=content)) class GitHubCommentPush(GitHubStatusPush): name = "GitHubCommentPush" neededDetails = dict(wantProperties=True) def setDefaults(self, context, startDescription, endDescription): self.context = '' self.startDescription = startDescription self.endDescription = endDescription or 'Build done.' def createStatus(self, repo_user, repo_name, sha, state, target_url=None, context=None, issue=None, description=None): """ :param repo_user: GitHub user or organization :param repo_name: Name of the repository :param issue: Pull request number :param state: one of the following 'pending', 'success', 'error' or 'failure'. :param description: Short description of the status. :return: A deferred with the result from GitHub. This code comes from txgithub by @tomprince. 
txgithub is based on twisted's webclient agent, which is much less reliable and featureful as txrequest (support for proxy, connection pool, keep alive, retry, etc) """ payload = {'body': description} return self._http.post( '/'.join(['/repos', repo_user, repo_name, 'issues', issue, 'comments']), json=payload) buildbot-2.6.0/master/buildbot/reporters/gitlab.py000066400000000000000000000162611361162603000222670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from urllib.parse import quote_plus as urlquote_plus from twisted.internet import defer from twisted.python import log from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import http from buildbot.util import giturlparse from buildbot.util import httpclientservice HOSTED_BASE_URL = 'https://gitlab.com' class GitLabStatusPush(http.HttpStatusPushBase): name = "GitLabStatusPush" neededDetails = dict(wantProperties=True) @defer.inlineCallbacks def reconfigService(self, token, startDescription=None, endDescription=None, context=None, baseURL=None, verbose=False, **kwargs): token = yield self.renderSecrets(token) yield super().reconfigService(**kwargs) self.context = context or Interpolate('buildbot/%(prop:buildername)s') self.startDescription = startDescription or 'Build started.' self.endDescription = endDescription or 'Build done.' if baseURL is None: baseURL = HOSTED_BASE_URL if baseURL.endswith('/'): baseURL = baseURL[:-1] self.baseURL = baseURL self._http = yield httpclientservice.HTTPClientService.getService( self.master, baseURL, headers={'PRIVATE-TOKEN': token}, debug=self.debug, verify=self.verify) self.verbose = verbose self.project_ids = {} def createStatus(self, project_id, branch, sha, state, target_url=None, description=None, context=None): """ :param project_id: Project ID from GitLab :param branch: Branch name to create the status for. :param sha: Full sha to create the status for. :param state: one of the following 'pending', 'success', 'failed' or 'cancelled'. :param target_url: Target url to associate with this status. :param description: Short description of the status. :param context: Context of the result :return: A deferred with the result from GitLab. 
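        Example (illustrative only; the project id, sha, URL and context below are
        hypothetical, and within this reporter the call is normally issued from
        ``send()``)::

            d = self.createStatus(
                project_id=1234,
                branch='master',
                sha='0123456789abcdef0123456789abcdef01234567',
                state='pending',
                target_url='https://buildbot.example.org/#builders/3/builds/7',
                description='Build started.',
                context='buildbot/runtests')
            # d is a Deferred firing with the GitLab API response.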
""" payload = {'state': state, 'ref': branch} if description is not None: payload['description'] = description if target_url is not None: payload['target_url'] = target_url if context is not None: payload['name'] = context return self._http.post('/api/v4/projects/%d/statuses/%s' % ( project_id, sha), json=payload) @defer.inlineCallbacks def getProjectId(self, sourcestamp): # retrieve project id via cache url = giturlparse(sourcestamp['repository']) if url is None: return None project_full_name = "%s/%s" % (url.owner, url.repo) # gitlab needs project name to be fully url quoted to get the project id project_full_name = urlquote_plus(project_full_name) if project_full_name not in self.project_ids: response = yield self._http.get('/api/v4/projects/%s' % (project_full_name)) proj = yield response.json() if response.code not in (200, ): log.msg( 'Unknown (or hidden) gitlab project' '{repo}: {message}'.format( repo=project_full_name, **proj)) return None self.project_ids[project_full_name] = proj['id'] return self.project_ids[project_full_name] @defer.inlineCallbacks def send(self, build): props = Properties.fromDict(build['properties']) props.master = self.master if build['complete']: state = { SUCCESS: 'success', WARNINGS: 'success', FAILURE: 'failed', SKIPPED: 'success', EXCEPTION: 'failed', RETRY: 'pending', CANCELLED: 'cancelled' }.get(build['results'], 'failed') description = yield props.render(self.endDescription) else: state = 'running' description = yield props.render(self.startDescription) context = yield props.render(self.context) sourcestamps = build['buildset']['sourcestamps'] # FIXME: probably only want to report status for the last commit in the changeset for sourcestamp in sourcestamps: sha = sourcestamp['revision'] if 'source_project_id' in props: proj_id = props['source_project_id'] else: proj_id = yield self.getProjectId(sourcestamp) if proj_id is None: continue try: if 'source_branch' in props: branch = props['source_branch'] else: branch = sourcestamp['branch'] target_url = build['url'] res = yield self.createStatus( project_id=proj_id, branch=branch, sha=sha, state=state, target_url=target_url, context=context, description=description ) if res.code not in (200, 201, 204): message = yield res.json() message = message.get('message', 'unspecified error') log.msg( 'Could not send status "{state}" for ' '{repo} at {sha}: {message}'.format( state=state, repo=sourcestamp['repository'], sha=sha, message=message)) elif self.verbose: log.msg( 'Status "{state}" sent for ' '{repo} at {sha}.'.format( state=state, repo=sourcestamp['repository'], sha=sha)) except Exception as e: log.err( e, 'Failed to send status "{state}" for ' '{repo} at {sha}'.format( state=state, repo=sourcestamp['repository'], sha=sha )) buildbot-2.6.0/master/buildbot/reporters/hipchat.py000066400000000000000000000102201361162603000224320ustar00rootroot00000000000000from twisted.internet import defer from buildbot import config from buildbot.process.results import statusToString from buildbot.reporters import utils from buildbot.reporters.http import HttpStatusPushBase from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() HOSTED_BASE_URL = "https://api.hipchat.com" class HipChatStatusPush(HttpStatusPushBase): name = "HipChatStatusPush" def checkConfig(self, auth_token, endpoint=HOSTED_BASE_URL, builder_room_map=None, builder_user_map=None, event_messages=None, **kwargs): if not isinstance(auth_token, str): config.error('auth_token must be a string') if not 
isinstance(endpoint, str): config.error('endpoint must be a string') if builder_room_map and not isinstance(builder_room_map, dict): config.error('builder_room_map must be a dict') if builder_user_map and not isinstance(builder_user_map, dict): config.error('builder_user_map must be a dict') @defer.inlineCallbacks def reconfigService(self, auth_token, endpoint="https://api.hipchat.com", builder_room_map=None, builder_user_map=None, event_messages=None, **kwargs): auth_token = yield self.renderSecrets(auth_token) yield super().reconfigService(**kwargs) self._http = yield httpclientservice.HTTPClientService.getService( self.master, endpoint, debug=self.debug, verify=self.verify) self.auth_token = auth_token self.builder_room_map = builder_room_map self.builder_user_map = builder_user_map # returns a Deferred that returns None def buildStarted(self, key, build): return self.send(build, key[2]) # returns a Deferred that returns None def buildFinished(self, key, build): return self.send(build, key[2]) @defer.inlineCallbacks def getBuildDetailsAndSendMessage(self, build, key): yield utils.getDetailsForBuild(self.master, build, **self.neededDetails) postData = yield self.getRecipientList(build, key) postData['message'] = yield self.getMessage(build, key) extra_params = yield self.getExtraParams(build, key) postData.update(extra_params) return postData def getRecipientList(self, build, event_name): result = {} builder_name = build['builder']['name'] if self.builder_user_map and builder_name in self.builder_user_map: result['id_or_email'] = self.builder_user_map[builder_name] if self.builder_room_map and builder_name in self.builder_room_map: result['room_id_or_name'] = self.builder_room_map[builder_name] return result def getMessage(self, build, event_name): event_messages = { 'new': 'Buildbot started build %s here: %s' % (build['builder']['name'], build['url']), 'finished': 'Buildbot finished build %s with result %s here: %s' % (build['builder']['name'], statusToString(build['results']), build['url']) } return event_messages.get(event_name, '') # use this as an extension point to inject extra parameters into your # postData def getExtraParams(self, build, event_name): return {} @defer.inlineCallbacks def send(self, build, key): postData = yield self.getBuildDetailsAndSendMessage(build, key) if not postData or 'message' not in postData or not postData['message']: return urls = [] if 'id_or_email' in postData: urls.append('/v2/user/{}/message'.format(postData.pop('id_or_email'))) if 'room_id_or_name' in postData: urls.append('/v2/room/{}/notification'.format(postData.pop('room_id_or_name'))) for url in urls: response = yield self._http.post(url, params=dict(auth_token=self.auth_token), json=postData) if response.code != 200: content = yield response.content() log.error("{code}: unable to upload status: {content}", code=response.code, content=content) buildbot-2.6.0/master/buildbot/reporters/http.py000066400000000000000000000104721361162603000220020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
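# A minimal illustrative sketch of the extension point mentioned above:
# getExtraParams() can be overridden in a subclass to inject additional fields
# into the HipChat notification payload. The subclass name and the 'color'/'notify'
# values are assumptions about the HipChat v2 payload, not part of Buildbot itself.
from buildbot.process.results import SUCCESS
from buildbot.reporters.hipchat import HipChatStatusPush


class ColoredHipChatStatusPush(HipChatStatusPush):

    def getExtraParams(self, build, event_name):
        # Colour finished-build notifications by result; new-build events keep
        # the default (empty) extra parameters.
        if event_name == 'finished':
            color = 'green' if build['results'] == SUCCESS else 'red'
            return {'color': color, 'notify': True}
        return {}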
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import abc import copy from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.reporters import utils from buildbot.util import httpclientservice from buildbot.util import service class HttpStatusPushBase(service.BuildbotService): neededDetails = dict() def checkConfig(self, *args, **kwargs): super().checkConfig() httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) if not isinstance(kwargs.get('builders'), (type(None), list)): config.error("builders must be a list or None") @defer.inlineCallbacks def reconfigService(self, builders=None, debug=None, verify=None, **kwargs): yield super().reconfigService() self.debug = debug self.verify = verify self.builders = builders self.neededDetails = copy.copy(self.neededDetails) for k, v in kwargs.items(): if k.startswith("want"): self.neededDetails[k] = v @defer.inlineCallbacks def startService(self): yield super().startService() startConsuming = self.master.mq.startConsuming self._buildCompleteConsumer = yield startConsuming( self.buildFinished, ('builds', None, 'finished')) self._buildStartedConsumer = yield startConsuming( self.buildStarted, ('builds', None, 'new')) def stopService(self): self._buildCompleteConsumer.stopConsuming() self._buildStartedConsumer.stopConsuming() def buildStarted(self, key, build): return self.getMoreInfoAndSend(build) def buildFinished(self, key, build): return self.getMoreInfoAndSend(build) def filterBuilds(self, build): if self.builders is not None: return build['builder']['name'] in self.builders return True @defer.inlineCallbacks def getMoreInfoAndSend(self, build): yield utils.getDetailsForBuild(self.master, build, **self.neededDetails) if self.filterBuilds(build): yield self.send(build) @abc.abstractmethod def send(self, build): pass def isStatus2XX(self, code): return code // 100 == 2 class HttpStatusPush(HttpStatusPushBase): name = "HttpStatusPush" secrets = ['user', 'password', "auth"] def checkConfig(self, serverUrl, user=None, password=None, auth=None, format_fn=None, **kwargs): if user is not None and auth is not None: config.error("Only one of user/password or auth must be given") if user is not None: config.warnDeprecated("0.9.1", "user/password is deprecated, use 'auth=(user, password)'") if (format_fn is not None) and not callable(format_fn): config.error("format_fn must be a function") super().checkConfig(**kwargs) @defer.inlineCallbacks def reconfigService(self, serverUrl, user=None, password=None, auth=None, format_fn=None, **kwargs): yield super().reconfigService(**kwargs) if user is not None: auth = (user, password) if format_fn is None: self.format_fn = lambda x: x else: self.format_fn = format_fn self._http = yield httpclientservice.HTTPClientService.getService( self.master, serverUrl, auth=auth, debug=self.debug, verify=self.verify) @defer.inlineCallbacks def send(self, build): response = yield self._http.post("", json=self.format_fn(build)) if not self.isStatus2XX(response.code): log.msg("%s: unable to upload status: %s" % (response.code, response.content)) buildbot-2.6.0/master/buildbot/reporters/irc.py000066400000000000000000000475151361162603000216100ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.application import internet from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from twisted.python import log from twisted.words.protocols import irc from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.words import Channel from buildbot.reporters.words import Contact from buildbot.reporters.words import StatusBot from buildbot.reporters.words import ThrottledClientFactory from buildbot.reporters.words import dangerousCommand from buildbot.util import service from buildbot.util import ssl class UsageError(ValueError): # pylint: disable=useless-super-delegation def __init__(self, string="Invalid usage", *more): # This is not useless as we change the default value of an argument. # This bug is reported as "fixed" but apparently, it is not. # https://github.com/PyCQA/pylint/issues/1085 # (Maybe there is a problem with builtin exceptions). super().__init__(string, *more) _irc_colors = ( 'WHITE', 'BLACK', 'NAVY_BLUE', 'GREEN', 'RED', 'BROWN', 'PURPLE', 'OLIVE', 'YELLOW', 'LIME_GREEN', 'TEAL', 'AQUA_LIGHT', 'ROYAL_BLUE', 'PINK', 'DARK_GRAY', 'LIGHT_GRAY' ) class IRCChannel(Channel): def __init__(self, bot, channel): super().__init__(bot, channel) self.muted = False def send(self, message, **kwargs): if self.id[0] in irc.CHANNEL_PREFIXES: send = self.bot.groupSend else: send = self.bot.msg if not self.muted: send(self.id, message) def act(self, action): if self.muted: return self.bot.groupDescribe(self.id, action) class IRCContact(Contact): def __init__(self, user, channel=None): if channel is None: channel = user super().__init__(user, channel) def act(self, action): return self.channel.act(action) def handleAction(self, action): # this is sent when somebody performs an action that mentions the # buildbot (like '/me kicks buildbot'). 'self.user' is the name/nick/id of # the person who performed the action, so if their action provokes a # response, they can be named. This is 100% silly. 
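        # For example: the IRC action "/me kicks buildbot" reaches this method as
        # "kicks buildbot"; the verb "kicks" yields the response "kicks back",
        # while "threatens buildbot" is answered with
        # "hosts a red wedding for <user>".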
if not action.endswith("s " + self.bot.nickname): return words = action.split() verb = words[-2] if verb == "kicks": response = "%s back" % verb elif verb == "threatens": response = "hosts a red wedding for %s" % self.user_id else: response = "%s %s too" % (verb, self.user_id) self.act(response) @defer.inlineCallbacks def op_required(self, command): if self.is_private_chat or self.user_id in self.bot.authz.get(command.upper(), ()): return False ops = yield self.bot.getChannelOps(self.channel.id) return self.user_id not in ops # IRC only commands @dangerousCommand def command_JOIN(self, args, **kwargs): """join a channel""" args = self.splitArgs(args) for channel in args: self.bot.join(channel) command_JOIN.usage = "join #channel - join a channel #channel" @dangerousCommand def command_LEAVE(self, args, **kwargs): """join a channel""" args = self.splitArgs(args) for channel in args: self.bot.leave(channel) command_LEAVE.usage = "leave #channel - leave a channel #channel" @defer.inlineCallbacks def command_MUTE(self, args, **kwargs): if (yield self.op_required('mute')): yield self.send("Only channel operators or explicitly allowed users " "can mute me here, {}... Blah, blah, blah...".format(self.user_id)) return # The order of these is important! ;) yield self.send("Shutting up for now.") self.channel.muted = True command_MUTE.usage = "mute - suppress all messages until a corresponding 'unmute' is issued" @defer.inlineCallbacks def command_UNMUTE(self, args, **kwargs): if self.channel.muted: if (yield self.op_required('mute')): return # The order of these is important! ;) self.channel.muted = False yield self.send("I'm baaaaaaaaaaack!") else: yield self.send( "No one had told me to be quiet, but it's the thought that counts, right?") command_UNMUTE.usage = "unmute - disable a previous 'mute'" @defer.inlineCallbacks @Contact.overrideCommand def command_NOTIFY(self, args, **kwargs): if not self.is_private_chat: argv = self.splitArgs(args) if argv and argv[0] in ('on', 'off') and \ (yield self.op_required('notify')): yield self.send("Only channel operators can change notified events for this channel. " "And you, {}, are neither!" .format(self.user_id)) return super().command_NOTIFY(args, **kwargs) def command_DANCE(self, args, **kwargs): """dance, dance academy...""" reactor.callLater(1.0, self.send, "<(^.^<)") reactor.callLater(2.0, self.send, "<(^.^)>") reactor.callLater(3.0, self.send, "(>^.^)>") reactor.callLater(3.5, self.send, "(7^.^)7") reactor.callLater(5.0, self.send, "(>^.^<)") def command_DESTROY(self, args): if self.bot.nickname not in args: self.act("readies phasers") else: self.send("Better destroy yourself, {}!".format(self.user_id)) def command_HUSTLE(self, args): self.act("does the hustle") command_HUSTLE.usage = "dondon on #qutebrowser: qutebrowser-bb needs to learn to do the hustle" class IrcStatusBot(StatusBot, irc.IRCClient): """I represent the buildbot to an IRC server. 
""" contactClass = IRCContact channelClass = IRCChannel def __init__(self, nickname, password, join_channels, pm_to_nicks, noticeOnChannel, *args, useColors=False, **kwargs): super().__init__(*args, **kwargs) self.nickname = nickname self.join_channels = join_channels self.pm_to_nicks = pm_to_nicks self.password = password self.hasQuit = 0 self.noticeOnChannel = noticeOnChannel self.useColors = useColors self._keepAliveCall = task.LoopingCall( lambda: self.ping(self.nickname)) self._channel_names = {} def connectionMade(self): super().connectionMade() self._keepAliveCall.start(60) def connectionLost(self, reason): if self._keepAliveCall.running: self._keepAliveCall.stop() super().connectionLost(reason) # The following methods are called when we write something. def groupSend(self, channel, message): if self.noticeOnChannel: self.notice(channel, message) else: self.msg(channel, message) def groupDescribe(self, channel, action): self.describe(channel, action) def getContact(self, user, channel=None): # nicknames and channel names are case insensitive user = user.lower() if channel is None: channel = user channel = channel.lower() return super().getContact(user, channel) # the following irc.IRCClient methods are called when we have input def privmsg(self, user, channel, message): user = user.split('!', 1)[0] # rest is ~user@hostname # channel is '#twisted' or 'buildbot' (for private messages) if channel == self.nickname: # private message contact = self.getContact(user=user) d = contact.handleMessage(message) return d # else it's a broadcast message, maybe for us, maybe not. 'channel' # is '#twisted' or the like. contact = self.getContact(user=user, channel=channel) if message.startswith("%s:" % self.nickname) or message.startswith("%s," % self.nickname): message = message[len("%s:" % self.nickname):] d = contact.handleMessage(message) return d def action(self, user, channel, data): user = user.split('!', 1)[0] # rest is ~user@hostname # somebody did an action (/me actions) in the broadcast channel contact = self.getContact(user=user, channel=channel) if self.nickname in data: contact.handleAction(data) def signedOn(self): if self.password: self.msg("Nickserv", "IDENTIFY " + self.password) for c in self.join_channels: if isinstance(c, dict): channel = c.get('channel', None) password = c.get('password', None) else: channel = c password = None self.join(channel=channel, key=password) for c in self.pm_to_nicks: contact = self.getContact(c) contact.channel.add_notification_events(self.notify_events) self.loadState() def getNames(self, channel): channel = channel.lower() d = defer.Deferred() callbacks = self._channel_names.setdefault(channel, ([], []))[0] callbacks.append(d) self.sendLine("NAMES {}".format(channel)) return d def irc_RPL_NAMREPLY(self, prefix, params): channel = params[2].lower() if channel not in self._channel_names: return nicks = params[3].split(' ') nicklist = self._channel_names[channel][1] nicklist += nicks def irc_RPL_ENDOFNAMES(self, prefix, params): channel = params[1].lower() try: callbacks, namelist = self._channel_names.pop(channel) except KeyError: return for cb in callbacks: cb.callback(namelist) @defer.inlineCallbacks def getChannelOps(self, channel): names = yield self.getNames(channel) return [n[1:] for n in names if n[0] in '@&~%'] def joined(self, channel): self.log("Joined %s" % (channel,)) # trigger contact constructor, which in turn subscribes to notify events channel = self.getChannel(channel=channel) channel.add_notification_events(self.notify_events) def 
left(self, channel): self.log("Left %s" % (channel,)) def kickedFrom(self, channel, kicker, message): self.log("I have been kicked from %s by %s: %s" % (channel, kicker, message)) def userLeft(self, user, channel): if user: user = user.lower() if channel: channel = channel.lower() if (channel, user) in self.contacts: del self.contacts[(channel, user)] def userKicked(self, kickee, channel, kicker, message): self.userLeft(kickee, channel) def userQuit(self, user, quitMessage=None): if user: user = user.lower() for c, u in list(self.contacts): if u == user: del self.contacts[(c, u)] results_colors = { SUCCESS: 'GREEN', WARNINGS: 'YELLOW', FAILURE: 'RED', SKIPPED: 'ROYAL_BLUE', EXCEPTION: 'PURPLE', RETRY: 'AQUA_LIGHT', CANCELLED: 'PINK', } short_results_descriptions = { SUCCESS: ", Success", WARNINGS: ", Warnings", FAILURE: ", Failure", SKIPPED: ", Skipped", EXCEPTION: ", Exception", RETRY: ", Retry", CANCELLED: ", Cancelled", } def format_build_status(self, build, short=False): br = build['results'] if short: text = self.short_results_descriptions[br] else: text = self.results_descriptions[br] if self.useColors: return "\x03{:d}{}\x0f".format( _irc_colors.index(self.results_colors[br]), text) else: return text class IrcStatusFactory(ThrottledClientFactory): protocol = IrcStatusBot shuttingDown = False p = None def __init__(self, nickname, password, join_channels, pm_to_nicks, authz, tags, notify_events, noticeOnChannel=False, useRevisions=False, showBlameList=False, parent=None, lostDelay=None, failedDelay=None, useColors=True): super().__init__(lostDelay=lostDelay, failedDelay=failedDelay) self.nickname = nickname self.password = password self.join_channels = join_channels self.pm_to_nicks = pm_to_nicks self.tags = tags self.authz = authz self.parent = parent self.notify_events = notify_events self.noticeOnChannel = noticeOnChannel self.useRevisions = useRevisions self.showBlameList = showBlameList self.useColors = useColors def __getstate__(self): d = self.__dict__.copy() del d['p'] return d def shutdown(self): self.shuttingDown = True if self.p: self.p.quit("buildmaster reconfigured: bot disconnecting") def buildProtocol(self, address): if self.p: self.p.disownServiceParent() p = self.protocol(self.nickname, self.password, self.join_channels, self.pm_to_nicks, self.noticeOnChannel, self.authz, self.tags, self.notify_events, useColors=self.useColors, useRevisions=self.useRevisions, showBlameList=self.showBlameList) p.setServiceParent(self.parent) p.factory = self self.p = p return p # TODO: I think a shutdown that occurs while the connection is being # established will make this explode def clientConnectionLost(self, connector, reason): if self.shuttingDown: log.msg("not scheduling reconnection attempt") return super().clientConnectionLost(connector, reason) def clientConnectionFailed(self, connector, reason): if self.shuttingDown: log.msg("not scheduling reconnection attempt") return super().clientConnectionFailed(connector, reason) class IRC(service.BuildbotService): name = "IRC" in_test_harness = False f = None compare_attrs = ("host", "port", "nick", "password", "authz", "channels", "pm_to_nicks", "useSSL", "useRevisions", "tags", "useColors", "allowForce", "allowShutdown", "lostDelay", "failedDelay") secrets = ['password'] def checkConfig(self, host, nick, channels, pm_to_nicks=None, port=6667, allowForce=None, tags=None, password=None, notify_events=None, showBlameList=True, useRevisions=False, useSSL=False, lostDelay=None, failedDelay=None, useColors=True, allowShutdown=None, 
noticeOnChannel=False, authz=None, **kwargs ): deprecated_params = list(kwargs) if deprecated_params: config.error("%s are deprecated" % (",".join(deprecated_params))) # deprecated if allowForce is not None: if authz is not None: config.error("If you specify authz, you must not use allowForce anymore") if allowForce not in (True, False): config.error("allowForce must be boolean, not %r" % (allowForce,)) log.msg('IRC: allowForce is deprecated: use authz instead') if allowShutdown is not None: if authz is not None: config.error("If you specify authz, you must not use allowShutdown anymore") if allowShutdown not in (True, False): config.error("allowShutdown must be boolean, not %r" % (allowShutdown,)) log.msg('IRC: allowShutdown is deprecated: use authz instead') # ### if noticeOnChannel not in (True, False): config.error("noticeOnChannel must be boolean, not %r" % (noticeOnChannel,)) if useSSL: # SSL client needs a ClientContextFactory for some SSL mumbo-jumbo ssl.ensureHasSSL(self.__class__.__name__) if authz is not None: for acl in authz.values(): if not isinstance(acl, (list, tuple, bool)): config.error( "authz values must be bool or a list of nicks") def reconfigService(self, host, nick, channels, pm_to_nicks=None, port=6667, allowForce=None, tags=None, password=None, notify_events=None, showBlameList=True, useRevisions=False, useSSL=False, lostDelay=None, failedDelay=None, useColors=True, allowShutdown=None, noticeOnChannel=False, authz=None, **kwargs ): # need to stash these so we can detect changes later self.host = host self.port = port self.nick = nick self.join_channels = channels if pm_to_nicks is None: pm_to_nicks = [] self.pm_to_nicks = pm_to_nicks self.password = password if authz is None: self.authz = {} else: self.authz = authz self.useRevisions = useRevisions self.tags = tags if notify_events is None: notify_events = {} self.notify_events = notify_events self.noticeOnChannel = noticeOnChannel # deprecated... if allowForce is not None: self.authz[('force', 'stop')] = allowForce if allowShutdown is not None: self.authz[('shutdown')] = allowShutdown # ### # This function is only called in case of reconfig with changes # We don't try to be smart here. Just restart the bot if config has # changed. if self.f is not None: self.f.shutdown() self.f = IrcStatusFactory(self.nick, self.password, self.join_channels, self.pm_to_nicks, self.authz, self.tags, self.notify_events, parent=self, noticeOnChannel=noticeOnChannel, useRevisions=useRevisions, showBlameList=showBlameList, lostDelay=lostDelay, failedDelay=failedDelay, useColors=useColors) if useSSL: cf = ssl.ClientContextFactory() c = internet.SSLClient(self.host, self.port, self.f, cf) else: c = internet.TCPClient(self.host, self.port, self.f) c.setServiceParent(self) buildbot-2.6.0/master/buildbot/reporters/mail.py000066400000000000000000000341361361162603000217500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
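# A minimal illustrative sketch of how the IRC reporter defined above might be
# configured; the server, nick and channel names are hypothetical. In a
# master.cfg the instance would typically be appended to c['services'].
from buildbot.reporters.irc import IRC

irc_reporter = IRC(
    host='irc.example.net',
    port=6667,
    nick='bbot-sketch',
    channels=[{'channel': '#buildbot-sketch'}],
    useSSL=False,
    useColors=True)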
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from email import charset from email.header import Header from email.message import Message from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.utils import formatdate from email.utils import parseaddr from io import BytesIO from twisted.internet import defer from twisted.internet import reactor from twisted.python import log as twlog from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.process.properties import Properties from buildbot.process.results import Results from buildbot.reporters.notifier import ENCODING from buildbot.reporters.notifier import NotifierBase from buildbot.util import ssl from buildbot.util import unicode2bytes # this incantation teaches email to output utf-8 using 7- or 8-bit encoding, # although it has no effect before python-2.7. # needs to match notifier.ENCODING charset.add_charset(ENCODING, charset.SHORTEST, None, ENCODING) try: from twisted.mail.smtp import ESMTPSenderFactory [ESMTPSenderFactory] # for pyflakes except ImportError: ESMTPSenderFactory = None # Email parsing can be complex. We try to take a very liberal # approach. The local part of an email address matches ANY non # whitespace character. Rather allow a malformed email address than # croaking on a valid (the matching of domains should be correct # though; requiring the domain to not be a top level domain). With # these regular expressions, we can match the following: # # full.name@example.net # Full Name # VALID_EMAIL_ADDR = r"(?:\S+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)+\.?)" VALID_EMAIL = re.compile(r"^(?:%s|(.+\s+)?<%s>\s*)$" % ((VALID_EMAIL_ADDR,) * 2)) VALID_EMAIL_ADDR = re.compile(VALID_EMAIL_ADDR) @implementer(interfaces.IEmailLookup) class Domain(util.ComparableMixin): compare_attrs = ("domain") def __init__(self, domain): assert "@" not in domain self.domain = domain def getAddress(self, name): """If name is already an email address, pass it through.""" if '@' in name: return name return name + "@" + self.domain @implementer(interfaces.IEmailSender) class MailNotifier(NotifierBase): secrets = ["smtpUser", "smtpPassword"] def checkConfig(self, fromaddr, mode=("failing", "passing", "warnings"), tags=None, builders=None, addLogs=False, relayhost="localhost", buildSetSummary=False, subject="Buildbot %(result)s in %(title)s on %(builder)s", lookup=None, extraRecipients=None, sendToInterestedUsers=True, messageFormatter=None, extraHeaders=None, addPatch=True, useTls=False, useSmtps=False, smtpUser=None, smtpPassword=None, smtpPort=25, schedulers=None, branches=None, watchedWorkers='all', messageFormatterMissingWorker=None, dumpMailsToLog=False): if ESMTPSenderFactory is None: config.error("twisted-mail is not installed - cannot " "send mail") super(MailNotifier, self).checkConfig( mode=mode, tags=tags, builders=builders, buildSetSummary=buildSetSummary, messageFormatter=messageFormatter, subject=subject, addLogs=addLogs, addPatch=addPatch, schedulers=schedulers, branches=branches, watchedWorkers=watchedWorkers, messageFormatterMissingWorker=messageFormatterMissingWorker) if extraRecipients is None: extraRecipients = [] if not isinstance(extraRecipients, (list, tuple)): config.error("extraRecipients must be a list or 
tuple") else: for r in extraRecipients: if not isinstance(r, str) or not VALID_EMAIL.search(r): config.error( "extra recipient {} is not a valid email".format(r)) if lookup is not None: if not isinstance(lookup, str): assert interfaces.IEmailLookup.providedBy(lookup) if extraHeaders: if not isinstance(extraHeaders, dict): config.error("extraHeaders must be a dictionary") if useSmtps: ssl.ensureHasSSL(self.__class__.__name__) def reconfigService(self, fromaddr, mode=("failing", "passing", "warnings"), tags=None, builders=None, addLogs=False, relayhost="localhost", buildSetSummary=False, subject="Buildbot %(result)s in %(title)s on %(builder)s", lookup=None, extraRecipients=None, sendToInterestedUsers=True, messageFormatter=None, extraHeaders=None, addPatch=True, useTls=False, useSmtps=False, smtpUser=None, smtpPassword=None, smtpPort=25, schedulers=None, branches=None, watchedWorkers='all', messageFormatterMissingWorker=None, dumpMailsToLog=False): super(MailNotifier, self).reconfigService( mode=mode, tags=tags, builders=builders, buildSetSummary=buildSetSummary, messageFormatter=messageFormatter, subject=subject, addLogs=addLogs, addPatch=addPatch, schedulers=schedulers, branches=branches, watchedWorkers=watchedWorkers, messageFormatterMissingWorker=messageFormatterMissingWorker) if extraRecipients is None: extraRecipients = [] self.extraRecipients = extraRecipients self.sendToInterestedUsers = sendToInterestedUsers self.fromaddr = fromaddr self.relayhost = relayhost if lookup is not None: if isinstance(lookup, str): lookup = Domain(str(lookup)) self.lookup = lookup self.extraHeaders = extraHeaders self.useTls = useTls self.useSmtps = useSmtps self.smtpUser = smtpUser self.smtpPassword = smtpPassword self.smtpPort = smtpPort self.dumpMailsToLog = dumpMailsToLog def patch_to_attachment(self, patch, index): # patches are specifically converted to unicode before entering the db a = MIMEText(patch['body'].encode(ENCODING), _charset=ENCODING) a.add_header('Content-Disposition', "attachment", filename="source patch " + str(index)) return a @defer.inlineCallbacks def createEmail(self, msgdict, builderName, title, results, builds=None, patches=None, logs=None): text = msgdict['body'] type = msgdict['type'] if msgdict.get('subject') is not None: subject = msgdict['subject'] else: subject = self.subject % {'result': Results[results], 'projectName': title, 'title': title, 'builder': builderName} assert '\n' not in subject, \ "Subject cannot contain newlines" assert type in ('plain', 'html'), \ "'{}' message type must be 'plain' or 'html'.".format(type) if patches or logs: m = MIMEMultipart() txt = MIMEText(text, type, ENCODING) m.attach(txt) else: m = Message() m.set_payload(text, ENCODING) m.set_type("text/{}".format(type)) m['Date'] = formatdate(localtime=True) m['Subject'] = subject m['From'] = self.fromaddr # m['To'] is added later if patches: for (i, patch) in enumerate(patches): a = self.patch_to_attachment(patch, i) m.attach(a) if logs: for log in logs: name = "{}.{}".format(log['stepname'], log['name']) if (self._shouldAttachLog(log['name']) or self._shouldAttachLog(name)): # Use distinct filenames for the e-mail summary if self.buildSetSummary: filename = "{}.{}".format(log['buildername'], name) else: filename = name text = log['content']['content'] a = MIMEText(text.encode(ENCODING), _charset=ENCODING) a.add_header('Content-Disposition', "attachment", filename=filename) m.attach(a) # @todo: is there a better way to do this? 
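        # Illustrative example of the extra-header handling below: with a single
        # build, extraHeaders={'X-Buildbot-Builder': Interpolate('%(prop:buildername)s')}
        # is rendered against that build's properties, so the outgoing message
        # gains a header such as 'X-Buildbot-Builder: runtests' (the header and
        # builder names are hypothetical).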
# Add any extra headers that were requested, doing WithProperties # interpolation if only one build was given if self.extraHeaders: extraHeaders = self.extraHeaders if builds is not None and len(builds) == 1: props = Properties.fromDict(builds[0]['properties']) props.master = self.master extraHeaders = yield props.render(extraHeaders) for k, v in extraHeaders.items(): if k in m: twlog.msg("Warning: Got header " + k + " in self.extraHeaders " "but it already exists in the Message - " "not adding it.") m[k] = v return m @defer.inlineCallbacks def sendMessage(self, body, subject=None, type='plain', builderName=None, results=None, builds=None, users=None, patches=None, logs=None, worker=None): body = unicode2bytes(body) msgdict = {'body': body, 'subject': subject, 'type': type} # ensure message body ends with double carriage return if not body.endswith(b"\n\n"): msgdict['body'] = body + b'\n\n' m = yield self.createEmail(msgdict, builderName, self.master.config.title, results, builds, patches, logs) # now, who is this message going to? if worker is None: recipients = yield self.findInterrestedUsersEmails(list(users)) all_recipients = self.processRecipients(recipients, m) else: all_recipients = list(users) yield self.sendMail(m, all_recipients) def _shouldAttachLog(self, logname): if isinstance(self.addLogs, bool): return self.addLogs return logname in self.addLogs @defer.inlineCallbacks def findInterrestedUsersEmails(self, users): recipients = set() if self.sendToInterestedUsers: if self.lookup: dl = [] for u in users: dl.append(defer.maybeDeferred(self.lookup.getAddress, u)) users = yield defer.gatherResults(dl) for r in users: if r is None: # getAddress didn't like this address continue # Git can give emails like 'User' @foo.com so check # for two @ and chop the last if r.count('@') > 1: r = r[:r.rindex('@')] if VALID_EMAIL.search(r): recipients.add(r) else: twlog.msg("INVALID EMAIL: {}".format(r)) return recipients def formatAddress(self, addr): r = parseaddr(addr) if not r[0]: return r[1] return "\"%s\" <%s>" % (Header(r[0], 'utf-8').encode(), r[1]) def processRecipients(self, blamelist, m): to_recipients = set(blamelist) cc_recipients = set() # If we're sending to interested users put the extras in the # CC list so they can tell if they are also interested in the # change: if self.sendToInterestedUsers and to_recipients: cc_recipients.update(self.extraRecipients) else: to_recipients.update(self.extraRecipients) m['To'] = ", ".join([self.formatAddress(addr) for addr in sorted(to_recipients)]) if cc_recipients: m['CC'] = ", ".join([self.formatAddress(addr) for addr in sorted(cc_recipients)]) return list(to_recipients | cc_recipients) def sendMail(self, m, recipients): s = m.as_string() twlog.msg("sending mail ({} bytes) to".format(len(s)), recipients) if self.dumpMailsToLog: # pragma: no cover twlog.msg("mail data:\n{0}".format(s)) result = defer.Deferred() useAuth = self.smtpUser and self.smtpPassword s = unicode2bytes(s) recipients = [parseaddr(r)[1] for r in recipients] sender_factory = ESMTPSenderFactory( unicode2bytes(self.smtpUser), unicode2bytes(self.smtpPassword), parseaddr(self.fromaddr)[1], recipients, BytesIO(s), result, requireTransportSecurity=self.useTls, requireAuthentication=useAuth) if self.useSmtps: reactor.connectSSL(self.relayhost, self.smtpPort, sender_factory, ssl.ClientContextFactory()) else: reactor.connectTCP(self.relayhost, self.smtpPort, sender_factory) return result def isWorkerMessageNeeded(self, worker): return super(MailNotifier, 
self).isWorkerMessageNeeded(worker) \ and worker['notify'] buildbot-2.6.0/master/buildbot/reporters/message.py000066400000000000000000000174641361162603000224570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import jinja2 from twisted.internet import defer from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import statusToString from buildbot.reporters import utils class MessageFormatterBase: template_filename = 'default_mail.txt' template_type = 'plain' def __init__(self, template_dir=None, template_filename=None, template=None, subject_filename=None, subject=None, template_type=None, ctx=None, ): self.body_template = self.getTemplate(template_filename, template_dir, template) self.subject_template = None if subject_filename or subject: self.subject_template = self.getTemplate(subject_filename, template_dir, subject) if template_type is not None: self.template_type = template_type if ctx is None: ctx = {} self.ctx = ctx def getTemplate(self, filename, dirname, content): if content and (filename or dirname): config.error("Only one of template or template path can be given") if content: return jinja2.Template(content) if dirname is None: dirname = os.path.join(os.path.dirname(__file__), "templates") loader = jinja2.FileSystemLoader(dirname) env = jinja2.Environment( loader=loader, undefined=jinja2.StrictUndefined) if filename is None: filename = self.template_filename return env.get_template(filename) def buildAdditionalContext(self, master, ctx): ctx.update(self.ctx) def renderMessage(self, ctx): body = self.body_template.render(ctx) msgdict = {'body': body, 'type': self.template_type} if self.subject_template is not None: msgdict['subject'] = self.subject_template.render(ctx) return msgdict class MessageFormatter(MessageFormatterBase): template_filename = 'default_mail.txt' template_type = 'plain' def __init__(self, template_dir=None, template_filename=None, template=None, template_name=None, subject_filename=None, subject=None, template_type=None, ctx=None, wantProperties=True, wantSteps=False, wantLogs=False): if template_name is not None: config.warnDeprecated('0.9.1', "template_name is deprecated, use template_filename") template_filename = template_name super().__init__(template_dir=template_dir, template_filename=template_filename, template=template, subject_filename=subject_filename, subject=subject, template_type=template_type, ctx=ctx) self.wantProperties = wantProperties self.wantSteps = wantSteps self.wantLogs = wantLogs def getDetectedStatus(self, mode, results, previous_results): if results == FAILURE: if "change" in mode and previous_results is not None and 
previous_results != results: text = "new failure" elif "problem" in mode and previous_results and previous_results != FAILURE: text = "new failure" else: text = "failed build" elif results == WARNINGS: text = "problem in the build" elif results == SUCCESS: if "change" in mode and previous_results is not None and previous_results != results: text = "restored build" else: text = "passing build" elif results == EXCEPTION: text = "build exception" else: text = "%s build" % (statusToString(results)) return text def getProjects(self, source_stamps, master): projects = set() for ss in source_stamps: if ss['project']: projects.add(ss['project']) if not projects: projects = [master.config.title] return ', '.join(list(projects)) def messageSourceStamps(self, source_stamps): text = "" for ss in source_stamps: source = "" if ss['branch']: source += "[branch %s] " % ss['branch'] if ss['revision']: source += str(ss['revision']) else: source += "HEAD" if ss['patch'] is not None: source += " (plus patch)" discriminator = "" if ss['codebase']: discriminator = " '%s'" % ss['codebase'] text += "Build Source Stamp%s: %s\n" % (discriminator, source) return text def messageSummary(self, build, results): t = build['state_string'] if t: t = ": " + t else: t = "" if results == SUCCESS: text = "Build succeeded!" elif results == WARNINGS: text = "Build Had Warnings%s" % (t,) elif results == CANCELLED: text = "Build was cancelled" else: text = "BUILD FAILED%s" % (t,) return text @defer.inlineCallbacks def formatMessageForBuildResults(self, mode, buildername, buildset, build, master, previous_results, blamelist): """Generate a buildbot mail message and return a dictionary containing the message body, type and subject.""" ss_list = buildset['sourcestamps'] results = build['results'] ctx = dict(results=build['results'], mode=mode, buildername=buildername, workername=build['properties'].get( 'workername', [""])[0], buildset=buildset, build=build, projects=self.getProjects(ss_list, master), previous_results=previous_results, status_detected=self.getDetectedStatus( mode, results, previous_results), build_url=utils.getURLForBuild( master, build['builder']['builderid'], build['number']), buildbot_url=master.config.buildbotURL, blamelist=blamelist, summary=self.messageSummary(build, results), sourcestamps=self.messageSourceStamps(ss_list) ) yield self.buildAdditionalContext(master, ctx) msgdict = self.renderMessage(ctx) return msgdict class MessageFormatterMissingWorker(MessageFormatterBase): template_filename = 'missing_mail.txt' @defer.inlineCallbacks def formatMessageForMissingWorker(self, master, worker): ctx = dict(buildbot_title=master.config.title, buildbot_url=master.config.buildbotURL, worker=worker) yield self.buildAdditionalContext(master, ctx) msgdict = self.renderMessage(ctx) return msgdict buildbot-2.6.0/master/buildbot/reporters/notifier.py000066400000000000000000000307171361162603000226460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
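# A minimal illustrative sketch of constructing the MessageFormatter defined
# above from inline Jinja2 templates and handing it to a notifier; the template
# text, the extra ctx key and the e-mail address are hypothetical.
from buildbot.reporters.message import MessageFormatter

formatter = MessageFormatter(
    template="{{ organization }}: {{ status_detected }} on {{ buildername }}\n"
             "{{ summary }}\n{{ build_url }}\n",
    template_type='plain',
    subject="{{ status_detected }} on {{ projects }}",
    ctx={'organization': 'Example Corp'},  # merged into the render context by buildAdditionalContext()
    wantProperties=True)
# e.g. MailNotifier(fromaddr='buildbot@example.org', messageFormatter=formatter)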
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import abc from twisted.internet import defer from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import utils from buildbot.reporters.message import MessageFormatter as DefaultMessageFormatter from buildbot.reporters.message import MessageFormatterMissingWorker from buildbot.util import service ENCODING = 'utf-8' class NotifierBase(service.BuildbotService): name = None __meta__ = abc.ABCMeta possible_modes = ("change", "failing", "passing", "problem", "warnings", "exception", "cancelled") def computeShortcutModes(self, mode): if isinstance(mode, str): if mode == "all": mode = ("failing", "passing", "warnings", "exception", "cancelled") elif mode == "warnings": mode = ("failing", "warnings") else: mode = (mode,) return mode def checkConfig(self, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", addLogs=False, addPatch=False, schedulers=None, branches=None, watchedWorkers=None, messageFormatterMissingWorker=None): for m in self.computeShortcutModes(mode): if m not in self.possible_modes: if m == "all": config.error( "mode 'all' is not valid in an iterator and must be passed in as a separate string") else: config.error( "mode %s is not a valid mode" % (m,)) if self.name is None: self.name = self.__class__.__name__ if tags is not None: self.name += "_tags_" + "+".join(tags) if builders is not None: self.name += "_builders_" + "+".join(builders) if schedulers is not None: self.name += "_schedulers_" + "+".join(schedulers) if branches is not None: self.name += "_branches_" + "+".join(branches) self.name += "_".join(mode) if '\n' in subject: config.error( 'Newlines are not allowed in message subjects') # you should either limit on builders or tags, not both if builders is not None and tags is not None: config.error( "Please specify only builders or tags to include - " + "not both.") if not(watchedWorkers == 'all' or watchedWorkers is None or isinstance(watchedWorkers, (list, tuple, set))): config.error("watchedWorkers must be 'all', None, or list of worker names") def reconfigService(self, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", addLogs=False, addPatch=False, schedulers=None, branches=None, watchedWorkers=None, messageFormatterMissingWorker=None): self.mode = self.computeShortcutModes(mode) self.tags = tags self.builders = builders self.schedulers = schedulers self.branches = branches self.subject = subject self.addLogs = addLogs self.addPatch = addPatch if messageFormatter is None: messageFormatter = DefaultMessageFormatter() self.messageFormatter = messageFormatter if messageFormatterMissingWorker is None: messageFormatterMissingWorker = MessageFormatterMissingWorker() self.messageFormatterMissingWorker = messageFormatterMissingWorker self.buildSetSummary = buildSetSummary self._buildset_complete_consumer = None if watchedWorkers is None: self.watchedWorkers = () else: 
self.watchedWorkers = watchedWorkers @defer.inlineCallbacks def startService(self): yield super().startService() startConsuming = self.master.mq.startConsuming self._buildsetCompleteConsumer = yield startConsuming( self.buildsetComplete, ('buildsets', None, 'complete')) self._buildCompleteConsumer = yield startConsuming( self.buildComplete, ('builds', None, 'finished')) self._workerMissingConsumer = yield startConsuming( self.workerMissing, ('workers', None, 'missing')) @defer.inlineCallbacks def stopService(self): yield super().stopService() if self._buildsetCompleteConsumer is not None: yield self._buildsetCompleteConsumer.stopConsuming() self._buildsetCompleteConsumer = None if self._buildCompleteConsumer is not None: yield self._buildCompleteConsumer.stopConsuming() self._buildCompleteConsumer = None if self._workerMissingConsumer is not None: yield self._workerMissingConsumer.stopConsuming() self._workerMissingConsumer = None def wantPreviousBuild(self): return "change" in self.mode or "problem" in self.mode @defer.inlineCallbacks def buildsetComplete(self, key, msg): if not self.buildSetSummary: return bsid = msg['bsid'] res = yield utils.getDetailsForBuildset( self.master, bsid, wantProperties=self.messageFormatter.wantProperties, wantSteps=self.messageFormatter.wantSteps, wantPreviousBuild=self.wantPreviousBuild(), wantLogs=self.messageFormatter.wantLogs) builds = res['builds'] buildset = res['buildset'] # only include builds for which isMessageNeeded returns true builds = [build for build in builds if self.isMessageNeeded(build)] if builds: self.buildMessage("whole buildset", builds, buildset['results']) @defer.inlineCallbacks def buildComplete(self, key, build): if self.buildSetSummary: return br = yield self.master.data.get(("buildrequests", build['buildrequestid'])) buildset = yield self.master.data.get(("buildsets", br['buildsetid'])) yield utils.getDetailsForBuilds( self.master, buildset, [build], wantProperties=self.messageFormatter.wantProperties, wantSteps=self.messageFormatter.wantSteps, wantPreviousBuild=self.wantPreviousBuild(), wantLogs=self.messageFormatter.wantLogs) # only include builds for which isMessageNeeded returns true if self.isMessageNeeded(build): self.buildMessage( build['builder']['name'], [build], build['results']) def matchesAnyTag(self, tags): return self.tags and any(tag for tag in self.tags if tag in tags) def isMessageNeeded(self, build): # here is where we actually do something. 
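        # Illustrative reading of the checks below: with prev_build available, a
        # build that goes from SUCCESS to FAILURE is reported under the 'change',
        # 'failing' and 'problem' modes, whereas a second consecutive FAILURE only
        # matches 'failing'.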
builder = build['builder'] scheduler = build['properties'].get('scheduler', [None])[0] branch = build['properties'].get('branch', [None])[0] results = build['results'] if self.builders is not None and builder['name'] not in self.builders: return False # ignore this build if self.schedulers is not None and scheduler not in self.schedulers: return False # ignore this build if self.branches is not None and branch not in self.branches: return False # ignore this build if self.tags is not None and \ not self.matchesAnyTag(builder['tags']): return False # ignore this build if "change" in self.mode: prev = build['prev_build'] if prev and prev['results'] != results: return True if "failing" in self.mode and results == FAILURE: return True if "passing" in self.mode and results == SUCCESS: return True if "problem" in self.mode and results == FAILURE: prev = build['prev_build'] if prev and prev['results'] != FAILURE: return True if "warnings" in self.mode and results == WARNINGS: return True if "exception" in self.mode and results == EXCEPTION: return True if "cancelled" in self.mode and results == CANCELLED: return True return False @defer.inlineCallbacks def getLogsForBuild(self, build): all_logs = [] steps = yield self.master.data.get(('builds', build['buildid'], "steps")) for step in steps: logs = yield self.master.data.get(("steps", step['stepid'], 'logs')) for l in logs: l['stepname'] = step['name'] l['content'] = yield self.master.data.get(("logs", l['logid'], 'contents')) all_logs.append(l) return all_logs def getResponsibleUsersForBuild(self, master, buildid): # Use library method but subclassers may want to override that return utils.getResponsibleUsersForBuild(master, buildid) @defer.inlineCallbacks def buildMessage(self, name, builds, results): patches = [] logs = [] body = "" subject = None msgtype = None users = set() for build in builds: if self.addPatch: ss_list = build['buildset']['sourcestamps'] for ss in ss_list: if 'patch' in ss and ss['patch'] is not None: patches.append(ss['patch']) if self.addLogs: build_logs = yield self.getLogsForBuild(build) logs.extend(build_logs) if 'prev_build' in build and build['prev_build'] is not None: previous_results = build['prev_build']['results'] else: previous_results = None blamelist = yield self.getResponsibleUsersForBuild(self.master, build['buildid']) buildmsg = yield self.messageFormatter.formatMessageForBuildResults( self.mode, name, build['buildset'], build, self.master, previous_results, blamelist) users.update(set(blamelist)) msgtype = buildmsg['type'] body += buildmsg['body'] if 'subject' in buildmsg: subject = buildmsg['subject'] yield self.sendMessage(body, subject, msgtype, name, results, builds, list(users), patches, logs) @abc.abstractmethod def sendMessage(self, body, subject=None, type=None, builderName=None, results=None, builds=None, users=None, patches=None, logs=None, worker=None): pass def isWorkerMessageNeeded(self, worker): return self.watchedWorkers == 'all' or worker['name'] in self.watchedWorkers @defer.inlineCallbacks def workerMissing(self, key, worker): if not self.isWorkerMessageNeeded(worker): return msg = yield self.messageFormatterMissingWorker.formatMessageForMissingWorker(self.master, worker) text = msg['body'].encode(ENCODING) if 'subject' in msg: subject = msg['subject'] else: subject = "Buildbot worker {name} missing".format(**worker) assert msg['type'] in ('plain', 'html'), \ "'%s' message type must be 'plain' or 'html'." 
% msg['type'] yield self.sendMessage(text, subject, msg['type'], users=worker['notify'], worker=worker['name']) buildbot-2.6.0/master/buildbot/reporters/pushjet.py000066400000000000000000000120241361162603000225000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log as twlog from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.reporters.message import MessageFormatter as DefaultMessageFormatter from buildbot.reporters.message import MessageFormatterMissingWorker from buildbot.reporters.notifier import NotifierBase from buildbot.util import httpclientservice ENCODING = 'utf8' LEVELS = { CANCELLED: 'cancelled', EXCEPTION: 'exception', FAILURE: 'failing', SUCCESS: 'passing', WARNINGS: 'warnings' } class PushjetNotifier(NotifierBase): def checkConfig(self, secret, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", schedulers=None, branches=None, levels=None, base_url='https://api.pushjet.io', watchedWorkers=None, messageFormatterMissingWorker=None): super(PushjetNotifier, self).checkConfig(mode, tags, builders, buildSetSummary, messageFormatter, subject, False, False, schedulers, branches, watchedWorkers) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, secret, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", schedulers=None, branches=None, levels=None, base_url='https://api.pushjet.io', watchedWorkers=None, messageFormatterMissingWorker=None): secret = yield self.renderSecrets(secret) if messageFormatter is None: messageFormatter = DefaultMessageFormatter(template_type='html', template_filename='default_notification.txt') if messageFormatterMissingWorker is None: messageFormatterMissingWorker = MessageFormatterMissingWorker( template_filename='missing_notification.txt') super(PushjetNotifier, self).reconfigService(mode, tags, builders, buildSetSummary, messageFormatter, subject, False, False, schedulers, branches, watchedWorkers, messageFormatterMissingWorker) self.secret = secret if levels is None: self.levels = {} else: self.levels = levels self._http = yield httpclientservice.HTTPClientService.getService( self.master, base_url) def sendMessage(self, body, subject=None, type=None, builderName=None, results=None, builds=None, users=None, patches=None, logs=None, worker=None): if worker is not None and worker not in 
self.watchedWorkers: return msg = {'message': body} level = self.levels.get(LEVELS[results] if worker is None else 'worker_missing') if level is not None: msg['level'] = level if subject is not None: msg['title'] = subject else: msg['title'] = self.subject % {'result': Results[results], 'projectName': self.master.config.title, 'title': self.master.config.title, 'builder': builderName} return self.sendNotification(msg) def sendNotification(self, params): twlog.msg("sending pushjet notification") params.update(dict(secret=self.secret)) return self._http.post('/message', data=params) buildbot-2.6.0/master/buildbot/reporters/pushover.py000066400000000000000000000134431361162603000226770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log as twlog from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.reporters.message import MessageFormatter as DefaultMessageFormatter from buildbot.reporters.message import MessageFormatterMissingWorker from buildbot.reporters.notifier import NotifierBase from buildbot.util import httpclientservice ENCODING = 'utf8' VALID_PARAMS = {"sound", "callback", "timestamp", "url", "url_title", "device", "retry", "expire", "html"} PRIORITIES = { CANCELLED: 'cancelled', EXCEPTION: 'exception', FAILURE: 'failing', SUCCESS: 'passing', WARNINGS: 'warnings' } class PushoverNotifier(NotifierBase): def checkConfig(self, user_key, api_token, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", schedulers=None, branches=None, priorities=None, otherParams=None, watchedWorkers=None, messageFormatterMissingWorker=None): super(PushoverNotifier, self).checkConfig(mode, tags, builders, buildSetSummary, messageFormatter, subject, False, False, schedulers, branches, watchedWorkers) httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__) if otherParams is not None and set(otherParams.keys()) - VALID_PARAMS: config.error("otherParams can be only 'sound', 'callback', 'timestamp', " "'url', 'url_title', 'device', 'retry', 'expire', or 'html'") @defer.inlineCallbacks def reconfigService(self, user_key, api_token, mode=("failing", "passing", "warnings"), tags=None, builders=None, buildSetSummary=False, messageFormatter=None, subject="Buildbot %(result)s in %(title)s on %(builder)s", schedulers=None, branches=None, priorities=None, otherParams=None, watchedWorkers=None, messageFormatterMissingWorker=None): user_key, api_token = yield self.renderSecrets(user_key, 
api_token) if messageFormatter is None: messageFormatter = DefaultMessageFormatter(template_type='html', template_filename='default_notification.txt') if messageFormatterMissingWorker is None: messageFormatterMissingWorker = MessageFormatterMissingWorker( template_filename='missing_notification.txt') super(PushoverNotifier, self).reconfigService(mode, tags, builders, buildSetSummary, messageFormatter, subject, False, False, schedulers, branches, watchedWorkers, messageFormatterMissingWorker) self.user_key = user_key self.api_token = api_token if priorities is None: self.priorities = {} else: self.priorities = priorities if otherParams is None: self.otherParams = {} else: self.otherParams = otherParams self._http = yield httpclientservice.HTTPClientService.getService( self.master, 'https://api.pushover.net') def sendMessage(self, body, subject=None, type=None, builderName=None, results=None, builds=None, users=None, patches=None, logs=None, worker=None): if worker is not None and worker not in self.watchedWorkers: return msg = {'message': body} if type == 'html': msg['html'] = '1' try: msg['priority'] = self.priorities[PRIORITIES[results] if worker is None else 'worker_missing'] except KeyError: pass if subject is not None: msg['title'] = subject else: msg['title'] = self.subject % {'result': Results[results], 'projectName': self.master.config.title, 'title': self.master.config.title, 'builder': builderName} return self.sendNotification(msg) def sendNotification(self, params): twlog.msg("sending pushover notification") params.update(dict(user=self.user_key, token=self.api_token)) params.update(self.otherParams) return self._http.post('/1/messages.json', params=params) buildbot-2.6.0/master/buildbot/reporters/stash.py000066400000000000000000000020421361162603000221370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from buildbot.reporters.bitbucketserver import BitbucketServerStatusPush def StashStatusPush(*args, **kwargs): warnings.warn("The 'StashStatusPush' class was renamed to " "'BitbucketServer.BitbucketServerStatusPush'", DeprecationWarning) return BitbucketServerStatusPush(*args, **kwargs) buildbot-2.6.0/master/buildbot/reporters/telegram.py000066400000000000000000001144401361162603000226230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
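# ---------------------------------------------------------------------------
# Illustrative master.cfg sketch for the Pushjet/Pushover notifiers defined in
# the two modules above. The secrets and the builder/worker names are
# placeholders, and only keyword arguments that appear in the
# checkConfig()/reconfigService() signatures above are used.
from buildbot.plugins import reporters

c = BuildmasterConfig = {}
c['services'] = [
    # Pushover: 'priorities' maps the result names used by PRIORITIES (plus
    # 'worker_missing') to Pushover priority levels.
    reporters.PushoverNotifier(
        user_key='pushover-user-key-placeholder',
        api_token='pushover-api-token-placeholder',
        mode=('failing', 'passing', 'warnings'),
        builders=['runtests'],                          # placeholder builder name
        priorities={'failing': 1, 'worker_missing': 2},
        watchedWorkers=['worker1'],                     # placeholder worker name
    ),
    # Pushjet: a single 'secret', with 'levels' playing the same role as
    # 'priorities' above; 'base_url' may point at a self-hosted instance.
    reporters.PushjetNotifier(
        secret='pushjet-secret-placeholder',
        mode=('failing',),
        levels={'failing': 5},
    ),
]
# ---------------------------------------------------------------------------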
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import io import json import random import shlex from twisted.internet import defer from twisted.internet import reactor from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.words import Channel from buildbot.reporters.words import Contact from buildbot.reporters.words import StatusBot from buildbot.reporters.words import UsageError from buildbot.reporters.words import WebhookResource from buildbot.schedulers.forcesched import CollectedValidationError from buildbot.schedulers.forcesched import ForceScheduler from buildbot.util import Notifier from buildbot.util import asyncSleep from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import unicode2bytes class TelegramChannel(Channel): def __init__(self, bot, channel): assert isinstance(channel, dict), "channel must be a dict provided by Telegram API" super().__init__(bot, channel['id']) self.chat_info = channel @defer.inlineCallbacks def list_notified_events(self): if self.notify_events: yield self.send("The following events are being notified:\n{}" .format("\n".join(sorted( "🔔 **{}**".format(n) for n in self.notify_events)))) else: yield self.send("🔕 No events are being notified.") def collect_fields(fields): for field in fields: if field['fullName']: yield field if 'fields' in field: yield from collect_fields(field['fields']) class TelegramContact(Contact): def __init__(self, user, channel=None): assert isinstance(user, dict), "user must be a dict provided by Telegram API" self.user_info = user super().__init__(user['id'], channel) self.template = None @property def chat_id(self): return self.channel.id @property def user_full_name(self): fullname = " ".join((self.user_info['first_name'], self.user_info.get('last_name', ''))).strip() return fullname @property def user_name(self): return self.user_info['first_name'] def describeUser(self): user = self.user_full_name try: user += ' (@{})'.format(self.user_info['username']) except KeyError: pass if not self.is_private_chat: chat_title = self.channel.chat_info.get('title') if chat_title: user += " on '{}'".format(chat_title) return user ACCESS_DENIED_MESSAGES = [ "🧙‍♂️ You shall not pass! 👹", "😨 Oh NO! You are simply not allowed to to this! 😢", "⛔ You cannot do this. Better go outside and relax... 🌳", "⛔ ACCESS DENIED! This incident has ben reported to NSA, KGB, and George Soros! 🕵", "🚫 Unauthorized access detected! Your device will explode in 3... 2... 1... 💣", "☢ Radiation level too high! Continuation of the procedure forbidden! 
🛑", ] def access_denied(self, *args, tmessage, **kwargs): self.send( random.choice(self.ACCESS_DENIED_MESSAGES), reply_to_message_id=tmessage['message_id']) def query_button(self, caption, payload): if isinstance(payload, str) and len(payload) < 64: return {'text': caption, 'callback_data': payload} key = hash(repr(payload)) while True: cached = self.bot.query_cache.get(key) if cached is None: self.bot.query_cache[key] = payload break if cached == payload: break key += 1 return {'text': caption, 'callback_data': key} @defer.inlineCallbacks def command_START(self, args, **kwargs): yield self.command_HELLO(args) reactor.callLater(0.2, self.command_HELP, '') def command_NAY(self, args, tmessage, **kwargs): """forget the current command""" replied_message = tmessage.get('reply_to_message') if replied_message: if 'reply_markup' in replied_message: self.bot.edit_keyboard(self.channel.id, replied_message['message_id']) if self.is_private_chat: self.send("Never mind...") else: self.send("Never mind, {}...".format(self.user_name)) command_NAY.usage = "nay - never mind the command we are currently discussing" @classmethod def describe_commands(cls): commands = cls.build_commands() response = [] for command in commands: if command == 'start': continue meth = getattr(cls, 'command_' + command.upper()) doc = getattr(meth, '__doc__', None) if not doc: doc = command response.append("{} - {}".format(command, doc)) return response @Contact.overrideCommand def command_COMMANDS(self, args, **kwargs): if args.lower() == 'botfather': response = self.describe_commands() if response: self.send('\n'.join(response)) else: return super().command_COMMANDS(args) @defer.inlineCallbacks def command_GETID(self, args, **kwargs): """get user and chat ID""" if self.is_private_chat: self.send("Your ID is {}.".format(self.user_id)) else: yield self.send("{}, your ID is {}.".format(self.user_name, self.user_id)) self.send("This {} ID is {}.".format(self.channel.chat_info.get('type', "group"), self.chat_id)) command_GETID.usage = "getid - get user and chat ID that can be put in the master configuration file" @defer.inlineCallbacks @Contact.overrideCommand def command_LIST(self, args, **kwargs): args = self.splitArgs(args) if not args: keyboard = [ [self.query_button("👷️ Builders", '/list builders'), self.query_button("👷️ (including old ones)", '/list all builders')], [self.query_button("⚙ Workers", '/list workers'), self.query_button("⚙ (including old ones)", '/list all workers')], [self.query_button("📄 Changes (last 10)", '/list changes')], ] self.send("What do you want to list?", reply_markup={'inline_keyboard': keyboard}) return all = False num = 10 try: num = int(args[0]) del args[0] except ValueError: if args[0] == 'all': all = True del args[0] except IndexError: pass if not args: raise UsageError("Try '" + self.bot.commandPrefix + "list [all|N] builders|workers|changes'.") if args[0] == 'builders': bdicts = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() response = ["I found the following **builders**:"] for bdict in bdicts: if bdict['builderid'] in online_builderids: response.append("`{}`".format(bdict['name'])) elif all: response.append("`{}` ❌".format(bdict['name'])) self.send('\n'.join(response)) elif args[0] == 'workers': workers = yield self.master.data.get(('workers',)) response = ["I found the following **workers**:"] for worker in workers: if worker['configured_on']: response.append("`{}`".format(worker['name'])) if not worker['connected_to']: response[-1] += " ⚠️" elif 
all: response.append("`{}` ❌".format(worker['name'])) self.send('\n'.join(response)) elif args[0] == 'changes': wait_message = yield self.send("⏳ Getting your changes...") if all: changes = yield self.master.db.changes.getChanges() self.bot.delete_message(self.channel.id, wait_message['message_id']) num = len(changes) if num > 50: keyboard = [ [self.query_button("‼ Yes, flood me with all of them!", '/list {} changes'.format(num))], [self.query_button("✅ No, just show last 50", '/list 50 changes')] ] self.send("I found {} changes. Do you really want me to list them all?".format(num), reply_markup={'inline_keyboard': keyboard}) return else: changes = yield self.master.db.changes.getRecentChanges(num) self.bot.delete_message(self.channel.id, wait_message['message_id']) response = ["I found the following recent **changes**:\n"] for change in reversed(changes): change['comment'] = change['comments'].split('\n')[0] change['date'] = change['when_timestamp'].strftime('%Y-%m-%d %H:%M') response.append( "[{comment}]({revlink})\n" "_Author_: {author}\n" "_Date_: {date}\n" "_Repository_: {repository}\n" "_Branch_: {branch}\n" "_Revision_: {revision}\n".format(**change)) self.send('\n'.join(response)) @defer.inlineCallbacks def get_running_builders(self): builders = [] for bdict in (yield self.bot.getAllBuilders()): if (yield self.bot.getRunningBuilds(bdict['builderid'])): builders.append(bdict['name']) return builders @defer.inlineCallbacks @Contact.overrideCommand def command_WATCH(self, args, **kwargs): if args: super().command_WATCH(args) else: builders = yield self.get_running_builders() if builders: keyboard = [ [self.query_button("🔎 " + b, '/watch {}'.format(b))] for b in builders ] self.send("Which builder do you want to watch?", reply_markup={'inline_keyboard': keyboard}) else: self.send("There are no currently running builds.") @Contact.overrideCommand def command_NOTIFY(self, args, tquery=None, **kwargs): if args: want_list = args == 'list' if want_list and tquery: self.bot.delete_message(self.chat_id, tquery['message']['message_id']) super().command_NOTIFY(args) if want_list or not tquery: return keyboard = [ [ self.query_button("{} {}".format(e.capitalize(), '🔔' if e in self.channel.notify_events else '🔕'), '/notify {}-quiet {}'.format( 'off' if e in self.channel.notify_events else 'on', e)) for e in evs ] for evs in (('started', 'finished'), ('success', 'failure'), ('warnings', 'exception'), ('problem', 'recovery'), ('worse', 'better'), ('cancelled', 'worker')) ] + [[self.query_button("Hide...", '/notify list')]] if tquery: self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id'], keyboard) else: self.send("Here are available notifications and their current state. " "Click to turn them on/off.", reply_markup={'inline_keyboard': keyboard}) def ask_for_reply(self, prompt, greeting='Ok'): kwargs = {} if not self.is_private_chat: username = self.user_info.get('username', '') if username: if greeting: prompt = "{} @{}, now {}...".format(greeting, username, prompt) else: prompt = "@{}, now {}...".format(username, prompt) kwargs['reply_markup'] = { 'force_reply': True, 'selective': True } else: if greeting: prompt = "{}, now reply to this message and {}...".format(greeting, prompt) else: prompt = "Reply to this message and {}...".format(prompt) else: if greeting: prompt = "{}, now {}...".format(greeting, prompt) else: prompt = prompt[0].upper() + prompt[1:] + "..." # Telegram seems to have a bug, which causes reply request to pop sometimes again. 
# So we do not force reply to avoid it... # kwargs['reply_markup'] = { # 'force_reply': True # } self.send(prompt, **kwargs) @defer.inlineCallbacks @Contact.overrideCommand def command_STOP(self, args, **kwargs): argv = self.splitArgs(args) if len(argv) >= 3 or \ argv and argv[0] != 'build': super().command_STOP(args) return argv = argv[1:] if not argv: builders = yield self.get_running_builders() if builders: keyboard = [ [self.query_button("🚫 " + b, '/stop build {}'.format(b))] for b in builders ] self.send("Select builder to stop...", reply_markup={'inline_keyboard': keyboard}) else: # len(argv) == 1 self.template = '/stop ' + args + ' {}' self.ask_for_reply("give me the reason to stop build on `{}`".format(argv[0])) @Contact.overrideCommand def command_SHUTDOWN(self, args, **kwargs): if args: return super().command_SHUTDOWN(args) if self.master.botmaster.shuttingDown: keyboard = [[ self.query_button("🔙 Stop Shutdown", '/shutdown stop'), self.query_button("‼️ Shutdown Now", '/shutdown now') ]] text = "Buildbot is currently shutting down.\n\n" else: keyboard = [[ self.query_button("↘️ Begin Shutdown", '/shutdown start'), self.query_button("‼️ Shutdown Now", '/shutdown now') ]] text = "" self.send(text + "What do you want to do?", reply_markup={'inline_keyboard': keyboard}) @defer.inlineCallbacks def command_FORCE(self, args, tquery=None, partial=None, **kwargs): """force a build""" try: forceschedulers = yield self.master.data.get(('forceschedulers',)) except AttributeError: forceschedulers = None else: forceschedulers = dict((s['name'], s) for s in forceschedulers) if not forceschedulers: raise UsageError("no force schedulers configured for use by /force") argv = self.splitArgs(args) try: sched = argv[0] except IndexError: if len(forceschedulers) == 1: sched = next(iter(forceschedulers)) else: keyboard = [ [self.query_button(s['label'], '/force {}'.format(s['name']))] for s in forceschedulers.values() ] self.send("Which force scheduler do you want to activate?", reply_markup={'inline_keyboard': keyboard}) return else: if sched in forceschedulers: del argv[0] elif len(forceschedulers) == 1: sched = next(iter(forceschedulers)) else: raise UsageError("Try '/force' and follow the instructions" " (no force scheduler {})".format(sched)) scheduler = forceschedulers[sched] try: task = argv.pop(0) except IndexError: task = 'config' if tquery and task != 'config': self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id']) if not argv: keyboard = [ [self.query_button(b, '/force {} {} {}'.format(sched, task, b))] for b in scheduler['builder_names'] ] self.send("Which builder do you want to start?", reply_markup={'inline_keyboard': keyboard}) return if task == 'ask': try: what = argv.pop(0) except IndexError: raise UsageError("Try '/force' and follow the instructions") else: what = None # silence PyCharm warnings bldr = argv.pop(0) if bldr not in scheduler['builder_names']: raise UsageError("Try '/force' and follow the instructions (`{}` not configured for _{}_ scheduler)" .format(bldr, scheduler['label'])) try: params = dict(arg.split('=', 1) for arg in argv) except ValueError as err: raise UsageError("Try '/force' and follow the instructions ({})".format(err)) all_fields = list(collect_fields(scheduler['all_fields'])) required_params = [f['fullName'] for f in all_fields if f['required'] and f['fullName'] not in ('username', 'owner')] missing_params = [p for p in required_params if p not in params] if task == 'build': # TODO This should probably be moved to the upper class, # however, it 
will change the force command totally try: if missing_params: # raise UsageError task = 'config' else: params.update(dict( (f['fullName'], f['default']) for f in all_fields if f['type'] == 'fixed' and f['fullName'] not in ('username', 'owner') )) builder = yield self.bot.getBuilder(buildername=bldr) for scheduler in self.master.allSchedulers(): if scheduler.name == sched and isinstance(scheduler, ForceScheduler): break else: raise ValueError("There is no force scheduler '{}'".format(sched)) try: yield scheduler.force(builderid=builder['builderid'], owner=self.describeUser(), **params) except CollectedValidationError as err: raise ValueError(err.errors) else: self.send("Force build successfully requested.") return except (IndexError, ValueError) as err: raise UsageError("Try '/force' and follow the instructions ({})".format(err)) if task == 'config': msg = "{}, you are about to start a new build on `{}`!"\ .format(self.user_full_name, bldr) keyboard = [] args = ' '.join(shlex.quote("{}={}".format(*p)) for p in params.items()) fields = [f for f in all_fields if f['type'] != 'fixed' and f['fullName'] not in ('username', 'owner')] if fields: msg += "\n\nThe current build parameters are:" for field in fields: if field['type'] == 'nested': msg += "\n{}".format(field['label']) else: field_name = field['fullName'] value = params.get(field_name, field['default']).strip() msg += "\n {} `{}`".format(field['label'], value) if value: key = "Change " else: key = "Set " key += field_name.replace('_', ' ').title() if field_name in missing_params: key = "⚠️ " + key msg += " ⚠️" keyboard.append( [self.query_button(key, '/force {} ask {} {} {}' .format(sched, field_name, bldr, args))] ) msg += "\n\nWhat do you want to do?" if missing_params: msg += " You must set values for all parameters marked with ⚠️" if not missing_params: keyboard.append( [self.query_button("🚀 Start Build", '/force {} build {} {}' .format(sched, bldr, args))], ) self.send(msg, reply_markup={'inline_keyboard': keyboard}) elif task == 'ask': prompt = "enter the new value for the " + what.replace('_', ' ').lower() args = ' '.join(shlex.quote("{}={}".format(*p)) for p in params.items() if p[0] != what) self.template = '/force {} config {} {} {}={{}}'.format(sched, bldr, args, what) self.ask_for_reply(prompt, '') else: raise UsageError("Try '/force' and follow the instructions") command_FORCE.usage = "force - Force a build" class TelegramStatusBot(StatusBot): contactClass = TelegramContact channelClass = TelegramChannel commandPrefix = '/' offline_string = "offline ❌" idle_string = "idle 💤" running_string = "running 🌀:" query_cache = {} @property def commandSuffix(self): if self.nickname is not None: return '@' + self.nickname return None def __init__(self, token, outgoing_http, chat_ids, *args, retry_delay=30, **kwargs): super().__init__(*args, **kwargs) self.http_client = outgoing_http self.retry_delay = retry_delay self.token = token self.chat_ids = chat_ids self.nickname = None @defer.inlineCallbacks def startService(self): yield super().startService() for c in self.chat_ids: channel = self.getChannel(c) channel.add_notification_events(self.notify_events) yield self.loadState() results_emoji = { SUCCESS: ' ✅', WARNINGS: ' ⚠️', FAILURE: '❗', EXCEPTION: ' ‼️', RETRY: ' 🔄', CANCELLED: ' 🚫', } def format_build_status(self, build, short=False): br = build['results'] if short: return self.results_emoji[br] else: return self.results_descriptions[br] + \ self.results_emoji[br] def getContact(self, user, channel): """ get a Contact instance for 
``user`` on ``channel`` """ assert isinstance(user, dict), "user must be a dict provided by Telegram API" assert isinstance(channel, dict), "channel must be a dict provided by Telegram API" uid = user['id'] cid = channel['id'] try: contact = self.contacts[(cid, uid)] except KeyError: valid = self.isValidUser(uid) contact = self.contactClass(user=user, channel=self.getChannel(channel, valid)) if valid: self.contacts[(cid, uid)] = contact else: if isinstance(user, dict): contact.user_info.update(user) if isinstance(channel, dict): contact.channel.chat_info.update(channel) return contact def getChannel(self, channel, valid=True): if not isinstance(channel, dict): channel = {'id': channel} cid = channel['id'] try: return self.channels[cid] except KeyError: new_channel = self.channelClass(self, channel) if valid: self.channels[cid] = new_channel new_channel.setServiceParent(self) return new_channel @defer.inlineCallbacks def process_update(self, update): data = {} message = update.get('message') if message is None: query = update.get('callback_query') if query is None: self.log('No message in Telegram update object') return 'no message' original_message = query.get('message', {}) data = query.get('data', 0) try: data = self.query_cache[int(data)] except ValueError: text, data, notify = data, {}, None except KeyError: text, data, notify = None, {}, "Sorry, button is no longer valid!" if original_message: try: self.edit_keyboard( original_message['chat']['id'], original_message['message_id']) except KeyError: pass else: if isinstance(data, dict): data = data.copy() text = data.pop('command') try: notify = data.pop('notify') except KeyError: notify = None else: text, data, notify = data, {}, None data['tquery'] = query self.answer_query(query['id'], notify) message = { 'from': query['from'], 'chat': original_message.get('chat'), 'text': text, } if 'reply_to_message' in original_message: message['reply_to_message'] = original_message['reply_to_message'] chat = message['chat'] user = message.get('from') if user is None: self.log('No user in incoming message') return 'no user' text = message.get('text') if not text: return 'no text in the message' contact = self.getContact(user=user, channel=chat) data['tmessage'] = message template, contact.template = contact.template, None if text.startswith(self.commandPrefix): result = yield contact.handleMessage(text, **data) else: if template: text = template.format(shlex.quote(text)) result = yield contact.handleMessage(text, **data) return result @defer.inlineCallbacks def post(self, path, **kwargs): logme = True while True: try: res = yield self.http_client.post(path, **kwargs) except AssertionError as err: # just for tests raise err except Exception as err: msg = "ERROR: problem sending Telegram request {} (will try again): {}".format(path, err) if logme: self.log(msg) logme = False yield asyncSleep(self.retry_delay) else: ans = yield res.json() if not ans.get('ok'): self.log("ERROR: cannot send Telegram request {}: " "[{}] {}".format(path, res.code, ans.get('description'))) return None return ans.get('result', True) @defer.inlineCallbacks def set_nickname(self): res = yield self.post('/getMe') if res: self.nickname = res.get('username') @defer.inlineCallbacks def answer_query(self, query_id, notify=None): params = dict(callback_query_id=query_id) if notify is not None: params.update(dict(text=notify)) return (yield self.post('/answerCallbackQuery', json=params)) @defer.inlineCallbacks def send_message(self, chat, message, parse_mode='Markdown', 
reply_to_message_id=None, reply_markup=None, **kwargs): result = None message = message.strip() while message: params = dict(chat_id=chat) if parse_mode is not None: params['parse_mode'] = parse_mode if reply_to_message_id is not None: params['reply_to_message_id'] = reply_to_message_id reply_to_message_id = None # we only mark first message as a reply if len(message) <= 4096: params['text'], message = message, None else: n = message[:4096].rfind('\n') n = n + 1 if n != -1 else 4096 params['text'], message = message[:n].rstrip(), message[n:].lstrip() if not message and reply_markup is not None: params['reply_markup'] = reply_markup params.update(kwargs) result = yield self.post('/sendMessage', json=params) return result @defer.inlineCallbacks def edit_message(self, chat, msg, message, parse_mode='Markdown', **kwargs): params = dict(chat_id=chat, message_id=msg, text=message) if parse_mode is not None: params['parse_mode'] = parse_mode params.update(kwargs) return (yield self.post('/editMessageText', json=params)) @defer.inlineCallbacks def edit_keyboard(self, chat, msg, keyboard=None): params = dict(chat_id=chat, message_id=msg) if keyboard is not None: params['reply_markup'] = {'inline_keyboard': keyboard} return (yield self.post('/editMessageReplyMarkup', json=params)) @defer.inlineCallbacks def delete_message(self, chat, msg): params = dict(chat_id=chat, message_id=msg) return (yield self.post('/deleteMessage', json=params)) @defer.inlineCallbacks def send_sticker(self, chat, sticker, **kwargs): params = dict(chat_id=chat, sticker=sticker) params.update(kwargs) return (yield self.post('/sendSticker', json=params)) class TelegramWebhookBot(TelegramStatusBot): name = "TelegramWebhookBot" def __init__(self, token, *args, certificate=None, **kwargs): TelegramStatusBot.__init__(self, token, *args, **kwargs) self._certificate = certificate self.webhook = WebhookResource('telegram' + token) self.webhook.setServiceParent(self) @defer.inlineCallbacks def startService(self): yield super().startService() url = bytes2unicode(self.master.config.buildbotURL) if not url.endswith('/'): url += '/' yield self.set_webhook(url + self.webhook.path, self._certificate) def process_webhook(self, request): update = self.get_update(request) return self.process_update(update) def get_update(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type is not None and \ content_type.startswith('application/json'): update = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) return update @defer.inlineCallbacks def set_webhook(self, url, certificate=None): if not certificate: self.log("Setting up webhook to: {}".format(url)) yield self.post('/setWebhook', json=dict(url=url)) else: self.log("Setting up webhook to: {} (custom certificate)".format(url)) certificate = io.BytesIO(unicode2bytes(certificate)) yield self.post('/setWebhook', data=dict(url=url), files=dict(certificate=certificate)) class TelegramPollingBot(TelegramStatusBot): name = "TelegramPollingBot" def __init__(self, *args, poll_timeout=120, **kwargs): super().__init__(*args, **kwargs) self._polling_finished_notifier = Notifier() self.poll_timeout = poll_timeout def startService(self): super().startService() self._polling_continue = True self.do_polling() @defer.inlineCallbacks def stopService(self): self._polling_continue = False yield self._polling_finished_notifier.wait() yield 
super().stopService() @defer.inlineCallbacks def do_polling(self): yield self.post('/deleteWebhook') offset = 0 kwargs = {'json': {'timeout': self.poll_timeout}} logme = True while self._polling_continue: if offset: kwargs['json']['offset'] = offset try: res = yield self.http_client.post('/getUpdates', timeout=self.poll_timeout + 2, **kwargs) ans = yield res.json() if not ans.get('ok'): raise ValueError("[{}] {}".format(res.code, ans.get('description'))) updates = ans.get('result') except AssertionError as err: raise err except Exception as err: msg = "ERROR: cannot send Telegram request /getUpdates (will try again): {}".format(err) if logme: self.log(msg) logme = False yield asyncSleep(self.retry_delay) else: logme = True if updates: offset = max(update['update_id'] for update in updates) + 1 for update in updates: yield self.process_update(update) self._polling_finished_notifier.notify(None) class TelegramBot(service.BuildbotService): name = "TelegramBot" in_test_harness = False compare_attrs = ["bot_token", "chat_ids", "authz", "tags", "notify_events", "showBlameList", "useRevisions", "certificate", "useWebhook", "pollTimeout", "retryDelay"] secrets = ["bot_token"] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.bot = None def _get_http(self, bot_token): base_url = "https://api.telegram.org/bot" + bot_token return httpclientservice.HTTPClientService.getService( self.master, base_url) def checkConfig(self, bot_token, chat_ids=None, authz=None, bot_username=None, tags=None, notify_events=None, showBlameList=True, useRevisions=False, useWebhook=False, certificate=None, pollTimeout=120, retryDelay=30): super().checkConfig(self.name) if authz is not None: for acl in authz.values(): if not isinstance(acl, (list, tuple, bool)): config.error("authz values must be bool or a list of user ids") if isinstance(certificate, io.TextIOBase): config.error("certificate file must be open in binary mode") @defer.inlineCallbacks def reconfigService(self, bot_token, chat_ids=None, authz=None, bot_username=None, tags=None, notify_events=None, showBlameList=True, useRevisions=False, useWebhook=False, certificate=None, pollTimeout=120, retryDelay=30): # need to stash these so we can detect changes later self.bot_token = bot_token if chat_ids is None: chat_ids = [] self.chat_ids = chat_ids self.authz = authz self.useRevisions = useRevisions self.tags = tags if notify_events is None: notify_events = set() self.notify_events = notify_events self.useWebhook = useWebhook self.certificate = certificate self.pollTimeout = pollTimeout self.retryDelay = retryDelay # This function is only called in case of reconfig with changes # We don't try to be smart here. Just restart the bot if config has # changed. 
http = yield self._get_http(bot_token) if self.bot is not None: self.removeService(self.bot) if not useWebhook: self.bot = TelegramPollingBot(bot_token, http, chat_ids, authz, tags=tags, notify_events=notify_events, useRevisions=useRevisions, showBlameList=showBlameList, poll_timeout=self.pollTimeout, retry_delay=self.retryDelay) else: self.bot = TelegramWebhookBot(bot_token, http, chat_ids, authz, tags=tags, notify_events=notify_events, useRevisions=useRevisions, showBlameList=showBlameList, retry_delay=self.retryDelay, certificate=self.certificate) if bot_username is not None: self.bot.nickname = bot_username else: yield self.bot.set_nickname() if self.bot.nickname is None: raise RuntimeError("No bot username specified and I cannot get it from Telegram") yield self.bot.setServiceParent(self) buildbot-2.6.0/master/buildbot/reporters/templates/000077500000000000000000000000001361162603000224435ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/reporters/templates/default_mail.txt000066400000000000000000000006051361162603000256330ustar00rootroot00000000000000The Buildbot has detected a {{ status_detected }} on builder {{ buildername }} while building {{ projects }}. Full details are available at: {{ build_url }} Buildbot URL: {{ buildbot_url }} Worker for this Build: {{ workername }} Build Reason: {{ build['properties'].get('reason', [""])[0] }} Blamelist: {{ ", ".join(blamelist) }} {{ summary }} Sincerely, -The Buildbot buildbot-2.6.0/master/buildbot/reporters/templates/default_notification.txt000066400000000000000000000002361361162603000273770ustar00rootroot00000000000000The Buildbot has detected a {{ status_detected }} of {{ buildername }} while building {{ projects }} on {{ workername }}.buildbot-2.6.0/master/buildbot/reporters/templates/missing_mail.txt000066400000000000000000000005111361162603000256540ustar00rootroot00000000000000The Buildbot working for '{{buildbot_title}}' has noticed that the worker named {{worker.name}} went away. It last disconnected at {{worker.last_connection}}. {% if 'admin' in worker['workerinfo'] %} The admin on record (as reported by WORKER:info/admin) was {{worker.workerinfo.admin}}. {% endif %} Sincerely, -The Buildbot buildbot-2.6.0/master/buildbot/reporters/templates/missing_notification.txt000066400000000000000000000002221361162603000274170ustar00rootroot00000000000000The Buildbot '{{buildbot_title}}' has noticed that the worker named {{worker.name}} went away. It last disconnected at {{worker.last_connection}}.buildbot-2.6.0/master/buildbot/reporters/utils.py000066400000000000000000000167251361162603000221720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
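# ---------------------------------------------------------------------------
# Illustrative master.cfg sketch for the TelegramBot service defined above.
# The bot token and chat id are placeholders; only keyword arguments from
# TelegramBot.checkConfig() are used, and the notification event names follow
# the set accepted by Channel.add_notification_events().
from buildbot.plugins import reporters

c = BuildmasterConfig = {}
c['services'] = [
    reporters.TelegramBot(
        bot_token='123456789:telegram-token-placeholder',
        bot_username='example_buildbot_bot',   # skips the /getMe lookup at startup
        chat_ids=[-100123456789],              # these chats get notify_events on startup
        notify_events=['worse', 'exception', 'recovery'],
        useWebhook=False,                      # default: long polling via /getUpdates
        pollTimeout=120,
        retryDelay=30,
    ),
]
# ---------------------------------------------------------------------------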
# # Copyright Buildbot Team Members from collections import UserList from twisted.internet import defer from twisted.python import log from buildbot.data import resultspec from buildbot.process.properties import renderer from buildbot.process.results import RETRY from buildbot.util import flatten @defer.inlineCallbacks def getPreviousBuild(master, build): # naive n-1 algorithm. Still need to define what we should skip # SKIP builds? forced builds? rebuilds? # don't hesitate to contribute improvements to that algorithm n = build['number'] - 1 while n >= 0: prev = yield master.data.get(("builders", build['builderid'], "builds", n)) if prev and prev['results'] != RETRY: return prev n -= 1 return None @defer.inlineCallbacks def getDetailsForBuildset(master, bsid, wantProperties=False, wantSteps=False, wantPreviousBuild=False, wantLogs=False): # Here we will do a bunch of data api calls on behalf of the reporters # We do try to make *some* calls in parallel with the help of gatherResults, but don't commit # to much in that. The idea is to do parallelism while keeping the code readable # and maintainable. # first, just get the buildset and all build requests for our buildset id dl = [master.data.get(("buildsets", bsid)), master.data.get(('buildrequests', ), filters=[resultspec.Filter('buildsetid', 'eq', [bsid])])] (buildset, breqs) = yield defer.gatherResults(dl) # next, get the bdictlist for each build request dl = [master.data.get(("buildrequests", breq['buildrequestid'], 'builds')) for breq in breqs] builds = yield defer.gatherResults(dl) builds = flatten(builds, types=(list, UserList)) if builds: yield getDetailsForBuilds(master, buildset, builds, wantProperties=wantProperties, wantSteps=wantSteps, wantPreviousBuild=wantPreviousBuild, wantLogs=wantLogs) return dict(buildset=buildset, builds=builds) @defer.inlineCallbacks def getDetailsForBuild(master, build, wantProperties=False, wantSteps=False, wantPreviousBuild=False, wantLogs=False): buildrequest = yield master.data.get(("buildrequests", build['buildrequestid'])) buildset = yield master.data.get(("buildsets", buildrequest['buildsetid'])) build['buildrequest'], build['buildset'] = buildrequest, buildset ret = yield getDetailsForBuilds(master, buildset, [build], wantProperties=wantProperties, wantSteps=wantSteps, wantPreviousBuild=wantPreviousBuild, wantLogs=wantLogs) return ret @defer.inlineCallbacks def getDetailsForBuilds(master, buildset, builds, wantProperties=False, wantSteps=False, wantPreviousBuild=False, wantLogs=False): builderids = {build['builderid'] for build in builds} builders = yield defer.gatherResults([master.data.get(("builders", _id)) for _id in builderids]) buildersbyid = {builder['builderid']: builder for builder in builders} if wantProperties: buildproperties = yield defer.gatherResults( [master.data.get(("builds", build['buildid'], 'properties')) for build in builds]) else: # we still need a list for the big zip buildproperties = list(range(len(builds))) if wantPreviousBuild: prev_builds = yield defer.gatherResults( [getPreviousBuild(master, build) for build in builds]) else: # we still need a list for the big zip prev_builds = list(range(len(builds))) if wantSteps: buildsteps = yield defer.gatherResults( [master.data.get(("builds", build['buildid'], 'steps')) for build in builds]) if wantLogs: for s in flatten(buildsteps, types=(list, UserList)): logs = yield master.data.get(("steps", s['stepid'], 'logs')) s['logs'] = list(logs) for l in s['logs']: l['content'] = yield master.data.get(("logs", l['logid'], 
'contents')) else: # we still need a list for the big zip buildsteps = list(range(len(builds))) # a big zip to connect everything together for build, properties, steps, prev in zip(builds, buildproperties, buildsteps, prev_builds): build['builder'] = buildersbyid[build['builderid']] build['buildset'] = buildset build['url'] = getURLForBuild( master, build['builderid'], build['number']) if wantProperties: build['properties'] = properties if wantSteps: build['steps'] = list(steps) if wantPreviousBuild: build['prev_build'] = prev # perhaps we need data api for users with sourcestamps/:id/users @defer.inlineCallbacks def getResponsibleUsersForSourceStamp(master, sourcestampid): changesd = master.data.get(("sourcestamps", sourcestampid, "changes")) sourcestampd = master.data.get(("sourcestamps", sourcestampid)) changes, sourcestamp = yield defer.gatherResults([changesd, sourcestampd]) blamelist = set() # normally, we get only one, but just assume there might be several for c in changes: blamelist.add(c['author']) # Add patch author to blamelist if 'patch' in sourcestamp and sourcestamp['patch'] is not None: blamelist.add(sourcestamp['patch']['author']) blamelist = list(blamelist) blamelist.sort() return blamelist # perhaps we need data api for users with builds/:id/users @defer.inlineCallbacks def getResponsibleUsersForBuild(master, buildid): dl = [ master.data.get(("builds", buildid, "changes")), master.data.get(("builds", buildid, 'properties')) ] changes, properties = yield defer.gatherResults(dl) blamelist = set() # add users from changes for c in changes: blamelist.add(c['author']) # add owner from properties if 'owner' in properties: owner = properties['owner'][0] if isinstance(owner, str): blamelist.add(owner) else: blamelist.update(owner) log.msg( "Warning: owner property is a list for buildid {}. ".format(buildid)) log.msg("Please report a bug: changes: {}. properties: {}".format( changes, properties)) # add owner from properties if 'owners' in properties: blamelist.update(properties['owners'][0]) blamelist = list(blamelist) blamelist.sort() return blamelist def getURLForBuild(master, builderid, build_number): prefix = master.config.buildbotURL return prefix + "#builders/%d/builds/%d" % ( builderid, build_number) @renderer def URLForBuild(props): build = props.getBuild() return build.getUrl() buildbot-2.6.0/master/buildbot/reporters/words.py000066400000000000000000001426461361162603000221720ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
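# ---------------------------------------------------------------------------
# Illustrative sketch of how the helpers in reporters/utils.py above are meant
# to be used. The coroutine name and the log line are assumptions; the
# getDetailsForBuild(), getResponsibleUsersForBuild() and getURLForBuild()
# calls follow the signatures defined above.
from twisted.internet import defer
from twisted.python import log

from buildbot.reporters import utils


@defer.inlineCallbacks
def summarize_build(master, build):
    # Annotates ``build`` in place with 'builder', 'buildset', 'properties',
    # 'url' and 'prev_build', the same way the notifier code does before
    # formatting a message.
    yield utils.getDetailsForBuild(master, build,
                                   wantProperties=True,
                                   wantPreviousBuild=True)
    blamelist = yield utils.getResponsibleUsersForBuild(master, build['buildid'])
    url = utils.getURLForBuild(master, build['builderid'], build['number'])
    log.msg("build %d on %s finished with %r (%s): %s" % (
        build['number'], build['builder']['name'], build['results'],
        ", ".join(blamelist), url))
    return build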
# # Copyright Buildbot Team Members import random import re import shlex from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.python import log from twisted.python import usage from twisted.web import resource from twisted.web import server from buildbot import util from buildbot import version from buildbot.data import resultspec from buildbot.plugins.db import get_plugins from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import statusToString from buildbot.reporters import utils from buildbot.util import service from buildbot.util import unicode2bytes # Used in command_HELLO and it's test. 'Hi' in 100 languages. GREETINGS = [ "ږغ كول ، هركلى كول ږغ، هركلى", "Goeie dag", "Tungjatjeta", "Yatasay", "Ahlan bik", "Voghdzuyin", "hola", "kaixo", "Horas", "Pryvitańnie", "Nomoskar", "Oki", "Selam", "Dez-mat", "Zdrávejte", "Mingala ba", "Hola", "Hafa dai", "Oh-see-YOH", "Nín hao", "Bonjou", "Zdravo", "Nazdar", "Hallo", "Hallo", "Iiti", "Kotáka", "Saluton", "Tere", "Hallo", "Hallo", "Bula", "Helo", "Hei", "Goede morgen", "Bonjour", "Hoi", "Ola", "Gamardžoba", "Guten Tag", "Mauri", "Geia!", "Inuugujoq", "Kem cho", "Sannu", "Aloha", "Shalóm", "Namasté", "Szia", "Halló", "Hai", "Kiana", "Dia is muire dhuit", "Buongiorno", "Kónnichi wa", "Salam", "Annyeonghaseyo", "Na", "Sabai dii", "Ave", "Es mīlu tevi", "Labas.", "Selamat petang", "Ni hao", "Kia ora", "Yokwe", "Kwe", "sain baina uu", "niltze", "Yá'át'ééh", "Namaste", "Hallo.", "Salâm", "Witajcie", "Olá", "Kâils", "Aroha", "Salut", "Privét", "Talofa", "Namo namah", "ćao", "Nazdar", "Zdravo", "Hola", "Jambo", "Hej", "Sälü", "Halo", "Selam", "Sàwàtdee kráp", "Dumela", "Merhaba", "Pryvít", "Adaab arz hai", "Chào", "Glidis", "Helo", "Sawubona", "Hoi"] class UsageError(ValueError): # pylint: disable=useless-super-delegation def __init__(self, string="Invalid usage", *more): # This is not useless as we change the default value of an argument. # This bug is reported as "fixed" but apparently, it is not. # https://github.com/PyCQA/pylint/issues/1085 # (Maybe there is a problem with builtin exceptions). super().__init__(string, *more) class ForceOptions(usage.Options): optParameters = [ ["builder", None, None, "which Builder to start"], ["codebase", None, "", "which codebase to build"], ["branch", None, "master", "which branch to build"], ["revision", None, "HEAD", "which revision to build"], ["project", None, "", "which project to build"], ["reason", None, None, "the reason for starting the build"], ["props", None, None, "A set of properties made available in the build environment, " "format is --properties=prop1=value1,prop2=value2,.. 
" "option can be specified multiple times."], ] def parseArgs(self, *args): args = list(args) if args: if self['builder'] is not None: raise UsageError("--builder provided in two ways") self['builder'] = args.pop(0) if args: # args might be modified above if self['reason'] is not None: raise UsageError("--reason provided in two ways") self['reason'] = " ".join(args) dangerous_commands = [] def dangerousCommand(method): command = method.__name__ if not command.startswith('command_'): raise ValueError('@dangerousCommand can be used only for commands') dangerous_commands.append(command[8:]) return method class Channel(service.AsyncService): """ This class holds what should be shared between users on a single channel. In particular it is responsible for maintaining notification states and send notifications. """ def __init__(self, bot, channel): self.name = "Channel({})".format(channel) self.id = channel self.bot = bot self.notify_events = set() self.subscribed = [] self.build_subscriptions = [] self.reported_builds = [] # tuples (when, buildername, buildnum) self.missing_workers = set() self.useRevisions = bot.useRevisions def send(self, message, **kwargs): return self.bot.send_message(self.id, message, **kwargs) def stopService(self): if self.subscribed: self.unsubscribe_from_build_events() def validate_notification_event(self, event): if not re.compile("^(started|finished|success|warnings|failure|exception|" "cancelled|problem|recovery|worse|better|worker|" # this is deprecated list "(success|warnings|failure|exception)To" "(Success|Warnings|Failure|Exception))$").match(event): raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off _EVENT_'.") @defer.inlineCallbacks def list_notified_events(self): if self.notify_events: yield self.send("The following events are being notified: {}." 
.format(", ".join(sorted(self.notify_events)))) else: yield self.send("No events are being notified.") def notify_for(self, *events): for event in events: if event in self.notify_events: return True return False @defer.inlineCallbacks def subscribe_to_build_events(self): startConsuming = self.master.mq.startConsuming def buildStarted(key, msg): return self.buildStarted(msg) def buildFinished(key, msg): return self.buildFinished(msg) def workerEvent(key, msg): if key[2] == 'missing': return self.workerMissing(msg) if key[2] == 'connected': return self.workerConnected(msg) for e, f in (("new", buildStarted), # BuilderStarted ("finished", buildFinished)): # BuilderFinished handle = yield startConsuming(f, ('builders', None, 'builds', None, e)) self.subscribed.append(handle) handle = yield startConsuming(workerEvent, ('workers', None, None)) self.subscribed.append(handle) def unsubscribe_from_build_events(self): # Cancel all the subscriptions we have old_list, self.subscribed = self.subscribed, [] for handle in old_list: handle.stopConsuming() def add_notification_events(self, events): for event in events: self.validate_notification_event(event) self.notify_events.add(event) if not self.subscribed: self.subscribe_to_build_events() def remove_notification_events(self, events): for event in events: self.validate_notification_event(event) self.notify_events.remove(event) if not self.notify_events: self.unsubscribe_from_build_events() def remove_all_notification_events(self): self.notify_events = set() if self.subscribed: self.unsubscribe_from_build_events() def shouldReportBuild(self, builder, buildnum): """Returns True if this build should be reported for this contact (eliminating duplicates), and also records the report for later""" for w, b, n in self.reported_builds: if b == builder and n == buildnum: return False self.reported_builds.append([util.now(), builder, buildnum]) # clean the reported builds horizon = util.now() - 60 while self.reported_builds and self.reported_builds[0][0] < horizon: self.reported_builds.pop(0) # and return True, since this is a new one return True @defer.inlineCallbacks def buildStarted(self, build): builder = yield self.bot.getBuilder(builderid=build['builderid']) builderName = builder['name'] buildNumber = build['number'] log.msg('[Contact] Builder {} started'.format(builder['name'], )) # only notify about builders we are interested in if (self.bot.tags is not None and not self.builderMatchesAnyTag(builder.get('tags', []))): log.msg('Not notifying for a build that does not match any tags') return if not self.notify_for('started'): return if self.useRevisions: revisions = yield self.getRevisionsForBuild(build) r = "Build containing revision(s) {} on {} started" \ .format(','.join(revisions), builderName) else: # Abbreviate long lists of changes to simply two # revisions, and the number of additional changes. 
# TODO: We can't get the list of the changes related to a build in # nine changes_str = "" url = utils.getURLForBuild(self.master, builder['builderid'], build['number']) r = "Build [#{:d}]({}) of `{}` started".format(buildNumber, url, builderName) if changes_str: r += " ({})".format(changes_str) self.send(r + ".") @defer.inlineCallbacks def buildFinished(self, build, watched=False): builder = yield self.bot.getBuilder(builderid=build['builderid']) builderName = builder['name'] buildNumber = build['number'] # only notify about builders we are interested in if (self.bot.tags is not None and not self.bot.builderMatchesAnyTag(builder.get('tags', []))): log.msg('Not notifying for a build that does not match any tags') return if not (watched or (yield self.notify_for_finished(build))): return if not self.shouldReportBuild(builderName, buildNumber): return url = utils.getURLForBuild(self.master, builder['builderid'], buildNumber) if self.useRevisions: revisions = yield self.getRevisionsForBuild(build) r = "Build on `{}` containing revision(s) {} {}" \ .format(builderName, ','.join(revisions), self.bot.format_build_status(build)) else: r = "Build [#{:d}]({}) of `{}` {}" \ .format(buildNumber, url, builderName, self.bot.format_build_status(build)) s = build.get('status_string') if build['results'] != SUCCESS and s is not None: r += ": " + s else: r += "." # FIXME: where do we get the list of changes for a build ? # if self.bot.showBlameList and buildResult != SUCCESS and len(build.changes) != 0: # r += ' blamelist: ' + ', '.join(list(set([c.who for c in build.changes]))) self.send(r) @defer.inlineCallbacks def notify_for_finished(self, build): if self.notify_for('finished'): return True result = build['results'] result_name = statusToString(result) if self.notify_for(result_name): return True if result in self.bot.results_severity and \ (self.notify_for('better', 'worse', 'problem', 'recovery') or any('To' in e for e in self.notify_events)): prev_build = yield self.master.data.get( ('builders', build['builderid'], 'builds', build['number'] - 1)) if prev_build: prev_result = prev_build['results'] if prev_result in self.bot.results_severity: result_severity = self.bot.results_severity.index(result) prev_result_severity = self.bot.results_severity.index(prev_result) if self.notify_for('better') and \ result_severity < prev_result_severity: return True if self.notify_for('worse') and \ result_severity > prev_result_severity: return True if self.notify_for('problem') \ and prev_result in (SUCCESS, WARNINGS) \ and result in (FAILURE, EXCEPTION): return True if self.notify_for('recovery') \ and prev_result in (FAILURE, EXCEPTION) \ and result in (SUCCESS, WARNINGS): return True # DEPRECATED required_notification_control_string = ''.join( (statusToString(prev_result).lower(), 'To', result_name.capitalize())) if (self.notify_for(required_notification_control_string)): return True return False def workerMissing(self, worker): self.missing_workers.add(worker['workerid']) if self.notify_for('worker'): self.send("Worker `{name}` is missing. It was seen last on {last_connection}.".format(**worker)) self.bot.saveMissingWorkers() def workerConnected(self, worker): workerid = worker['workerid'] if workerid in self.missing_workers: self.missing_workers.remove(workerid) if self.notify_for('worker'): self.send("Worker `{name}` is back online.".format(**worker)) self.bot.saveMissingWorkers() class Contact: """I hold the state for a single user's interaction with the buildbot. 
There will be one instance of me for each user who interacts personally with the buildbot. There will be an additional instance for each 'broadcast contact' (chat rooms, IRC channels as a whole). """ def __init__(self, user, channel): """ :param StatusBot bot: StatusBot this Contact belongs to :param user: User ID representing this contact :param channel: Channel this contact is on """ self.user_id = user self.channel = channel @property def bot(self): return self.channel.bot @property def master(self): return self.channel.bot.master @property def is_private_chat(self): return self.user_id == self.channel.id @staticmethod def overrideCommand(meth): try: base_meth = getattr(Contact, meth.__name__) except AttributeError: pass else: try: meth.__doc__ = base_meth.__doc__ except AttributeError: pass try: meth.usage = base_meth.usage except AttributeError: pass return meth # Communication with the user def send(self, message, **kwargs): return self.channel.send(message, **kwargs) def access_denied(self, *args, **kwargs): return self.send("Thou shall not pass, {}!!!".format(self.user_id)) # Main dispatchers for incoming messages def getCommandMethod(self, command): command = command.upper() try: method = getattr(self, 'command_' + command) except AttributeError: return None get_authz = self.bot.authz.get acl = get_authz(command) if acl is None: if command in dangerous_commands: acl = get_authz('!', False) else: acl = get_authz('', True) acl = get_authz('*', acl) if isinstance(acl, (list, tuple)): acl = self.user_id in acl elif acl not in (True, False, None): acl = self.user_id == acl if not acl: return self.access_denied return method @defer.inlineCallbacks def handleMessage(self, message, **kwargs): message = message.lstrip() parts = message.split(' ', 1) if len(parts) == 1: parts = parts + [''] cmd, args = parts cmd_suffix = self.bot.commandSuffix if cmd_suffix and cmd.endswith(cmd_suffix): cmd = cmd[:-len(cmd_suffix)] self.bot.log("Received command `{}` from {}".format(cmd, self.describeUser())) if cmd.startswith(self.bot.commandPrefix): meth = self.getCommandMethod(cmd[len(self.bot.commandPrefix):]) else: meth = None if not meth: if message[-1] == '!': self.send("What you say!") return elif cmd.startswith(self.bot.commandPrefix): self.send("I don't get this '{}'...".format(cmd)) meth = self.command_COMMANDS else: if self.is_private_chat: self.send("Say what?") return try: result = yield meth(args.strip(), **kwargs) except UsageError as e: self.send(str(e)) return except Exception as e: self.bot.log_err(e) self.send("Something bad happened (see logs)") return return result def splitArgs(self, args): """Returns list of arguments parsed by shlex.split() or raise UsageError if failed""" try: return shlex.split(args) except ValueError as e: raise UsageError(e) def command_HELLO(self, args, **kwargs): """say hello""" self.send(random.choice(GREETINGS)) def command_VERSION(self, args, **kwargs): """show buildbot version""" self.send("This is buildbot-{} at your service".format(version)) @defer.inlineCallbacks def command_LIST(self, args, **kwargs): """list configured builders or workers""" args = self.splitArgs(args) all = False num = 10 try: num = int(args[0]) del args[0] except ValueError: if args[0] == 'all': all = True del args[0] except IndexError: pass if not args: raise UsageError("Try '" + self.bot.commandPrefix + "list [all|N] builders|workers|changes'.") if args[0] == 'builders': bdicts = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() response = ["I 
found the following builders:"] for bdict in bdicts: if bdict['builderid'] in online_builderids: response.append(bdict['name']) elif all: response.append(bdict['name']) response.append("[offline]") self.send(' '.join(response)) elif args[0] == 'workers': workers = yield self.master.data.get(('workers',)) response = ["I found the following workers:"] for worker in workers: if worker['configured_on']: response.append(worker['name']) if not worker['connected_to']: response.append("[disconnected]") elif all: response.append(worker['name']) response.append("[offline]") self.send(' '.join(response)) return elif args[0] == 'changes': if all: self.send("Do you really want me to list all changes? It can be thousands!\n" "If you want to be flooded, specify the maximum number of changes to show.\n" "Right now, I will show up to 100 recent changes.") num = 100 changes = yield self.master.db.changes.getRecentChanges(num) response = ["I found the following recent changes:"] for change in reversed(changes): change['comment'] = change['comments'].split('\n')[0] change['date'] = change['when_timestamp'].strftime('%Y-%m-%d %H:%M') response.append( "{comment})\n" "Author: {author}\n" "Date: {date}\n" "Repository: {repository}\n" "Branch: {branch}\n" "Revision: {revision}\n".format(**change)) self.send('\n\n'.join(response)) command_LIST.usage = "list [all|N] builders|workers|changes - " \ "list configured builders, workers, or N recent changes" @defer.inlineCallbacks def command_STATUS(self, args, **kwargs): """list status of a builder (or all builders)""" args = self.splitArgs(args) if not args: which = "" elif len(args) == 1: which = args[0] else: raise UsageError("Try '" + self.bot.commandPrefix + "status _builder_'.") response = [] if which == "": builders = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() for builder in builders: if builder['builderid'] in online_builderids: status = yield self.bot.getBuildStatus(builder['name'], short=True) response.append(status) elif which == "all": builders = yield self.bot.getAllBuilders() for builder in builders: status = yield self.bot.getBuildStatus(builder['name'], short=True) response.append(status) else: status = yield self.bot.getBuildStatus(which) response.append(status) if response: self.send('\n'.join(response)) command_STATUS.usage = "status [_which_] - list status of a builder (or all builders)" @defer.inlineCallbacks def command_NOTIFY(self, args, **kwargs): """notify me about build events""" args = self.splitArgs(args) if not args: raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.") action = args.pop(0) events = args if action in ("on", "on-quiet"): if not events: events = ('started', 'finished') self.channel.add_notification_events(events) if action == "on": yield self.channel.list_notified_events() self.bot.saveNotifyEvents() elif action in ("off", "off-quiet"): if events: self.channel.remove_notification_events(events) else: self.channel.remove_all_notification_events() if action == "off": yield self.channel.list_notified_events() self.bot.saveNotifyEvents() elif action == "list": yield self.channel.list_notified_events() else: raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.") command_NOTIFY.usage = ("notify on|off|list [_EVENT_] ... 
- notify me about build events;" " event should be one or more of: 'started', 'finished', 'failure'," " 'success', 'exception', 'problem', 'recovery', 'better', or 'worse'") @defer.inlineCallbacks def command_WATCH(self, args, **kwargs): """announce the completion of an active build""" args = self.splitArgs(args) if len(args) != 1: raise UsageError("Try '" + self.bot.commandPrefix + "watch _builder_'.") which = args[0] builder = yield self.bot.getBuilder(buildername=which) # Get current builds on this builder. builds = yield self.bot.getRunningBuilds(builder['builderid']) if not builds: self.send("There are no currently running builds.") return def watchForCompleteEvent(key, msg): if key[-1] in ('finished', 'complete'): return self.channel.buildFinished(msg, watched=True) for build in builds: startConsuming = self.master.mq.startConsuming handle = yield startConsuming( watchForCompleteEvent, ('builds', str(build['buildid']), None)) self.channel.build_subscriptions.append((build['buildid'], handle)) url = utils.getURLForBuild(self.master, builder['builderid'], build['number']) if self.bot.useRevisions: revisions = yield self.bot.getRevisionsForBuild(build) r = "Watching build on `{}` containing revision(s) {} until it finishes..." \ .format(which, ','.join(revisions)) else: r = "Watching build [#{:d}]({}) of `{}` until it finishes..." \ .format(build['number'], url, which) self.send(r) command_WATCH.usage = "watch _which_ - announce the completion of an active build" @defer.inlineCallbacks @dangerousCommand def command_FORCE(self, args, **kwargs): """force a build""" # FIXME: NEED TO THINK ABOUT! errReply = "Try '{}{}'".format(self.bot.commandPrefix, self.command_FORCE.usage) args = self.splitArgs(args) if not args: raise UsageError(errReply) what = args.pop(0) if what != "build": raise UsageError(errReply) opts = ForceOptions() opts.parseOptions(args) builderName = opts['builder'] builder = yield self.bot.getBuilder(buildername=builderName) branch = opts['branch'] revision = opts['revision'] codebase = opts['codebase'] project = opts['project'] reason = opts['reason'] props = opts['props'] if builderName is None: raise UsageError("you must provide a Builder, " + errReply) # keep weird stuff out of the branch, revision, and properties args. 
branch_validate = self.master.config.validation['branch'] revision_validate = self.master.config.validation['revision'] pname_validate = self.master.config.validation['property_name'] pval_validate = self.master.config.validation['property_value'] if branch and not branch_validate.match(branch): self.bot.log("Force: bad branch '{}'".format(branch)) self.send("Sorry, bad branch '{}'".format(branch)) return if revision and not revision_validate.match(revision): self.bot.log("Force: bad revision '{}'".format(revision)) self.send("Sorry, bad revision '{}'".format(revision)) return properties = Properties() properties.master = self.master if props: # split props into name:value dict pdict = {} propertylist = props.split(",") for prop in propertylist: splitproperty = prop.split("=", 1) pdict[splitproperty[0]] = splitproperty[1] # set properties for prop in pdict: pname = prop pvalue = pdict[prop] if not pname_validate.match(pname) \ or not pval_validate.match(pvalue): self.bot.log("Force: bad property name='{}', value='{}'" .format(pname, pvalue)) self.send("Sorry, bad property name='{}', value='{}'" .format(pname, pvalue)) return properties.setProperty(pname, pvalue, "Force Build Chat") properties.setProperty("reason", reason, "Force Build Chat") properties.setProperty("owner", self.describeUser(), "Force Build Chat") reason = "forced: by {}: {}".format(self.describeUser(), reason) try: yield self.master.data.updates.addBuildset(builderids=[builder['builderid']], # For now, we just use # this as the id. scheduler="status.words", sourcestamps=[{ 'codebase': codebase, 'branch': branch, 'revision': revision, 'project': project, 'repository': ""}], reason=reason, properties=properties.asDict(), waited_for=False) except AssertionError as e: self.send("I can't: " + str(e)) else: self.send("Force build successfully requested.") command_FORCE.usage = ("force build [--codebase=CODEBASE] [--branch=branch] [--revision=revision]" " [--props=prop1=val1,prop2=val2...] _which_ _reason_ - Force a build") @defer.inlineCallbacks @dangerousCommand def command_STOP(self, args, **kwargs): """stop a running build""" args = self.splitArgs(args) if len(args) < 3 or args[0] != 'build': raise UsageError("Try '" + self.bot.commandPrefix + "stop build _which_ _reason_'.") which = args[1] reason = ' '.join(args[2:]) r = "stopped: by {}: {}".format(self.describeUser(), reason) # find an in-progress build builder = yield self.bot.getBuilder(buildername=which) builderid = builder['builderid'] builds = yield self.bot.getRunningBuilds(builderid) if not builds: self.send("Sorry, no build is currently running.") return for bdict in builds: num = bdict['number'] yield self.master.data.control('stop', {'reason': r}, ('builders', builderid, 'builds', num)) if self.bot.useRevisions: revisions = yield self.bot.getRevisionsForBuild(bdict) response = "Build containing revision(s) {} interrupted".format(','.join( revisions)) else: url = utils.getURLForBuild(self.master, builderid, num) response = "Build [#{:d}]({}) of `{}` interrupted.".format(num, url, which) self.send(response) command_STOP.usage = "stop build _which_ _reason_ - Stop a running build" @defer.inlineCallbacks def command_LAST(self, args, **kwargs): """list last build status for a builder""" # FIXME: NEED TO THINK ABOUT! 
args = self.splitArgs(args) if not args: builders = yield self.bot.getAllBuilders() online_builderids = yield self.bot.getOnlineBuilders() builders = [b for b in builders if b['builderid'] in online_builderids] elif len(args) == 1: arg = args[0] if arg == 'all': builders = yield self.bot.getAllBuilders() else: builder = yield self.bot.getBuilder(buildername=arg) if not builder: raise UsageError("no such builder") builders = [builder] else: raise UsageError("Try '" + self.bot.commandPrefix + "last _builder_'.") messages = [] for builder in builders: lastBuild = yield self.bot.getLastCompletedBuild(builder['builderid']) if not lastBuild: status = "no builds run since last restart" else: complete_at = lastBuild['complete_at'] if complete_at: complete_at = util.datetime2epoch(complete_at) ago = util.fuzzyInterval(int(reactor.seconds() - complete_at)) else: ago = "??" status = self.bot.format_build_status(lastBuild) status = 'last build {} ({} ago)'.format(status, ago) if lastBuild['results'] != SUCCESS: status += ': {}'.format(lastBuild['state_string']) messages.append("`{}`: {}".format(builder['name'], status)) if messages: self.send('\n'.join(messages)) command_LAST.usage = "last [_which_] - list last build status for builder _which_" @classmethod def build_commands(cls): commands = [] for k in dir(cls): if k.startswith('command_'): commands.append(k[8:].lower()) commands.sort() return commands def describeUser(self): if self.is_private_chat: return self.user_id return "{} on {}".format(self.user_id, self.channel.id) # commands def command_HELP(self, args, **kwargs): """give help for a command or one of it's arguments""" args = self.splitArgs(args) if not args: commands = self.build_commands() response = [] for command in commands: meth = getattr(self, 'command_' + command.upper()) doc = getattr(meth, '__doc__', None) if doc: response.append("{} - {}".format(command, doc)) if response: self.send('\n'.join(response)) return command = args[0] if command.startswith(self.bot.commandPrefix): command = command[len(self.bot.commandPrefix):] meth = getattr(self, 'command_' + command.upper(), None) if not meth: raise UsageError("There is no such command '{}'.".format(args[0])) doc = getattr(meth, 'usage', None) if isinstance(doc, dict): if len(args) == 1: k = None # command elif len(args) == 2: k = args[1] # command arg else: k = tuple(args[1:]) # command arg subarg ... doc = doc.get(k, None) elif callable(doc): try: doc = doc(*args[1:]) except (TypeError, ValueError): doc = None if doc: self.send("Usage: {}{}".format(self.bot.commandPrefix, doc)) else: self.send( "No usage info for " + ' '.join(["'{}'".format(arg) for arg in args])) command_HELP.usage = ("help [_command_ _arg_ [_subarg_ ...]] - " "Give help for _command_ or one of it's arguments") def command_SOURCE(self, args, **kwargs): "the source code for buildbot" self.send("My source can be found at " "https://github.com/buildbot/buildbot") command_SOURCE.usage = "source - the source code for Buildbot" def command_COMMANDS(self, args, **kwargs): """list available commands""" commands = self.build_commands() str = "Buildbot commands: " + ", ".join(self.bot.commandPrefix + c for c in commands) self.send(str) command_COMMANDS.usage = "commands - List available commands" @dangerousCommand def command_SHUTDOWN(self, args, **kwargs): """shutdown the buildbot master""" # FIXME: NEED TO THINK ABOUT! 
if args not in ('check', 'start', 'stop', 'now'): raise UsageError("Try '" + self.bot.commandPrefix + "shutdown check|start|stop|now'.") botmaster = self.channel.master.botmaster shuttingDown = botmaster.shuttingDown if args == 'check': if shuttingDown: self.send("Status: buildbot is shutting down.") else: self.send("Status: buildbot is running.") elif args == 'start': if shuttingDown: self.send("Shutdown already started.") else: self.send("Starting clean shutdown.") botmaster.cleanShutdown() elif args == 'stop': if not shuttingDown: self.send("There is no ongoing shutdown to stop.") else: self.send("Stopping clean shutdown.") botmaster.cancelCleanShutdown() elif args == 'now': self.send("Stopping buildbot.") reactor.stop() command_SHUTDOWN.usage = { None: "shutdown check|start|stop|now - shutdown the buildbot master", "check": "shutdown check - check if the buildbot master is running or shutting down", "start": "shutdown start - start a clean shutdown", "stop": "shutdown cancel - stop the clean shutdown", "now": "shutdown now - shutdown immediately without waiting for the builders to finish"} class StatusBot(service.AsyncMultiService): """ Abstract status bot """ contactClass = Contact channelClass = Channel commandPrefix = '' commandSuffix = None offline_string = "offline" idle_string = "idle" running_string = "running:" def __init__(self, authz=None, tags=None, notify_events=None, useRevisions=False, showBlameList=False): super().__init__() self.tags = tags if notify_events is None: notify_events = {} self.notify_events = notify_events self.useRevisions = useRevisions self.showBlameList = showBlameList self.authz = self.expand_authz(authz) self.contacts = {} self.channels = {} @staticmethod def expand_authz(authz): if authz is None: authz = {} expanded_authz = {} for cmds, val in authz.items(): if not isinstance(cmds, (tuple, list)): cmds = (cmds,) for cmd in cmds: expanded_authz[cmd.upper()] = val return expanded_authz def isValidUser(self, user): for auth in self.authz.values(): if auth is True \ or (isinstance(auth, (list, tuple)) and user in auth)\ or user == auth: return True # If user is in '', we have already returned; otherwise check if defaults apply return '' not in self.authz def getContact(self, user, channel): """ get a Contact instance for ``user`` on ``channel`` """ try: return self.contacts[(channel, user)] except KeyError: valid = self.isValidUser(user) new_contact = self.contactClass(user=user, channel=self.getChannel(channel, valid)) if valid: self.contacts[(channel, user)] = new_contact return new_contact def getChannel(self, channel, valid=True): try: return self.channels[channel] except KeyError: new_channel = self.channelClass(self, channel) if valid: self.channels[channel] = new_channel new_channel.setServiceParent(self) return new_channel def _get_object_id(self): return self.master.db.state.getObjectId( self.nickname, '{0.__module__}.{0.__name__}'.format(self.__class__)) @defer.inlineCallbacks def _save_channels_state(self, attr, json_type=None): if json_type is None: json_type = lambda x: x data = [(k, v) for k, v in ((channel.id, json_type(getattr(channel, attr))) for channel in self.channels.values()) if v] try: objectid = yield self._get_object_id() yield self.master.db.state.setState(objectid, attr, data) except Exception as err: self.log_err(err, "saveState '{}'".format(attr)) @defer.inlineCallbacks def _load_channels_state(self, attr, setter): try: objectid = yield self._get_object_id() data = yield self.master.db.state.getState(objectid, attr, ()) 
except Exception as err: self.log_err(err, "loadState ({})".format(attr)) else: if data is not None: for c, d in data: try: setter(self.getChannel(c), d) except Exception as err: self.log_err(err, "loadState '{}' ({})".format(attr, c)) @defer.inlineCallbacks def loadState(self): yield self._load_channels_state('notify_events', lambda c, e: c.add_notification_events(e)) yield self._load_channels_state('missing_workers', lambda c, w: c.missing_workers.update(w)) @defer.inlineCallbacks def saveNotifyEvents(self): yield self._save_channels_state('notify_events', list) @defer.inlineCallbacks def saveMissingWorkers(self): yield self._save_channels_state('missing_workers', list) def send_message(self, chat, message, **kwargs): raise NotImplementedError() def _get_log_system(self, source): if source is None: source = self.__class__.__name__ try: parent = self.parent.name except AttributeError: parent = '-' name = "{},{}".format(parent, source) return name def log(self, msg, source=None): log.callWithContext({"system": self._get_log_system(source)}, log.msg, msg) def log_err(self, error=None, why=None, source=None): log.callWithContext({"system": (self._get_log_system(source))}, log.err, error, why) def builderMatchesAnyTag(self, builder_tags): return any(tag for tag in builder_tags if tag in self.tags) def getRunningBuilds(self, builderid): d = self.master.data.get(('builds',), filters=[resultspec.Filter('builderid', 'eq', [builderid]), resultspec.Filter('complete', 'eq', [False])]) return d def getLastCompletedBuild(self, builderid): d = self.master.data.get(('builds',), filters=[resultspec.Filter('builderid', 'eq', [builderid]), resultspec.Filter('complete', 'eq', [True])], order=['-number'], limit=1) @d.addCallback def listAsOneOrNone(res): if res: return res[0] return None return d def getCurrentBuildstep(self, build): d = self.master.data.get(('builds', build['buildid'], 'steps'), filters=[ resultspec.Filter('complete', 'eq', [False])], order=['number'], limit=1) return d @defer.inlineCallbacks def getBuildStatus(self, which, short=False): response = '`{}`: '.format(which) builder = yield self.getBuilder(buildername=which) builderid = builder['builderid'] runningBuilds = yield self.getRunningBuilds(builderid) # pylint: disable=too-many-nested-blocks if not runningBuilds: onlineBuilders = yield self.getOnlineBuilders() if builderid in onlineBuilders: response += self.idle_string lastBuild = yield self.getLastCompletedBuild(builderid) if lastBuild: complete_at = lastBuild['complete_at'] if complete_at: complete_at = util.datetime2epoch(complete_at) ago = util.fuzzyInterval(int(reactor.seconds() - complete_at)) else: ago = "??" 
status = self.format_build_status(lastBuild, short=short) if not short: status = ", " + status if lastBuild['results'] != SUCCESS: status_string = lastBuild.get('status_string') if status_string: status += ": " + status_string response += ' last build {} ago{}'.format(ago, status) else: response += self.offline_string else: response += self.running_string buildInfo = [] for build in runningBuilds: step = yield self.getCurrentBuildstep(build) if step: s = "({})".format(step[-1]['state_string']) else: s = "(no current step)" bnum = build['number'] url = utils.getURLForBuild(self.master, builderid, bnum) buildInfo.append("build [#{:d}]({}) {}".format(bnum, url, s)) response += ' ' + ', '.join(buildInfo) return response @defer.inlineCallbacks def getBuilder(self, buildername=None, builderid=None): if buildername: bdicts = yield self.master.data.get(('builders',), filters=[resultspec.Filter('name', 'eq', [buildername])]) if bdicts: # Could there be more than one? One is enough. bdict = bdicts[0] else: bdict = None elif builderid: bdict = yield self.master.data.get(('builders', builderid)) else: raise UsageError("no builder specified") if bdict is None: if buildername: which = buildername else: which = 'number {}'.format(builderid) raise UsageError("no such builder '{}'".format(which)) return bdict def getAllBuilders(self): d = self.master.data.get(('builders',)) return d @defer.inlineCallbacks def getOnlineBuilders(self): all_workers = yield self.master.data.get(('workers',)) online_builderids = set() for worker in all_workers: connected = worker['connected_to'] if not connected: continue builders = worker['configured_on'] builderids = [builder['builderid'] for builder in builders] online_builderids.update(builderids) return list(online_builderids) @defer.inlineCallbacks def getRevisionsForBuild(self, bdict): # FIXME: Need to get revision info! (build -> buildreq -> buildset -> # sourcestamps) return ["TODO"] results_descriptions = { SKIPPED: "was skipped", SUCCESS: "completed successfully", WARNINGS: "completed with warnings", FAILURE: "failed", EXCEPTION: "stopped with exception", RETRY: "has been retried", CANCELLED: "was cancelled", } results_severity = ( SKIPPED, SUCCESS, WARNINGS, FAILURE, CANCELLED, EXCEPTION ) def format_build_status(self, build, short=False): """ Optionally add color to the message """ return self.results_descriptions[build['results']] class ThrottledClientFactory(protocol.ClientFactory): lostDelay = random.randint(1, 5) failedDelay = random.randint(45, 60) def __init__(self, lostDelay=None, failedDelay=None): if lostDelay is not None: self.lostDelay = lostDelay if failedDelay is not None: self.failedDelay = failedDelay def clientConnectionLost(self, connector, reason): reactor.callLater(self.lostDelay, connector.connect) def clientConnectionFailed(self, connector, reason): reactor.callLater(self.failedDelay, connector.connect) class WebhookResource(resource.Resource, service.AsyncService): """ This is a service be used by chat bots based on web-hooks. It automatically sets and deletes the resource and calls ``process_webhook`` method of its parent. 
""" def __init__(self, path): resource.Resource.__init__(self) www = get_plugins('www', None, load_now=True) if 'base' not in www: raise RuntimeError("could not find buildbot-www; is it installed?") self._root = www.get('base').resource self.path = path def startService(self): self._root.putChild(unicode2bytes(self.path), self) try: super().startService() except AttributeError: pass def stopService(self): try: super().stopService() except AttributeError: pass self._root.delEntity(unicode2bytes(self.path)) def render_GET(self, request): return self.render_POST(request) def render_POST(self, request): try: d = self.parent.process_webhook(request) except Exception: d = defer.fail() def ok(_): request.setResponseCode(202) request.finish() def err(error): try: self.parent.log_err(error, "processing telegram request", self.__class__.__name__) except AttributeError: log.err(error, "processing telegram request") request.setResponseCode(500) request.finish() d.addCallbacks(ok, err) return server.NOT_DONE_YET buildbot-2.6.0/master/buildbot/reporters/zulip.py000066400000000000000000000053041361162603000221640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.reporters.http import HttpStatusPushBase from buildbot.util import httpclientservice from buildbot.util.logger import Logger log = Logger() class ZulipStatusPush(HttpStatusPushBase): name = "ZulipStatusPush" neededDetails = dict(wantProperties=True) def checkConfig(self, endpoint, token, stream=None, **kwargs): if not isinstance(endpoint, str): config.error("Endpoint must be a string") if not isinstance(token, str): config.error("Token must be a string") super().checkConfig(**kwargs) @defer.inlineCallbacks def reconfigService(self, endpoint, token, stream=None, **kwargs): super().reconfigService(**kwargs) self._http = yield httpclientservice.HTTPClientService.getService( self.master, endpoint, debug=self.debug, verify=self.verify) self.token = token self.stream = stream @defer.inlineCallbacks def send(self, build): event = ("new", "finished")[0 if build["complete_at"] is None else 1] jsondata = dict(event=event, buildid=build["buildid"], buildername=build["builder"]["name"], url=build["url"], project=build["properties"]["project"][0]) if event == "new": jsondata["timestamp"] = int(build["started_at"].timestamp()) elif event == "finished": jsondata["timestamp"] = int(build["complete_at"].timestamp()) jsondata["results"] = build["results"] if self.stream is not None: url = "/api/v1/external/buildbot?api_key={}&stream={}".format(self.token, self.stream) else: url = "/api/v1/external/buildbot?api_key={}".format(self.token) response = yield self._http.post(url, json=jsondata) if response.code != 200: content = yield response.content() log.error("{code}: Error pushing build status to Zulip: {content}", code=response.code, content=content) buildbot-2.6.0/master/buildbot/revlinks.py000066400000000000000000000060241361162603000206310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
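# Illustrative master.cfg fragment: registering the ZulipStatusPush reporter
# defined above.  It needs the Zulip server endpoint, the bot API key and an
# optional stream name; the endpoint, token and stream values below are
# placeholders, and `c` stands in for the usual BuildmasterConfig dict.

from buildbot.reporters.zulip import ZulipStatusPush

c = {}  # in a real master.cfg this is c = BuildmasterConfig
c['services'] = [
    ZulipStatusPush(endpoint="https://example.zulipchat.com",
                    token="<bot-api-key>",
                    stream="buildbot"),
]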
# # Copyright Buildbot Team Members import re class RevlinkMatch: def __init__(self, repo_urls, revlink): if isinstance(repo_urls, str): repo_urls = [repo_urls] self.repo_urls = [re.compile(url) for url in repo_urls] self.revlink = revlink def __call__(self, rev, repo): for url in self.repo_urls: m = url.match(repo) if m: return m.expand(self.revlink) % rev GithubRevlink = RevlinkMatch( repo_urls=[r'https://github.com/([^/]*)/([^/]*?)(?:\.git)?$', r'git://github.com/([^/]*)/([^/]*?)(?:\.git)?$', r'git@github.com:([^/]*)/([^/]*?)(?:\.git)?$', r'ssh://git@github.com/([^/]*)/([^/]*?)(?:\.git)?$' ], revlink=r'https://github.com/\1/\2/commit/%s') class GitwebMatch(RevlinkMatch): def __init__(self, repo_urls, revlink): super().__init__(repo_urls=repo_urls, revlink=revlink + r'?p=\g;a=commit;h=%s') SourceforgeGitRevlink = GitwebMatch( repo_urls=[r'^git://([^.]*).git.sourceforge.net/gitroot/(?P.*)$', r'[^@]*@([^.]*).git.sourceforge.net:gitroot/(?P.*)$', r'ssh://(?:[^@]*@)?([^.]*).git.sourceforge.net/gitroot/(?P.*)$', ], revlink=r'http://\1.git.sourceforge.net/git/gitweb.cgi') # SourceForge recently upgraded to another platform called Allura # See introduction: https://sourceforge.net/p/forge/documentation/Classic%20vs%20New%20SourceForge%20projects/ # And as reference: # https://sourceforge.net/p/forge/community-docs/SVN%20and%20project%20upgrades/ SourceforgeGitRevlink_AlluraPlatform = RevlinkMatch( repo_urls=[r'git://git.code.sf.net/p/(?P.*)$', r'http://git.code.sf.net/p/(?P.*)$', r'ssh://(?:[^@]*@)?git.code.sf.net/p/(?P.*)$' ], revlink=r'https://sourceforge.net/p/\1/ci/%s/') class RevlinkMultiplexer: def __init__(self, *revlinks): self.revlinks = revlinks def __call__(self, rev, repo): for revlink in self.revlinks: url = revlink(rev, repo) if url: return url default_revlink_matcher = RevlinkMultiplexer(GithubRevlink, SourceforgeGitRevlink, SourceforgeGitRevlink_AlluraPlatform) buildbot-2.6.0/master/buildbot/scheduler.py000066400000000000000000000023731361162603000207550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
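# Illustrative sketch for the revlink matchers defined above.  Each matcher
# maps a (revision, repository URL) pair to a commit URL, e.g. with the
# GithubRevlink patterns:
#
#   GithubRevlink('1234abcd', 'git@github.com:buildbot/buildbot.git')
#   -> 'https://github.com/buildbot/buildbot/commit/1234abcd'
#
# A site-specific matcher can be combined with the defaults via
# RevlinkMultiplexer; the git.example.org host and URL scheme below are
# placeholder assumptions.

from buildbot.revlinks import (RevlinkMatch, RevlinkMultiplexer,
                               default_revlink_matcher)

example_revlink = RevlinkMultiplexer(
    RevlinkMatch(
        repo_urls=[r'https://git\.example\.org/(?P<repo>.*?)(?:\.git)?$'],
        revlink=r'https://git.example.org/\g<repo>/commit/%s'),
    default_revlink_matcher)

# In master.cfg this would typically be installed as the global revlink
# resolver (assuming the standard option name):
# c['revlink'] = example_revlink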
# # Copyright Buildbot Team Members from buildbot.schedulers.basic import AnyBranchScheduler from buildbot.schedulers.basic import Scheduler from buildbot.schedulers.dependent import Dependent from buildbot.schedulers.timed import Nightly from buildbot.schedulers.timed import Periodic from buildbot.schedulers.triggerable import Triggerable from buildbot.schedulers.trysched import Try_Jobdir from buildbot.schedulers.trysched import Try_Userpass _hush_pyflakes = [Scheduler, AnyBranchScheduler, Dependent, Periodic, Nightly, Triggerable, Try_Jobdir, Try_Userpass] del _hush_pyflakes buildbot-2.6.0/master/buildbot/schedulers/000077500000000000000000000000001361162603000205615ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/schedulers/__init__.py000066400000000000000000000000001361162603000226600ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/schedulers/base.py000066400000000000000000000361011361162603000220460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot.changes import changes from buildbot.process.properties import Properties from buildbot.util.service import ClusteredBuildbotService from buildbot.util.state import StateMixin @implementer(interfaces.IScheduler) class BaseScheduler(ClusteredBuildbotService, StateMixin): DEFAULT_CODEBASES = {'': {}} compare_attrs = ClusteredBuildbotService.compare_attrs + \ ('builderNames', 'properties', 'codebases') def __init__(self, name, builderNames, properties=None, codebases=DEFAULT_CODEBASES): super(BaseScheduler, self).__init__(name=name) ok = True if interfaces.IRenderable.providedBy(builderNames): pass elif isinstance(builderNames, (list, tuple)): for b in builderNames: if not isinstance(b, str) and \ not interfaces.IRenderable.providedBy(b): ok = False else: ok = False if not ok: config.error( "The builderNames argument to a scheduler must be a list " "of Builder names or an IRenderable object that will render" "to a list of builder names.") self.builderNames = builderNames if properties is None: properties = {} self.properties = Properties() self.properties.update(properties, "Scheduler") self.properties.setProperty("scheduler", name, "Scheduler") self.objectid = None # Set the codebases that are necessary to process the changes # These codebases will always result in a sourcestamp with or without # changes known_keys = set(['branch', 'repository', 'revision']) if codebases is None: config.error("Codebases cannot be None") elif isinstance(codebases, list): codebases = dict((codebase, {}) for codebase in codebases) elif not isinstance(codebases, dict): config.error( "Codebases must be a dict of dicts, or list of strings") else: for codebase, 
attrs in codebases.items(): if not isinstance(attrs, dict): config.error("Codebases must be a dict of dicts") else: unk = set(attrs) - known_keys if unk: config.error( "Unknown codebase keys %s for codebase %s" % (', '.join(unk), codebase)) self.codebases = codebases # internal variables self._change_consumer = None self._enable_consumer = None self._change_consumption_lock = defer.DeferredLock() self.enabled = True def reconfigService(self, *args, **kwargs): raise NotImplementedError() # activity handling @defer.inlineCallbacks def activate(self): if not self.enabled: return None # even if we aren't called via _activityPoll(), at this point we # need to ensure the service id is set correctly if self.serviceid is None: self.serviceid = yield self._getServiceId() assert self.serviceid is not None schedulerData = yield self._getScheduler(self.serviceid) if schedulerData: self.enabled = schedulerData['enabled'] if not self._enable_consumer: yield self.startConsumingEnableEvents() def _enabledCallback(self, key, msg): if msg['enabled']: self.enabled = True d = self.activate() else: d = self.deactivate() def fn(x): self.enabled = False d.addCallback(fn) return d @defer.inlineCallbacks def deactivate(self): if not self.enabled: return yield defer.maybeDeferred(self._stopConsumingChanges) # service handling def _getServiceId(self): return self.master.data.updates.findSchedulerId(self.name) def _getScheduler(self, sid): return self.master.db.schedulers.getScheduler(sid) def _claimService(self): return self.master.data.updates.trySetSchedulerMaster(self.serviceid, self.master.masterid) def _unclaimService(self): return self.master.data.updates.trySetSchedulerMaster(self.serviceid, None) # status queries # deprecated: these aren't compatible with distributed schedulers def listBuilderNames(self): return self.builderNames # change handling @defer.inlineCallbacks def startConsumingChanges(self, fileIsImportant=None, change_filter=None, onlyImportant=False): assert fileIsImportant is None or callable(fileIsImportant) # register for changes with the data API assert not self._change_consumer self._change_consumer = yield self.master.mq.startConsuming( lambda k, m: self._changeCallback(k, m, fileIsImportant, change_filter, onlyImportant), ('changes', None, 'new')) @defer.inlineCallbacks def startConsumingEnableEvents(self): assert not self._enable_consumer self._enable_consumer = yield self.master.mq.startConsuming( self._enabledCallback, ('schedulers', str(self.serviceid), 'updated')) @defer.inlineCallbacks def _changeCallback(self, key, msg, fileIsImportant, change_filter, onlyImportant): # ignore changes delivered while we're not running if not self._change_consumer: return # get a change object, since the API requires it chdict = yield self.master.db.changes.getChange(msg['changeid']) change = yield changes.Change.fromChdict(self.master, chdict) # filter it if change_filter and not change_filter.filter_change(change): return if change.codebase not in self.codebases: log.msg(format='change contains codebase %(codebase)s that is ' 'not processed by scheduler %(name)s', codebase=change.codebase, name=self.name) return if fileIsImportant: try: important = fileIsImportant(change) if not important and onlyImportant: return except Exception: log.err(failure.Failure(), 'in fileIsImportant check for %s' % change) return else: important = True # use change_consumption_lock to ensure the service does not stop # while this change is being processed d = self._change_consumption_lock.run( self.gotChange, change, 
important) d.addErrback(log.err, 'while processing change') def _stopConsumingChanges(self): # (note: called automatically in deactivate) # acquire the lock change consumption lock to ensure that any change # consumption is complete before we are done stopping consumption def stop(): if self._change_consumer: self._change_consumer.stopConsuming() self._change_consumer = None return self._change_consumption_lock.run(stop) def gotChange(self, change, important): raise NotImplementedError # starting builds @defer.inlineCallbacks def addBuildsetForSourceStampsWithDefaults(self, reason, sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] # convert sourcestamps to a dictionary keyed by codebase stampsByCodebase = {} for ss in sourcestamps: cb = ss['codebase'] if cb in stampsByCodebase: raise RuntimeError("multiple sourcestamps with same codebase") stampsByCodebase[cb] = ss # Merge codebases with the passed list of sourcestamps # This results in a new sourcestamp for each codebase stampsWithDefaults = [] for codebase in self.codebases: cb = yield self.getCodebaseDict(codebase) ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } # apply info from passed sourcestamps onto the configured default # sourcestamp attributes for this codebase. ss.update(stampsByCodebase.get(codebase, {})) stampsWithDefaults.append(ss) # fill in any supplied sourcestamps that aren't for a codebase in the # scheduler's codebase dictionary for codebase in set(stampsByCodebase) - set(self.codebases): cb = stampsByCodebase[codebase] ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } stampsWithDefaults.append(ss) rv = yield self.addBuildsetForSourceStamps( sourcestamps=stampsWithDefaults, reason=reason, waited_for=waited_for, properties=properties, builderNames=builderNames, **kw) return rv def getCodebaseDict(self, codebase): # Hook for subclasses to change codebase parameters when a codebase does # not have a change associated with it. 
try: return defer.succeed(self.codebases[codebase]) except KeyError: return defer.fail() @defer.inlineCallbacks def addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): if changeids is None: changeids = [] changesByCodebase = {} def get_last_change_for_codebase(codebase): return max(changesByCodebase[codebase], key=lambda change: change["changeid"]) # Changes are retrieved from database and grouped by their codebase for changeid in changeids: chdict = yield self.master.db.changes.getChange(changeid) changesByCodebase.setdefault(chdict["codebase"], []).append(chdict) sourcestamps = [] for codebase in sorted(self.codebases): if codebase not in changesByCodebase: # codebase has no changes # create a sourcestamp that has no changes cb = yield self.getCodebaseDict(codebase) ss = { 'codebase': codebase, 'repository': cb.get('repository', ''), 'branch': cb.get('branch', None), 'revision': cb.get('revision', None), 'project': '', } else: lastChange = get_last_change_for_codebase(codebase) ss = lastChange['sourcestampid'] sourcestamps.append(ss) # add one buildset, using the calculated sourcestamps bsid, brids = yield self.addBuildsetForSourceStamps( waited_for, sourcestamps=sourcestamps, reason=reason, external_idstring=external_idstring, builderNames=builderNames, properties=properties, **kw) return (bsid, brids) @defer.inlineCallbacks def addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] # combine properties if properties: properties.updateFromProperties(self.properties) else: properties = self.properties # make a fresh copy that we actually can modify safely properties = Properties.fromDict(properties.asDict()) # make extra info available from properties.render() properties.master = self.master properties.sourcestamps = [] properties.changes = [] for ss in sourcestamps: if isinstance(ss, int): # fetch actual sourcestamp and changes from data API properties.sourcestamps.append( (yield self.master.data.get(('sourcestamps', ss)))) properties.changes.extend( (yield self.master.data.get(('sourcestamps', ss, 'changes')))) else: # sourcestamp with no change, see addBuildsetForChanges properties.sourcestamps.append(ss) for c in properties.changes: properties.updateFromProperties(Properties.fromDict(c['properties'])) # apply the default builderNames if not builderNames: builderNames = self.builderNames # dynamically get the builder list to schedule builderNames = yield properties.render(builderNames) # Get the builder ids # Note that there is a data.updates.findBuilderId(name) # but that would merely only optimize the single builder case, while # probably the multiple builder case will be severely impacted by the # several db requests needed. 
builderids = list() for bldr in (yield self.master.data.get(('builders', ))): if bldr['name'] in builderNames: builderids.append(bldr['builderid']) # translate properties object into a dict as required by the # addBuildset method properties_dict = yield properties.render(properties.asDict()) bsid, brids = yield self.master.data.updates.addBuildset( scheduler=self.name, sourcestamps=sourcestamps, reason=reason, waited_for=waited_for, properties=properties_dict, builderids=builderids, external_idstring=external_idstring, **kw) return (bsid, brids) buildbot-2.6.0/master/buildbot/schedulers/basic.py000066400000000000000000000251731361162603000222240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import defaultdict from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot import config from buildbot import util from buildbot.changes import changes from buildbot.changes.filter import ChangeFilter from buildbot.schedulers import base from buildbot.schedulers import dependent from buildbot.util import NotABranch from buildbot.util.codebase import AbsoluteSourceStampsMixin class BaseBasicScheduler(base.BaseScheduler): """ @param onlyImportant: If True, only important changes will be added to the buildset. @type onlyImportant: boolean """ compare_attrs = ('treeStableTimer', 'change_filter', 'fileIsImportant', 'onlyImportant', 'reason') _reactor = reactor # for tests fileIsImportant = None reason = '' class NotSet: pass def __init__(self, name, shouldntBeSet=NotSet, treeStableTimer=None, builderNames=None, branch=NotABranch, branches=NotABranch, fileIsImportant=None, categories=None, reason="The %(classname)s scheduler named '%(name)s' triggered this build", change_filter=None, onlyImportant=False, **kwargs): if shouldntBeSet is not self.NotSet: config.error( "pass arguments to schedulers using keyword arguments") if fileIsImportant and not callable(fileIsImportant): config.error( "fileIsImportant must be a callable") # initialize parent classes super().__init__(name, builderNames, **kwargs) self.treeStableTimer = treeStableTimer if fileIsImportant is not None: self.fileIsImportant = fileIsImportant self.onlyImportant = onlyImportant self.change_filter = self.getChangeFilter(branch=branch, branches=branches, change_filter=change_filter, categories=categories) # the IDelayedCall used to wake up when this scheduler's # treeStableTimer expires. 
self._stable_timers = defaultdict(lambda: None) self._stable_timers_lock = defer.DeferredLock() self.reason = util.bytes2unicode(reason % { 'name': name, 'classname': self.__class__.__name__ }) def getChangeFilter(self, branch, branches, change_filter, categories): raise NotImplementedError @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return yield self.startConsumingChanges(fileIsImportant=self.fileIsImportant, change_filter=self.change_filter, onlyImportant=self.onlyImportant) # if we have a treeStableTimer, if there are classified changes # out there, start their timers again if self.treeStableTimer: yield self.scanExistingClassifiedChanges() # otherwise, we don't care about classified # changes, so get rid of any hanging around from previous # configurations else: yield self.master.db.schedulers.flushChangeClassifications(self.serviceid) @defer.inlineCallbacks def deactivate(self): # the base deactivate will unsubscribe from new changes yield super().deactivate() if not self.enabled: return @util.deferredLocked(self._stable_timers_lock) def cancel_timers(): for timer in self._stable_timers.values(): if timer: timer.cancel() self._stable_timers.clear() yield cancel_timers() @util.deferredLocked('_stable_timers_lock') def gotChange(self, change, important): if not self.treeStableTimer: # if there's no treeStableTimer, we can completely ignore # unimportant changes if not important: return defer.succeed(None) # otherwise, we'll build it right away return self.addBuildsetForChanges(reason=self.reason, changeids=[change.number]) timer_name = self.getTimerNameForChange(change) # if we have a treeStableTimer # - for an important change, start the timer # - for an unimportant change, reset the timer if it is running if important or self._stable_timers[timer_name]: if self._stable_timers[timer_name]: self._stable_timers[timer_name].cancel() def fire_timer(): d = self.stableTimerFired(timer_name) d.addErrback(log.err, "while firing stable timer") self._stable_timers[timer_name] = self._reactor.callLater( self.treeStableTimer, fire_timer) # record the change's importance return self.master.db.schedulers.classifyChanges( self.serviceid, {change.number: important}) @defer.inlineCallbacks def scanExistingClassifiedChanges(self): # call gotChange for each classified change. This is called at startup # and is intended to re-start the treeStableTimer for any changes that # had not yet been built when the scheduler was stopped. # NOTE: this may double-call gotChange for changes that arrive just as # the scheduler starts up. In practice, this doesn't hurt anything. 
classifications = \ yield self.master.db.schedulers.getChangeClassifications(self.serviceid) # call gotChange for each change, after first fetching it from the db for changeid, important in classifications.items(): chdict = yield self.master.db.changes.getChange(changeid) if not chdict: continue change = yield changes.Change.fromChdict(self.master, chdict) yield self.gotChange(change, important) def getTimerNameForChange(self, change): raise NotImplementedError # see subclasses def getChangeClassificationsForTimer(self, sched_id, timer_name): """similar to db.schedulers.getChangeClassifications, but given timer name""" raise NotImplementedError # see subclasses @util.deferredLocked('_stable_timers_lock') @defer.inlineCallbacks def stableTimerFired(self, timer_name): # delete this now-fired timer, if the service has already been stopped # then just bail out if not self._stable_timers.pop(timer_name, None): return classifications = \ yield self.getChangeClassificationsForTimer(self.serviceid, timer_name) # just in case: databases do weird things sometimes! if not classifications: # pragma: no cover return changeids = sorted(classifications.keys()) yield self.addBuildsetForChanges(reason=self.reason, changeids=changeids) max_changeid = changeids[-1] # (changeids are sorted) yield self.master.db.schedulers.flushChangeClassifications( self.serviceid, less_than=max_changeid + 1) class SingleBranchScheduler(AbsoluteSourceStampsMixin, BaseBasicScheduler): def __init__(self, name, createAbsoluteSourceStamps=False, **kwargs): self.createAbsoluteSourceStamps = createAbsoluteSourceStamps super().__init__(name, **kwargs) @defer.inlineCallbacks def gotChange(self, change, important): if self.createAbsoluteSourceStamps: yield self.recordChange(change) yield super().gotChange(change, important) def getCodebaseDict(self, codebase): if self.createAbsoluteSourceStamps: return super().getCodebaseDict(codebase) return self.codebases[codebase] def getChangeFilter(self, branch, branches, change_filter, categories): if branch is NotABranch and not change_filter: config.error( "the 'branch' argument to SingleBranchScheduler is " + "mandatory unless change_filter is provided") elif branches is not NotABranch: config.error( "the 'branches' argument is not allowed for " + "SingleBranchScheduler") return ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter, branch=branch, categories=categories) def getTimerNameForChange(self, change): return "only" # this class only uses one timer def getChangeClassificationsForTimer(self, sched_id, timer_name): return self.master.db.schedulers.getChangeClassifications(sched_id) class Scheduler(SingleBranchScheduler): "alias for SingleBranchScheduler" def __init__(self, *args, **kwargs): log.msg("WARNING: the name 'Scheduler' is deprecated; use " + "buildbot.schedulers.basic.SingleBranchScheduler instead " + "(note that this may require you to change your import " + "statement)") super().__init__(*args, **kwargs) class AnyBranchScheduler(BaseBasicScheduler): def getChangeFilter(self, branch, branches, change_filter, categories): assert branch is NotABranch return ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter, branch=branches, categories=categories) def getTimerNameForChange(self, change): # Py2.6+: could be a namedtuple return (change.codebase, change.project, change.repository, change.branch) def getChangeClassificationsForTimer(self, sched_id, timer_name): # set in getTimerNameForChange codebase, project, repository, branch = timer_name 
return self.master.db.schedulers.getChangeClassifications( sched_id, branch=branch, repository=repository, codebase=codebase, project=project) # now at buildbot.schedulers.dependent, but keep the old name alive Dependent = dependent.Dependent buildbot-2.6.0/master/buildbot/schedulers/dependent.py000066400000000000000000000142501361162603000231030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot import interfaces from buildbot import util from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.schedulers import base class Dependent(base.BaseScheduler): compare_attrs = ('upstream_name',) def __init__(self, name, upstream, builderNames, **kwargs): super().__init__(name, builderNames, **kwargs) if not interfaces.IScheduler.providedBy(upstream): config.error( "upstream must be another Scheduler instance") self.upstream_name = upstream.name self._buildset_new_consumer = None self._buildset_complete_consumer = None self._cached_upstream_bsids = None # the subscription lock makes sure that we're done inserting a # subscription into the DB before registering that the buildset is # complete. 
self._subscription_lock = defer.DeferredLock() @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return self._buildset_new_consumer = yield self.master.mq.startConsuming( self._buildset_new_cb, ('buildsets', None, 'new')) # TODO: refactor to subscribe only to interesting buildsets, and # subscribe to them directly, via the data API self._buildset_complete_consumer = yield self.master.mq.startConsuming( self._buildset_complete_cb, ('buildsets', None, 'complete')) # check for any buildsets completed before we started yield self._checkCompletedBuildsets(None, ) @defer.inlineCallbacks def deactivate(self): # the base deactivate will unsubscribe from new changes yield super().deactivate() if not self.enabled: return if self._buildset_new_consumer: self._buildset_new_consumer.stopConsuming() if self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._cached_upstream_bsids = None @util.deferredLocked('_subscription_lock') def _buildset_new_cb(self, key, msg): # check if this was submitted by our upstream if msg['scheduler'] != self.upstream_name: return # record our interest in this buildset return self._addUpstreamBuildset(msg['bsid']) def _buildset_complete_cb(self, key, msg): return self._checkCompletedBuildsets(msg['bsid']) @util.deferredLocked('_subscription_lock') @defer.inlineCallbacks def _checkCompletedBuildsets(self, bsid): subs = yield self._getUpstreamBuildsets() sub_bsids = [] for (sub_bsid, sub_ssids, sub_complete, sub_results) in subs: # skip incomplete builds, handling the case where the 'complete' # column has not been updated yet if not sub_complete and sub_bsid != bsid: continue # build a dependent build if the status is appropriate. Note that # this uses the sourcestamps from the buildset, not from any of the # builds performed to complete the buildset (since those might # differ from one another) if sub_results in (SUCCESS, WARNINGS): yield self.addBuildsetForSourceStamps( sourcestamps=sub_ssids.copy(), reason='downstream') sub_bsids.append(sub_bsid) # and regardless of status, remove the subscriptions yield self._removeUpstreamBuildsets(sub_bsids) @defer.inlineCallbacks def _updateCachedUpstreamBuilds(self): if self._cached_upstream_bsids is None: bsids = yield self.master.db.state.getState(self.objectid, 'upstream_bsids', []) self._cached_upstream_bsids = bsids @defer.inlineCallbacks def _getUpstreamBuildsets(self): # get a list of (bsid, ssids, complete, results) for all # upstream buildsets yield self._updateCachedUpstreamBuilds() changed = False rv = [] for bsid in self._cached_upstream_bsids[:]: buildset = yield self.master.data.get(('buildsets', str(bsid))) if not buildset: self._cached_upstream_bsids.remove(bsid) changed = True continue ssids = [ss['ssid'] for ss in buildset['sourcestamps']] rv.append((bsid, ssids, buildset['complete'], buildset['results'])) if changed: yield self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) return rv @defer.inlineCallbacks def _addUpstreamBuildset(self, bsid): yield self._updateCachedUpstreamBuilds() if bsid not in self._cached_upstream_bsids: self._cached_upstream_bsids.append(bsid) yield self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) @defer.inlineCallbacks def _removeUpstreamBuildsets(self, bsids): yield self._updateCachedUpstreamBuilds() old = set(self._cached_upstream_bsids) self._cached_upstream_bsids = list(old - set(bsids)) yield 
self.master.db.state.setState(self.objectid, 'upstream_bsids', self._cached_upstream_bsids) buildbot-2.6.0/master/buildbot/schedulers/filter.py000066400000000000000000000015261361162603000224240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # old (pre-0.8.4) location for ChangeFilter from buildbot.changes.filter import ChangeFilter _hush_pyflakes = ChangeFilter # keep pyflakes happy buildbot-2.6.0/master/buildbot/schedulers/forcesched.py000066400000000000000000000743571361162603000232600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re import traceback from twisted.internet import defer from twisted.python.reflect import accumulateClassList from buildbot import config from buildbot.process.properties import Properties from buildbot.reporters.mail import VALID_EMAIL_ADDR from buildbot.schedulers import base from buildbot.util import identifiers class ValidationError(ValueError): pass class CollectedValidationError(ValueError): def __init__(self, errors): self.errors = errors super().__init__("\n".join([k + ":" + v for k, v in errors.items()])) class ValidationErrorCollector: def __init__(self): self.errors = {} @defer.inlineCallbacks def collectValidationErrors(self, name, fn, *args, **kwargs): res = None try: res = yield defer.maybeDeferred(fn, *args, **kwargs) except CollectedValidationError as e: for error_name, e in e.errors.items(): self.errors[error_name] = e except ValueError as e: self.errors[name] = str(e) return res def maybeRaiseCollectedErrors(self): errors = self.errors if errors: raise CollectedValidationError(errors) DefaultField = object() # sentinel object to signal default behavior class BaseParameter: """ BaseParameter provides a base implementation for property customization """ spec_attributes = ["name", "fullName", "label", "tablabel", "type", "default", "required", "multiple", "regex", "hide", "maxsize", "autopopulate"] name = "" parentName = None label = "" tablabel = "" type = "" default = "" required = False multiple = False regex = None debug = True hide = False maxsize = None autopopulate = None @property def fullName(self): """A full name, intended to uniquely identify a parameter""" # join with '_' if both are set (cannot put '.', because it is used as # **kwargs) if self.parentName and self.name: return self.parentName + '_' + self.name # otherwise just use the one that is set # (this allows empty name for "anonymous nests") return self.name or self.parentName def setParent(self, parent): self.parentName = parent.fullName if parent else None def __init__(self, name, label=None, tablabel=None, regex=None, **kw): """ @param name: the name of the field, used during posting values back to the scheduler. This is not necessarily a UI value, and there may be restrictions on the characters allowed for this value. For example, HTML would require this field to avoid spaces and other punctuation ('-', '.', and '_' allowed) @type name: unicode @param label: (optional) the name of the field, used for UI display. @type label: unicode or None (to use 'name') @param regex: (optional) regex to validate the value with. Not used by all subclasses @type regex: unicode or regex """ if name in ["owner", "builderNames", "builderid"]: config.error( "%s cannot be used as a parameter name, because it is reserved" % (name,)) self.name = name self.label = name if label is None else label self.tablabel = self.label if tablabel is None else tablabel if regex: self.regex = re.compile(regex) if 'value' in kw: config.error("Use default='%s' instead of value=... to give a " "default Parameter value" % kw['value']) # all other properties are generically passed via **kw self.__dict__.update(kw) def getFromKwargs(self, kwargs): """Simple customization point for child classes that do not need the other parameters supplied to updateFromKwargs. Return the value for the property named 'self.name'. The default implementation converts from a list of items, validates using the optional regex field and calls 'parse_from_args' for the final conversion. 
""" args = kwargs.get(self.fullName, []) # delete white space for args for arg in args: if isinstance(arg, str) and not arg.strip(): args.remove(arg) if not args: if self.required: raise ValidationError( "'%s' needs to be specified" % (self.label)) if self.multiple: args = self.default else: args = [self.default] if self.regex: for arg in args: if not self.regex.match(arg): raise ValidationError("%s:'%s' does not match pattern '%s'" % (self.label, arg, self.regex.pattern)) if self.maxsize is not None: for arg in args: if len(arg) > self.maxsize: raise ValidationError("%s: is too large %d > %d" % (self.label, len(arg), self.maxsize)) try: arg = self.parse_from_args(args) except Exception as e: # an exception will just display an alert in the web UI # also log the exception if self.debug: traceback.print_exc() raise e if arg is None: raise ValidationError("need %s: no default provided by config" % (self.fullName,)) return arg def updateFromKwargs(self, properties, kwargs, collector, **unused): """Primary entry point to turn 'kwargs' into 'properties'""" properties[self.name] = self.getFromKwargs(kwargs) def parse_from_args(self, l): """Secondary customization point, called from getFromKwargs to turn a validated value into a single property value""" if self.multiple: return [self.parse_from_arg(arg) for arg in l] return self.parse_from_arg(l[0]) def parse_from_arg(self, s): return s def getSpec(self): spec_attributes = [] accumulateClassList(self.__class__, 'spec_attributes', spec_attributes) ret = {} for i in spec_attributes: ret[i] = getattr(self, i) return ret class FixedParameter(BaseParameter): """A fixed parameter that cannot be modified by the user.""" type = "fixed" hide = True default = "" def parse_from_args(self, l): return self.default class StringParameter(BaseParameter): """A simple string parameter""" spec_attributes = ["size"] type = "text" size = 10 def parse_from_arg(self, s): return s class TextParameter(StringParameter): """A generic string parameter that may span multiple lines""" spec_attributes = ["cols", "rows"] type = "textarea" cols = 80 rows = 20 def value_to_text(self, value): return str(value) class IntParameter(StringParameter): """An integer parameter""" type = "int" default = 0 parse_from_arg = int # will throw an exception if parse fail class BooleanParameter(BaseParameter): """A boolean parameter""" type = "bool" def getFromKwargs(self, kwargs): return kwargs.get(self.fullName, [self.default]) == [True] class UserNameParameter(StringParameter): """A username parameter to supply the 'owner' of a build""" spec_attributes = ["need_email"] type = "username" default = "" size = 30 need_email = True def __init__(self, name="username", label="Your name:", **kw): super().__init__(name, label, **kw) def parse_from_arg(self, s): if not s and not self.required: return s if self.need_email: res = VALID_EMAIL_ADDR.search(s) if res is None: raise ValidationError("%s: please fill in email address in the " "form 'User '" % (self.name,)) return s class ChoiceStringParameter(BaseParameter): """A list of strings, allowing the selection of one of the predefined values. 
The 'strict' parameter controls whether values outside the predefined list of choices are allowed""" spec_attributes = ["choices", "strict"] type = "list" choices = [] strict = True def parse_from_arg(self, s): if self.strict and s not in self.choices: raise ValidationError( "'%s' does not belong to list of available choices '%s'" % (s, self.choices)) return s def getChoices(self, master, scheduler, buildername): return self.choices class InheritBuildParameter(ChoiceStringParameter): """A parameter that takes its values from another build""" type = ChoiceStringParameter.type name = "inherit" compatible_builds = None def getChoices(self, master, scheduler, buildername): return self.compatible_builds(master.status, buildername) def getFromKwargs(self, kwargs): raise ValidationError( "InheritBuildParameter can only be used by properties") def updateFromKwargs(self, master, properties, changes, kwargs, **unused): arg = kwargs.get(self.fullName, [""])[0] split_arg = arg.split(" ")[0].split("/") if len(split_arg) != 2: raise ValidationError("bad build: %s" % (arg)) builder, num = split_arg builder_status = master.status.getBuilder(builder) if not builder_status: raise ValidationError("unknown builder: %s in %s" % (builder, arg)) b = builder_status.getBuild(int(num)) if not b: raise ValidationError("unknown build: %d in %s" % (num, arg)) props = {self.name: (arg.split(" ")[0])} for name, value, source in b.getProperties().asList(): if source == "Force Build Form": if name == "owner": name = "orig_owner" props[name] = value properties.update(props) changes.extend(b.changes) class WorkerChoiceParameter(ChoiceStringParameter): """A parameter that lets the worker name be explicitly chosen. This parameter works in conjunction with 'buildbot.process.builder.enforceChosenWorker', which should be added as the 'canStartBuild' parameter to the Builder. The "anySentinel" parameter represents the sentinel value to specify that there is no worker preference. """ anySentinel = '-any-' label = 'Worker' required = False strict = False def __init__(self, name='workername', **kwargs): super().__init__(name, **kwargs) def updateFromKwargs(self, kwargs, **unused): workername = self.getFromKwargs(kwargs) if workername == self.anySentinel: # no preference, so don't set a parameter at all return super().updateFromKwargs(kwargs=kwargs, **unused) def getChoices(self, master, scheduler, buildername): if buildername is None: # this is the "Force All Builds" page workernames = master.status.getWorkerNames() else: builderStatus = master.status.getBuilder(buildername) workernames = [worker.getName() for worker in builderStatus.getWorkers()] workernames.sort() workernames.insert(0, self.anySentinel) return workernames class FileParameter(BaseParameter): """A parameter which allows to download a whole file and store it as a property or patch """ type = 'file' maxsize = 1024 * 1024 * 10 # 10M class NestedParameter(BaseParameter): """A 'parent' parameter for a set of related parameters. This provides a logical grouping for the child parameters. Typically, the 'fullName' of the child parameters mix in the parent's 'fullName'. This allows for a field to appear multiple times in a form (for example, two codebases each have a 'branch' field). If the 'name' of the parent is the empty string, then the parent's name does not mix in with the child 'fullName'. This is useful when a field will not appear multiple time in a scheduler but the logical grouping is helpful. 
The result of a NestedParameter is typically a dictionary, with the key/value being the name/value of the children. """ spec_attributes = [ "layout", "columns"] # field is recursive, and thus managed in custom getSpec type = 'nested' layout = 'vertical' fields = None columns = None def __init__(self, name, fields, **kwargs): super().__init__(fields=fields, name=name, **kwargs) # reasonable defaults for the number of columns if self.columns is None: num_visible_fields = len( [field for field in fields if not field.hide]) if num_visible_fields >= 4: self.columns = 2 else: self.columns = 1 if self.columns > 4: config.error( "UI only support up to 4 columns in nested parameters") # fix up the child nodes with the parent (use None for now): self.setParent(None) def setParent(self, parent): super().setParent(parent) for field in self.fields: # pylint: disable=not-an-iterable field.setParent(self) @defer.inlineCallbacks def collectChildProperties(self, kwargs, properties, collector, **kw): """Collapse the child values into a dictionary. This is intended to be called by child classes to fix up the fullName->name conversions.""" childProperties = {} for field in self.fields: # pylint: disable=not-an-iterable yield collector.collectValidationErrors(field.fullName, field.updateFromKwargs, kwargs=kwargs, properties=childProperties, collector=collector, **kw) kwargs[self.fullName] = childProperties @defer.inlineCallbacks def updateFromKwargs(self, kwargs, properties, collector, **kw): """By default, the child values will be collapsed into a dictionary. If the parent is anonymous, this dictionary is the top-level properties.""" yield self.collectChildProperties(kwargs=kwargs, properties=properties, collector=collector, **kw) # default behavior is to set a property # -- use setdefault+update in order to collapse 'anonymous' nested # parameters correctly if self.name: d = properties.setdefault(self.name, {}) else: # if there's no name, collapse this nest all the way d = properties d.update(kwargs[self.fullName]) def getSpec(self): ret = super().getSpec() # pylint: disable=not-an-iterable ret['fields'] = [field.getSpec() for field in self.fields] return ret ParameterGroup = NestedParameter class AnyPropertyParameter(NestedParameter): """A generic property parameter, where both the name and value of the property must be given.""" type = NestedParameter.type def __init__(self, name, **kw): fields = [ StringParameter(name='name', label="Name:"), StringParameter(name='value', label="Value:"), ] super().__init__(name, label='', fields=fields, **kw) def getFromKwargs(self, kwargs): raise ValidationError( "AnyPropertyParameter can only be used by properties") @defer.inlineCallbacks def updateFromKwargs(self, master, properties, kwargs, collector, **kw): yield self.collectChildProperties(master=master, properties=properties, kwargs=kwargs, collector=collector, **kw) pname = kwargs[self.fullName].get("name", "") pvalue = kwargs[self.fullName].get("value", "") if not pname: return validation = master.config.validation pname_validate = validation['property_name'] pval_validate = validation['property_value'] if not pname_validate.match(pname) \ or not pval_validate.match(pvalue): raise ValidationError( "bad property name='%s', value='%s'" % (pname, pvalue)) properties[pname] = pvalue class CodebaseParameter(NestedParameter): """A parameter whose result is a codebase specification instead of a property""" type = NestedParameter.type codebase = '' def __init__(self, codebase, name=None, label=None, branch=DefaultField, 
revision=DefaultField, repository=DefaultField, project=DefaultField, patch=None, **kwargs): """ A set of properties that will be used to generate a codebase dictionary. The branch/revision/repository/project should each be a parameter that will map to the corresponding value in the sourcestamp. Use None to disable the field. @param codebase: name of the codebase; used as key for the sourcestamp set @type codebase: unicode @param name: optional override for the name-currying for the subfields @type codebase: unicode @param label: optional override for the label for this set of parameters @type codebase: unicode """ name = name or codebase if label is None and codebase: label = "Codebase: " + codebase fields_dict = dict(branch=branch, revision=revision, repository=repository, project=project) for k, v in fields_dict.items(): if v is DefaultField: v = StringParameter(name=k, label=k.capitalize() + ":") elif isinstance(v, str): v = FixedParameter(name=k, default=v) fields_dict[k] = v fields = [val for k, val in sorted(fields_dict.items(), key=lambda x: x[0]) if val] if patch is not None: if patch.name != "patch": config.error( "patch parameter of a codebase must be named 'patch'") fields.append(patch) if self.columns is None and 'columns' not in kwargs: self.columns = 1 super().__init__(name=name, label=label, codebase=codebase, fields=fields, **kwargs) def createSourcestamp(self, properties, kwargs): # default, just return the things we put together return kwargs.get(self.fullName, {}) @defer.inlineCallbacks def updateFromKwargs(self, sourcestamps, kwargs, properties, collector, **kw): yield self.collectChildProperties(sourcestamps=sourcestamps, properties=properties, kwargs=kwargs, collector=collector, **kw) # convert the "property" to a sourcestamp ss = self.createSourcestamp(properties, kwargs) if ss is not None: patch = ss.pop('patch', None) if patch is not None: for k, v in patch.items(): ss['patch_' + k] = v sourcestamps[self.codebase] = ss def oneCodebase(**kw): return [CodebaseParameter('', **kw)] class PatchParameter(NestedParameter): """A patch parameter contains pre-configure UI for all the needed components for a sourcestamp patch """ columns = 1 def __init__(self, **kwargs): name = kwargs.pop('name', 'patch') default_fields = [ FileParameter('body'), IntParameter('level', default=1), StringParameter('author', default=""), StringParameter('comment', default=""), StringParameter('subdir', default=".") ] fields = [ kwargs.pop(field.name, field) for field in default_fields ] super().__init__(name, fields=fields, **kwargs) class ForceScheduler(base.BaseScheduler): """ ForceScheduler implements the backend for a UI to allow customization of builds. For example, a web form be populated to trigger a build. """ compare_attrs = base.BaseScheduler.compare_attrs + \ ('builderNames', 'reason', 'username', 'forcedProperties') def __init__(self, name, builderNames, username=UserNameParameter(), reason=StringParameter( name="reason", default="force build", size=20), reasonString="A build was forced by '%(owner)s': %(reason)s", buttonName=None, codebases=None, label=None, properties=None): """ Initialize a ForceScheduler. The UI will provide a set of fields to the user; these fields are driven by a corresponding child class of BaseParameter. Use NestedParameter to provide logical groupings for parameters. The branch/revision/repository/project fields are deprecated and provided only for backwards compatibility. Using a Codebase(name='') will give the equivalent behavior. 
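        A minimal, illustrative configuration; the builder name and extra
        property below are hypothetical and not taken from this module:

            from buildbot.plugins import schedulers, util

            force = schedulers.ForceScheduler(
                name="force",
                builderNames=["runtests"],
                codebases=[util.CodebaseParameter(codebase="")],
                properties=[
                    util.StringParameter(name="pull_request",
                                         label="Pull request:", default=""),
                ])
            c['schedulers'].append(force)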
@param name: name of this scheduler (used as a key for state) @type name: unicode @param builderNames: list of builders this scheduler may start @type builderNames: list of unicode @param username: the "owner" for a build (may not be shown depending on the Auth configuration for the master) @type username: BaseParameter @param reason: the "reason" for a build @type reason: BaseParameter @param codebases: the codebases for a build @type codebases: list of string's or CodebaseParameter's; None will generate a default, but [] will remove all codebases @param properties: extra properties to configure the build @type properties: list of BaseParameter's """ if not self.checkIfType(name, str): config.error("ForceScheduler name must be a unicode string: %r" % name) if not name: config.error("ForceScheduler name must not be empty: %r" % name) if not identifiers.ident_re.match(name): config.error("ForceScheduler name must be an identifier: %r" % name) if not self.checkIfListOfType(builderNames, (str,)): config.error("ForceScheduler '%s': builderNames must be a list of strings: %r" % (name, builderNames)) if self.checkIfType(reason, BaseParameter): self.reason = reason else: config.error("ForceScheduler '%s': reason must be a StringParameter: %r" % (name, reason)) if properties is None: properties = [] if not self.checkIfListOfType(properties, BaseParameter): config.error("ForceScheduler '%s': properties must be a list of BaseParameters: %r" % (name, properties)) if self.checkIfType(username, BaseParameter): self.username = username else: config.error("ForceScheduler '%s': username must be a StringParameter: %r" % (name, username)) self.forcedProperties = [] self.label = name if label is None else label # Use the default single codebase form if none are provided if codebases is None: codebases = [CodebaseParameter(codebase='')] elif not codebases: config.error("ForceScheduler '%s': 'codebases' cannot be empty;" " use [CodebaseParameter(codebase='', hide=True)] if needed: %r " % ( name, codebases)) elif not isinstance(codebases, list): config.error("ForceScheduler '%s': 'codebases' should be a list of strings or CodebaseParameter," " not %s" % ( name, type(codebases))) codebase_dict = {} for codebase in codebases: if isinstance(codebase, str): codebase = CodebaseParameter(codebase=codebase) elif not isinstance(codebase, CodebaseParameter): config.error("ForceScheduler '%s': 'codebases' must be a list of strings" " or CodebaseParameter objects: %r" % ( name, codebases)) self.forcedProperties.append(codebase) codebase_dict[codebase.codebase] = dict( branch='', repository='', revision='') super().__init__(name=name, builderNames=builderNames, properties={}, codebases=codebase_dict) if properties: self.forcedProperties.extend(properties) # this is used to simplify the template self.all_fields = [NestedParameter(name='', fields=[username, reason])] self.all_fields.extend(self.forcedProperties) self.reasonString = reasonString self.buttonName = buttonName or name def checkIfType(self, obj, chkType): return isinstance(obj, chkType) def checkIfListOfType(self, obj, chkType): isListOfType = True if self.checkIfType(obj, list): for item in obj: if not self.checkIfType(item, chkType): isListOfType = False break else: isListOfType = False return isListOfType @defer.inlineCallbacks def gatherPropertiesAndChanges(self, collector, **kwargs): properties = {} changeids = [] sourcestamps = {} for param in self.forcedProperties: yield collector.collectValidationErrors(param.fullName, param.updateFromKwargs, 
master=self.master, properties=properties, changes=changeids, sourcestamps=sourcestamps, collector=collector, kwargs=kwargs) changeids = [type(a) == int and a or a.number for a in changeids] real_properties = Properties() for pname, pvalue in properties.items(): real_properties.setProperty(pname, pvalue, "Force Build Form") return (real_properties, changeids, sourcestamps) @defer.inlineCallbacks def computeBuilderNames(self, builderNames=None, builderid=None): if builderNames is None: if builderid is not None: builder = yield self.master.data.get(('builders', str(builderid))) builderNames = [builder['name']] else: builderNames = self.builderNames else: builderNames = sorted( set(builderNames).intersection(self.builderNames)) return builderNames @defer.inlineCallbacks def force(self, owner, builderNames=None, builderid=None, **kwargs): """ We check the parameters, and launch the build, if everything is correct """ builderNames = yield self.computeBuilderNames(builderNames, builderid) if not builderNames: raise KeyError("builderNames not specified or not supported") # Currently the validation code expects all kwargs to be lists # I don't want to refactor that now so much sure we comply... kwargs = dict((k, [v]) if not isinstance(v, list) else (k, v) for k, v in kwargs.items()) # probably need to clean that out later as the IProperty is already a # validation mechanism collector = ValidationErrorCollector() reason = yield collector.collectValidationErrors(self.reason.fullName, self.reason.getFromKwargs, kwargs) if owner is None or owner == "anonymous": owner = yield collector.collectValidationErrors(self.username.fullName, self.username.getFromKwargs, kwargs) properties, changeids, sourcestamps = yield self.gatherPropertiesAndChanges( collector, **kwargs) collector.maybeRaiseCollectedErrors() properties.setProperty("reason", reason, "Force Build Form") properties.setProperty("owner", owner, "Force Build Form") r = self.reasonString % {'owner': owner, 'reason': reason} # turn sourcestamps into a list for cb, ss in sourcestamps.items(): ss['codebase'] = cb sourcestamps = list(sourcestamps.values()) # everything is validated, we can create our source stamp, and # buildrequest res = yield self.addBuildsetForSourceStampsWithDefaults( reason=r, sourcestamps=sourcestamps, properties=properties, builderNames=builderNames, ) return res buildbot-2.6.0/master/buildbot/schedulers/manager.py000066400000000000000000000016601361162603000225500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.process.measured_service import MeasuredBuildbotServiceManager class SchedulerManager(MeasuredBuildbotServiceManager): name = "SchedulerManager" managed_services_name = "schedulers" config_attr = "schedulers" buildbot-2.6.0/master/buildbot/schedulers/timed.py000066400000000000000000000424451361162603000222460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from zope.interface import implementer from buildbot import config from buildbot import util from buildbot.changes.filter import ChangeFilter from buildbot.interfaces import ITriggerableScheduler from buildbot.process import buildstep from buildbot.process import properties from buildbot.schedulers import base from buildbot.util import croniter from buildbot.util.codebase import AbsoluteSourceStampsMixin class Timed(AbsoluteSourceStampsMixin, base.BaseScheduler): """ Parent class for timed schedulers. This takes care of the (surprisingly subtle) mechanics of ensuring that each timed actuation runs to completion before the service stops. """ compare_attrs = ('reason', 'createAbsoluteSourceStamps', 'onlyIfChanged', 'branch', 'fileIsImportant', 'change_filter', 'onlyImportant') reason = '' class NoBranch: pass def __init__(self, name, builderNames, reason='', createAbsoluteSourceStamps=False, onlyIfChanged=False, branch=NoBranch, change_filter=None, fileIsImportant=None, onlyImportant=False, **kwargs): super().__init__(name, builderNames, **kwargs) # tracking for when to start the next build self.lastActuated = None # A lock to make sure that each actuation occurs without interruption. # This lock governs actuateAt, actuateAtTimer, and actuateOk self.actuationLock = defer.DeferredLock() self.actuateOk = False self.actuateAt = None self.actuateAtTimer = None self.reason = util.bytes2unicode(reason % {'name': name}) self.branch = branch self.change_filter = ChangeFilter.fromSchedulerConstructorArgs( change_filter=change_filter) self.createAbsoluteSourceStamps = createAbsoluteSourceStamps self.onlyIfChanged = onlyIfChanged if fileIsImportant and not callable(fileIsImportant): config.error( "fileIsImportant must be a callable") self.fileIsImportant = fileIsImportant # If True, only important changes will be added to the buildset. 
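# Illustrative use of these flags from a master.cfg (the scheduler name,
# builder name and filter function are hypothetical, not taken from this
# module):
#
#     def important(change):
#         # treat documentation-only changes as unimportant
#         return any(not f.startswith("docs/") for f in change.files)
#
#     schedulers.Nightly(name="nightly",
#                        builderNames=["full-build"],
#                        hour=3, minute=0,
#                        onlyIfChanged=True,
#                        fileIsImportant=important,
#                        onlyImportant=True)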
self.onlyImportant = onlyImportant self._reactor = reactor # patched by tests @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return None # no need to lock this # nothing else can run before the service is started self.actuateOk = True # get the scheduler's last_build time (note: only done at startup) self.lastActuated = yield self.getState('last_build', None) # schedule the next build yield self.scheduleNextBuild() if self.onlyIfChanged or self.createAbsoluteSourceStamps: yield self.startConsumingChanges(fileIsImportant=self.fileIsImportant, change_filter=self.change_filter, onlyImportant=self.onlyImportant) else: yield self.master.db.schedulers.flushChangeClassifications(self.serviceid) @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return None # shut down any pending actuation, and ensure that we wait for any # current actuation to complete by acquiring the lock. This ensures # that no build will be scheduled after deactivate is complete. def stop_actuating(): self.actuateOk = False self.actuateAt = None if self.actuateAtTimer: self.actuateAtTimer.cancel() self.actuateAtTimer = None yield self.actuationLock.run(stop_actuating) # Scheduler methods def gotChange(self, change, important): # both important and unimportant changes on our branch are recorded, as # we will include all such changes in any buildsets we start. Note # that we must check the branch here because it is not included in the # change filter. if self.branch is not Timed.NoBranch and change.branch != self.branch: return defer.succeed(None) # don't care about this change d = self.master.db.schedulers.classifyChanges( self.serviceid, {change.number: important}) if self.createAbsoluteSourceStamps: d.addCallback(lambda _: self.recordChange(change)) return d @defer.inlineCallbacks def startBuild(self): if not self.enabled: log.msg(format='ignoring build from %(name)s because scheduler ' 'has been disabled by the user', name=self.name) return # use the collected changes to start a build scheds = self.master.db.schedulers classifications = yield scheds.getChangeClassifications(self.serviceid) # if onlyIfChanged is True, then we will skip this build if no # important changes have occurred since the last invocation if self.onlyIfChanged and not any(classifications.values()): log.msg(("%s scheduler <%s>: skipping build " + "- No important changes") % (self.__class__.__name__, self.name)) return changeids = sorted(classifications.keys()) if changeids: max_changeid = changeids[-1] # (changeids are sorted) yield self.addBuildsetForChanges(reason=self.reason, changeids=changeids) yield scheds.flushChangeClassifications(self.serviceid, less_than=max_changeid + 1) else: # There are no changes, but onlyIfChanged is False, so start # a build of the latest revision, whatever that is sourcestamps = [dict(codebase=cb) for cb in self.codebases] yield self.addBuildsetForSourceStampsWithDefaults( reason=self.reason, sourcestamps=sourcestamps) def getCodebaseDict(self, codebase): if self.createAbsoluteSourceStamps: return super().getCodebaseDict(codebase) return self.codebases[codebase] # Timed methods def getNextBuildTime(self, lastActuation): """ Called by to calculate the next time to actuate a BuildSet. Override in subclasses. To trigger a fresh call to this method, use L{rescheduleNextBuild}. 
@param lastActuation: the time of the last actuation, or None for never @returns: a Deferred firing with the next time a build should occur (in the future), or None for never. """ raise NotImplementedError def scheduleNextBuild(self): """ Schedule the next build, re-invoking L{getNextBuildTime}. This can be called at any time, and it will avoid contention with builds being started concurrently. @returns: Deferred """ return self.actuationLock.run(self._scheduleNextBuild_locked) # utilities def now(self): "Similar to util.now, but patchable by tests" return util.now(self._reactor) @defer.inlineCallbacks def _scheduleNextBuild_locked(self): # clear out the existing timer if self.actuateAtTimer: self.actuateAtTimer.cancel() self.actuateAtTimer = None # calculate the new time actuateAt = yield self.getNextBuildTime(self.lastActuated) if actuateAt is None: self.actuateAt = None else: # set up the new timer now = self.now() self.actuateAt = max(actuateAt, now) untilNext = self.actuateAt - now if untilNext == 0: log.msg(("%s scheduler <%s>: missed scheduled build time" " - building immediately") % (self.__class__.__name__, self.name)) self.actuateAtTimer = self._reactor.callLater(untilNext, self._actuate) @defer.inlineCallbacks def _actuate(self): # called from the timer when it's time to start a build self.actuateAtTimer = None self.lastActuated = self.actuateAt @defer.inlineCallbacks def set_state_and_start(): # bail out if we shouldn't be actuating anymore if not self.actuateOk: return # mark the last build time self.actuateAt = None yield self.setState('last_build', self.lastActuated) try: # start the build yield self.startBuild() except Exception as e: log.err(e, 'while actuating') finally: # schedule the next build (noting the lock is already held) yield self._scheduleNextBuild_locked() yield self.actuationLock.run(set_state_and_start) class Periodic(Timed): compare_attrs = ('periodicBuildTimer',) def __init__(self, name, builderNames, periodicBuildTimer, reason="The Periodic scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name, builderNames, reason=reason, **kwargs) if periodicBuildTimer <= 0: config.error("periodicBuildTimer must be positive") self.periodicBuildTimer = periodicBuildTimer def getNextBuildTime(self, lastActuated): if lastActuated is None: return defer.succeed(self.now()) # meaning "ASAP" return defer.succeed(lastActuated + self.periodicBuildTimer) class NightlyBase(Timed): compare_attrs = ('minute', 'hour', 'dayOfMonth', 'month', 'dayOfWeek') def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', **kwargs): super().__init__(name, builderNames, **kwargs) self.minute = minute self.hour = hour self.dayOfMonth = dayOfMonth self.month = month self.dayOfWeek = dayOfWeek def _timeToCron(self, time, isDayOfWeek=False): if isinstance(time, int): if isDayOfWeek: # Convert from Mon = 0 format to Sun = 0 format for use in # croniter time = (time + 1) % 7 return time if isinstance(time, str): if isDayOfWeek: # time could be a comma separated list of values, e.g. 
"5,sun" time_array = str(time).split(',') for i, time_val in enumerate(time_array): try: # try to convert value in place # Conversion for croniter (see above) time_array[i] = (int(time_val) + 1) % 7 except ValueError: # all non-int values are kept pass # Convert the list to a string return ','.join([str(s) for s in time_array]) return time if isDayOfWeek: # Conversion for croniter (see above) time = [(t + 1) % 7 for t in time] return ','.join([str(s) for s in time]) # Convert the list to a string def getNextBuildTime(self, lastActuated): dateTime = lastActuated or self.now() sched = '%s %s %s %s %s' % (self._timeToCron(self.minute), self._timeToCron(self.hour), self._timeToCron(self.dayOfMonth), self._timeToCron(self.month), self._timeToCron(self.dayOfWeek, True)) cron = croniter.croniter(sched, dateTime) nextdate = cron.get_next(float) return defer.succeed(nextdate) class Nightly(NightlyBase): def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', reason="The Nightly scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name=name, builderNames=builderNames, minute=minute, hour=hour, dayOfMonth=dayOfMonth, month=month, dayOfWeek=dayOfWeek, reason=reason, **kwargs) @implementer(ITriggerableScheduler) class NightlyTriggerable(NightlyBase): def __init__(self, name, builderNames, minute=0, hour='*', dayOfMonth='*', month='*', dayOfWeek='*', reason="The NightlyTriggerable scheduler named '%(name)s' triggered this build", **kwargs): super().__init__(name=name, builderNames=builderNames, minute=minute, hour=hour, dayOfMonth=dayOfMonth, month=month, dayOfWeek=dayOfWeek, reason=reason, **kwargs) self._lastTrigger = None @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return lastTrigger = yield self.getState('lastTrigger', None) self._lastTrigger = None if lastTrigger: try: if isinstance(lastTrigger[0], list): self._lastTrigger = (lastTrigger[0], properties.Properties.fromDict( lastTrigger[1]), lastTrigger[2], lastTrigger[3]) # handle state from before Buildbot-0.9.0 elif isinstance(lastTrigger[0], dict): self._lastTrigger = (list(lastTrigger[0].values()), properties.Properties.fromDict( lastTrigger[1]), None, None) except Exception: pass # If the lastTrigger isn't of the right format, ignore it if not self._lastTrigger: log.msg( format="NightlyTriggerable Scheduler <%(scheduler)s>: " "could not load previous state; starting fresh", scheduler=self.name) def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger this scheduler with the given sourcestamp ID. Returns a deferred that will fire when the buildset is finished.""" assert isinstance(sourcestamps, list), \ "trigger requires a list of sourcestamps" self._lastTrigger = (sourcestamps, set_props, parent_buildid, parent_relationship) if set_props: propsDict = set_props.asDict() else: propsDict = {} # record the trigger in the db d = self.setState('lastTrigger', (sourcestamps, propsDict, parent_buildid, parent_relationship)) # Trigger expects a callback with the success of the triggered # build, if waitForFinish is True. # Just return SUCCESS, to indicate that the trigger was successful, # don't wait for the nightly to run. 
return (defer.succeed((None, {})), d.addCallback(lambda _: buildstep.SUCCESS)) @defer.inlineCallbacks def startBuild(self): if not self.enabled: log.msg(format='ignoring build from %(name)s because scheduler ' 'has been disabled by the user', name=self.name) return if self._lastTrigger is None: return (sourcestamps, set_props, parent_buildid, parent_relationship) = self._lastTrigger self._lastTrigger = None yield self.setState('lastTrigger', None) # properties for this buildset are composed of our own properties, # potentially overridden by anything from the triggering build props = properties.Properties() props.updateFromProperties(self.properties) if set_props: props.updateFromProperties(set_props) yield self.addBuildsetForSourceStampsWithDefaults( reason=self.reason, sourcestamps=sourcestamps, properties=props, parent_buildid=parent_buildid, parent_relationship=parent_relationship) buildbot-2.6.0/master/buildbot/schedulers/triggerable.py000066400000000000000000000113421361162603000234230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from zope.interface import implementer from buildbot.interfaces import ITriggerableScheduler from buildbot.process.properties import Properties from buildbot.schedulers import base from buildbot.util import debounce @implementer(ITriggerableScheduler) class Triggerable(base.BaseScheduler): compare_attrs = base.BaseScheduler.compare_attrs + ('reason',) def __init__(self, name, builderNames, reason=None, **kwargs): super().__init__(name, builderNames, **kwargs) self._waiters = {} self._buildset_complete_consumer = None self.reason = reason def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): """Trigger this scheduler with the optional given list of sourcestamps Returns two deferreds: idsDeferred -- yields the ids of the buildset and buildrequest, as soon as they are available. resultsDeferred -- yields the build result(s), when they finish.""" # properties for this buildset are composed of our own properties, # potentially overridden by anything from the triggering build props = Properties() props.updateFromProperties(self.properties) reason = self.reason if set_props: props.updateFromProperties(set_props) reason = set_props.getProperty('reason') if reason is None: reason = "The Triggerable scheduler named '%s' triggered this build" % self.name # note that this does not use the buildset subscriptions mechanism, as # the duration of interest to the caller is bounded by the lifetime of # this process. 
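# Sketch of the contract as seen by a caller such as the Trigger build
# step (variable names here are illustrative only):
#
#     idsDeferred, resultsDeferred = sched.trigger(
#         waited_for=True, sourcestamps=[{'codebase': ''}])
#     bsid, brids = yield idsDeferred          # fires once the buildset exists
#     results, brids = yield resultsDeferred   # fires when the builds finish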
idsDeferred = self.addBuildsetForSourceStampsWithDefaults( reason, sourcestamps, waited_for, properties=props, parent_buildid=parent_buildid, parent_relationship=parent_relationship) resultsDeferred = defer.Deferred() @idsDeferred.addCallback def setup_waiter(ids): bsid, brids = ids self._waiters[bsid] = (resultsDeferred, brids) self._updateWaiters() return ids return idsDeferred, resultsDeferred @defer.inlineCallbacks def startService(self): yield super().startService() self._updateWaiters.start() @defer.inlineCallbacks def stopService(self): # finish any _updateWaiters calls yield self._updateWaiters.stop() # cancel any outstanding subscription if self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._buildset_complete_consumer = None # and errback any outstanding deferreds if self._waiters: msg = 'Triggerable scheduler stopped before build was complete' for d, brids in self._waiters.values(): d.errback(failure.Failure(RuntimeError(msg))) self._waiters = {} yield super().stopService() @debounce.method(wait=0) @defer.inlineCallbacks def _updateWaiters(self): if self._waiters and not self._buildset_complete_consumer: startConsuming = self.master.mq.startConsuming self._buildset_complete_consumer = yield startConsuming( self._buildset_complete_cb, ('buildsets', None, 'complete')) elif not self._waiters and self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._buildset_complete_consumer = None def _buildset_complete_cb(self, key, msg): if msg['bsid'] not in self._waiters: return # pop this bsid from the waiters list, d, brids = self._waiters.pop(msg['bsid']) # ..and potentially stop consuming buildset completion notifications self._updateWaiters() # fire the callback to indicate that the triggered build is complete d.callback((msg['results'], brids)) buildbot-2.6.0/master/buildbot/schedulers/trysched.py000066400000000000000000000411521361162603000227630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os from twisted.internet import defer from twisted.protocols import basic from twisted.python import log from twisted.spread import pb from buildbot import pbutil from buildbot.process.properties import Properties from buildbot.schedulers import base from buildbot.util import bytes2unicode from buildbot.util import netstrings from buildbot.util.maildir import MaildirService class TryBase(base.BaseScheduler): def filterBuilderList(self, builderNames): """ Make sure that C{builderNames} is a subset of the configured C{self.builderNames}, returning an empty list if not. If C{builderNames} is empty, use C{self.builderNames}. @returns: list of builder names to build on """ # self.builderNames is the configured list of builders # available for try. If the user supplies a list of builders, # it must be restricted to the configured list. 
If not, build # on all of the configured builders. if builderNames: for b in builderNames: if b not in self.builderNames: log.msg("%s got with builder %s" % (self, b)) log.msg(" but that wasn't in our list: %s" % (self.builderNames,)) return [] else: builderNames = self.builderNames return builderNames class BadJobfile(Exception): pass class JobdirService(MaildirService): # NOTE: tightly coupled with Try_Jobdir, below. We used to track it as a "parent" # via the MultiService API, but now we just track it as the member # "self.scheduler" name = 'JobdirService' def __init__(self, scheduler, basedir=None): self.scheduler = scheduler super().__init__(basedir) def messageReceived(self, filename): with self.moveToCurDir(filename) as f: rv = self.scheduler.handleJobFile(filename, f) return rv class Try_Jobdir(TryBase): compare_attrs = ('jobdir',) def __init__(self, name, builderNames, jobdir, **kwargs): super().__init__(name, builderNames, **kwargs) self.jobdir = jobdir self.watcher = JobdirService(scheduler=self) # TryBase used to be a MultiService and managed the JobdirService via a parent/child # relationship. We stub out the addService/removeService and just keep track of # JobdirService as self.watcher. We'll refactor these things later and remove # the need for this. def addService(self, child): pass def removeService(self, child): pass # activation handlers @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return # set the watcher's basedir now that we have a master jobdir = os.path.join(self.master.basedir, self.jobdir) self.watcher.setBasedir(jobdir) for subdir in "cur new tmp".split(): if not os.path.exists(os.path.join(jobdir, subdir)): os.mkdir(os.path.join(jobdir, subdir)) # bridge the activate/deactivate to a startService/stopService on the # child service self.watcher.startService() @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return # bridge the activate/deactivate to a startService/stopService on the # child service self.watcher.stopService() def parseJob(self, f): # jobfiles are serialized build requests. Each is a list of # serialized netstrings, in the following order: # format version number: # "1" the original # "2" introduces project and repository # "3" introduces who # "4" introduces comment # "5" introduces properties and JSON serialization of values after # version # jobid: arbitrary string, used to find the buildSet later # branch: branch name, "" for default-branch # baserev: revision, "" for HEAD # patch_level: usually "1" # patch_body: patch to be applied for build # repository # project # who: user requesting build # comment: comment from user about diff and/or build # builderNames: list of builder names # properties: dict of build properties p = netstrings.NetstringParser() f.seek(0, 2) if f.tell() > basic.NetstringReceiver.MAX_LENGTH: raise BadJobfile( "The patch size is greater that NetStringReceiver.MAX_LENGTH. 
Please Set this higher in the master.cfg") f.seek(0, 0) try: p.feed(f.read()) except basic.NetstringParseError: raise BadJobfile("unable to parse netstrings") if not p.strings: raise BadJobfile("could not find any complete netstrings") ver = bytes2unicode(p.strings.pop(0)) v1_keys = ['jobid', 'branch', 'baserev', 'patch_level', 'patch_body'] v2_keys = v1_keys + ['repository', 'project'] v3_keys = v2_keys + ['who'] v4_keys = v3_keys + ['comment'] keys = [v1_keys, v2_keys, v3_keys, v4_keys] # v5 introduces properties and uses JSON serialization parsed_job = {} def extract_netstrings(p, keys): for i, key in enumerate(keys): parsed_job[key] = bytes2unicode(p.strings[i]) def postprocess_parsed_job(): # apply defaults and handle type casting parsed_job['branch'] = parsed_job['branch'] or None parsed_job['baserev'] = parsed_job['baserev'] or None parsed_job['patch_level'] = int(parsed_job['patch_level']) for key in 'repository project who comment'.split(): parsed_job[key] = parsed_job.get(key, '') parsed_job['properties'] = parsed_job.get('properties', {}) if ver <= "4": i = int(ver) - 1 extract_netstrings(p, keys[i]) parsed_job['builderNames'] = [bytes2unicode(s) for s in p.strings[len(keys[i]):]] postprocess_parsed_job() elif ver == "5": try: data = bytes2unicode(p.strings[0]) parsed_job = json.loads(data) except ValueError: raise BadJobfile("unable to parse JSON") postprocess_parsed_job() else: raise BadJobfile("unknown version '%s'" % ver) return parsed_job def handleJobFile(self, filename, f): try: parsed_job = self.parseJob(f) builderNames = parsed_job['builderNames'] except BadJobfile: log.msg("%s reports a bad jobfile in %s" % (self, filename)) log.err() return defer.succeed(None) # Validate/fixup the builder names. builderNames = self.filterBuilderList(builderNames) if not builderNames: log.msg( "incoming Try job did not specify any allowed builder names") return defer.succeed(None) who = "" if parsed_job['who']: who = parsed_job['who'] comment = "" if parsed_job['comment']: comment = parsed_job['comment'] sourcestamp = dict(branch=parsed_job['branch'], codebase='', revision=parsed_job['baserev'], patch_body=parsed_job['patch_body'], patch_level=parsed_job['patch_level'], patch_author=who, patch_comment=comment, # TODO: can't set this remotely - #1769 patch_subdir='', project=parsed_job['project'], repository=parsed_job['repository']) reason = "'try' job" if parsed_job['who']: reason += " by user {}".format(bytes2unicode(parsed_job['who'])) properties = parsed_job['properties'] requested_props = Properties() requested_props.update(properties, "try build") return self.addBuildsetForSourceStamps( sourcestamps=[sourcestamp], reason=reason, external_idstring=bytes2unicode(parsed_job['jobid']), builderNames=builderNames, properties=requested_props) class RemoteBuildSetStatus(pb.Referenceable): def __init__(self, master, bsid, brids): self.master = master self.bsid = bsid self.brids = brids @defer.inlineCallbacks def remote_getBuildRequests(self): brids = dict() for builderid, brid in self.brids.items(): builderDict = yield self.master.data.get(('builders', builderid)) brids[builderDict['name']] = brid return [(n, RemoteBuildRequest(self.master, n, brid)) for n, brid in brids.items()] class RemoteBuildRequest(pb.Referenceable): def __init__(self, master, builderName, brid): self.master = master self.builderName = builderName self.brid = brid self.consumer = None @defer.inlineCallbacks def remote_subscribe(self, subscriber): brdict = yield self.master.data.get(('buildrequests', self.brid)) if 
not brdict: return builderId = brdict['builderid'] # make sure we aren't double-reporting any builds reportedBuilds = set([]) # subscribe to any new builds.. def gotBuild(key, msg): if msg['buildrequestid'] != self.brid or key[-1] != 'new': return if msg['buildid'] in reportedBuilds: return reportedBuilds.add(msg['buildid']) return subscriber.callRemote('newbuild', RemoteBuild( self.master, msg, self.builderName), self.builderName) self.consumer = yield self.master.mq.startConsuming( gotBuild, ('builders', str(builderId), 'builds', None, None)) subscriber.notifyOnDisconnect(lambda _: self.remote_unsubscribe(subscriber)) # and get any existing builds builds = yield self.master.data.get(('buildrequests', self.brid, 'builds')) for build in builds: if build['buildid'] in reportedBuilds: continue reportedBuilds.add(build['buildid']) yield subscriber.callRemote('newbuild', RemoteBuild( self.master, build, self.builderName), self.builderName) def remote_unsubscribe(self, subscriber): if self.consumer: self.consumer.stopConsuming() self.consumer = None class RemoteBuild(pb.Referenceable): def __init__(self, master, builddict, builderName): self.master = master self.builddict = builddict self.builderName = builderName self.consumer = None @defer.inlineCallbacks def remote_subscribe(self, subscriber, interval): # subscribe to any new steps.. def stepChanged(key, msg): log.msg("SC") if key[-1] == 'started': return subscriber.callRemote('stepStarted', self.builderName, self, msg['name'], None) elif key[-1] == 'finished': return subscriber.callRemote('stepFinished', self.builderName, self, msg['name'], None, msg['results']) self.consumer = yield self.master.mq.startConsuming( stepChanged, ('builds', str(self.builddict['buildid']), 'steps', None, None)) subscriber.notifyOnDisconnect(lambda _: self.remote_unsubscribe(subscriber)) def remote_unsubscribe(self, subscriber): if self.consumer: self.consumer.stopConsuming() self.consumer = None @defer.inlineCallbacks def remote_waitUntilFinished(self): d = defer.Deferred() def buildEvent(key, msg): log.msg("BE") if key[-1] == 'finished': d.callback(None) consumer = yield self.master.mq.startConsuming( buildEvent, ('builds', str(self.builddict['buildid']), None)) yield d # wait for event consumer.stopConsuming() return self # callers expect result=self @defer.inlineCallbacks def remote_getResults(self): buildid = self.builddict['buildid'] builddict = yield self.master.data.get(('builds', buildid)) return builddict['results'] @defer.inlineCallbacks def remote_getText(self): buildid = self.builddict['buildid'] builddict = yield self.master.data.get(('builds', buildid)) return [builddict['state_string']] class Try_Userpass_Perspective(pbutil.NewCredPerspective): def __init__(self, scheduler, username): self.scheduler = scheduler self.username = username @defer.inlineCallbacks def perspective_try(self, branch, revision, patch, repository, project, builderNames, who="", comment="", properties=None): log.msg("user %s requesting build on builders %s" % (self.username, builderNames)) if properties is None: properties = {} # build the intersection of the request and our configured list builderNames = self.scheduler.filterBuilderList(builderNames) if not builderNames: return reason = "'try' job" if who: reason += " by user {}".format(bytes2unicode(who)) if comment: reason += " ({})".format(bytes2unicode(comment)) sourcestamp = dict( branch=branch, revision=revision, repository=repository, project=project, patch_level=patch[0], patch_body=patch[1], patch_subdir='', 
patch_author=who or '', patch_comment=comment or '', codebase='', ) # note: no way to specify patch subdir - #1769 requested_props = Properties() requested_props.update(properties, "try build") (bsid, brids) = yield self.scheduler.addBuildsetForSourceStamps( sourcestamps=[sourcestamp], reason=reason, properties=requested_props, builderNames=builderNames) # return a remotely-usable BuildSetStatus object bss = RemoteBuildSetStatus(self.scheduler.master, bsid, brids) return bss def perspective_getAvailableBuilderNames(self): # Return a list of builder names that are configured # for the try service # This is mostly intended for integrating try services # into other applications return self.scheduler.listBuilderNames() class Try_Userpass(TryBase): compare_attrs = ('name', 'builderNames', 'port', 'userpass', 'properties') def __init__(self, name, builderNames, port, userpass, **kwargs): super().__init__(name, builderNames, **kwargs) self.port = port self.userpass = userpass self.registrations = [] @defer.inlineCallbacks def activate(self): yield super().activate() if not self.enabled: return # register each user/passwd with the pbmanager def factory(mind, username): return Try_Userpass_Perspective(self, username) for user, passwd in self.userpass: reg = yield self.master.pbmanager.register(self.port, user, passwd, factory) self.registrations.append(reg) @defer.inlineCallbacks def deactivate(self): yield super().deactivate() if not self.enabled: return yield defer.gatherResults( [reg.unregister() for reg in self.registrations]) buildbot-2.6.0/master/buildbot/scripts/000077500000000000000000000000001361162603000201075ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/scripts/__init__.py000066400000000000000000000000001361162603000222060ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/scripts/base.py000066400000000000000000000246351361162603000214050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import errno import os import stat import sys import traceback from contextlib import contextmanager from twisted.python import runtime from twisted.python import usage from buildbot import config as config_module @contextmanager def captureErrors(errors, msg): try: yield except errors as e: print(msg) print(e) return 1 class BusyError(RuntimeError): pass def checkPidFile(pidfile): """ mostly comes from _twistd_unix.py which is not twisted public API :-/ except it returns an exception instead of exiting """ if os.path.exists(pidfile): try: with open(pidfile) as f: pid = int(f.read()) except ValueError: raise ValueError('Pidfile {} contains non-numeric value'.format(pidfile)) try: os.kill(pid, 0) except OSError as why: if why.errno == errno.ESRCH: # The pid doesn't exist. 
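# (Signal 0 sends nothing; os.kill(pid, 0) merely probes whether the
# process exists, and ESRCH means it does not, so the pidfile is stale.)
#
# Typical call site, sketched for illustration (the basedir variable is
# hypothetical; compare checkBasedir below):
#
#     try:
#         checkPidFile(os.path.join(basedir, 'twistd.pid'))
#     except (BusyError, ValueError) as e:
#         print(str(e))
#         return False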
print('Removing stale pidfile {}'.format(pidfile)) os.remove(pidfile) else: raise OSError("Can't check status of PID {} from pidfile {}: {}".format( pid, pidfile, why)) else: raise BusyError("'{}' exists - is this master still running?".format(pidfile)) def checkBasedir(config): if not config['quiet']: print("checking basedir") if not isBuildmasterDir(config['basedir']): return False if runtime.platformType != 'win32': # no pids on win32 if not config['quiet']: print("checking for running master") pidfile = os.path.join(config['basedir'], 'twistd.pid') try: checkPidFile(pidfile) except Exception as e: print(str(e)) return False tac = getConfigFromTac(config['basedir']) if tac: if isinstance(tac.get('rotateLength', 0), str): print("ERROR: rotateLength is a string, it should be a number") print("ERROR: Please, edit your buildbot.tac file and run again") print( "ERROR: See http://trac.buildbot.net/ticket/2588 for more details") return False if isinstance(tac.get('maxRotatedFiles', 0), str): print("ERROR: maxRotatedFiles is a string, it should be a number") print("ERROR: Please, edit your buildbot.tac file and run again") print( "ERROR: See http://trac.buildbot.net/ticket/2588 for more details") return False return True def loadConfig(config, configFileName='master.cfg'): if not config['quiet']: print("checking %s" % configFileName) try: master_cfg = config_module.FileLoader( config['basedir'], configFileName).loadConfig() except config_module.ConfigErrors as e: print("Errors loading configuration:") for msg in e.errors: print(" " + msg) return except Exception: print("Errors loading configuration:") traceback.print_exc(file=sys.stdout) return return master_cfg def isBuildmasterDir(dir): def print_error(error_message): print("%s\ninvalid buildmaster directory '%s'" % (error_message, dir)) buildbot_tac = os.path.join(dir, "buildbot.tac") try: with open(buildbot_tac) as f: contents = f.read() except IOError as exception: print_error("error reading '%s': %s" % (buildbot_tac, exception.strerror)) return False if "Application('buildmaster')" not in contents: print_error("unexpected content in '%s'" % buildbot_tac) return False return True def getConfigFromTac(basedir, quiet=False): tacFile = os.path.join(basedir, 'buildbot.tac') if os.path.exists(tacFile): # don't mess with the global namespace, but set __file__ for # relocatable buildmasters tacGlobals = {'__file__': tacFile} try: with open(tacFile) as f: exec(f.read(), tacGlobals) except Exception: if not quiet: traceback.print_exc() raise return tacGlobals return None def getConfigFileFromTac(basedir, quiet=False): # execute the .tac file to see if its configfile location exists config = getConfigFromTac(basedir, quiet=quiet) if config: return config.get("configfile", "master.cfg") return "master.cfg" class SubcommandOptions(usage.Options): # subclasses should set this to a list-of-lists in order to source the # .buildbot/options file. Note that this *only* works with optParameters, # not optFlags. Example: # buildbotOptions = [ [ 'optfile-name', 'parameter-name' ], .. ] buildbotOptions = None # set this to options that must have non-None values requiredOptions = [] def __init__(self, *args): # for options in self.buildbotOptions, optParameters, and the options # file, change the default in optParameters to the value in the options # file, call through to the constructor, and then change it back. # Options uses reflect.accumulateClassList, so this *must* be a class # attribute; however, we do not want to permanently change the class. 
# So we patch it temporarily and restore it after. cls = self.__class__ if hasattr(cls, 'optParameters'): old_optParameters = cls.optParameters cls.optParameters = op = copy.deepcopy(cls.optParameters) if self.buildbotOptions: optfile = self.optionsFile = self.loadOptionsFile() # pylint: disable=not-an-iterable for optfile_name, option_name in self.buildbotOptions: for i, val in enumerate(op): if (op[i][0] == option_name and optfile_name in optfile): op[i] = list(op[i]) op[i][2] = optfile[optfile_name] super().__init__(*args) if hasattr(cls, 'optParameters'): cls.optParameters = old_optParameters def loadOptionsFile(self, _here=None): """Find the .buildbot/options file. Crawl from the current directory up towards the root, and also look in ~/.buildbot . The first directory that's owned by the user and has the file we're looking for wins. Windows skips the owned-by-user test. @rtype: dict @return: a dictionary of names defined in the options file. If no options file was found, return an empty dict. """ here = _here or os.path.abspath(os.getcwd()) if runtime.platformType == 'win32': # never trust env-vars, use the proper API from win32com.shell import shellcon, shell appdata = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, 0, 0) home = os.path.join(appdata, "buildbot") else: home = os.path.expanduser("~/.buildbot") searchpath = [] toomany = 20 while True: searchpath.append(os.path.join(here, ".buildbot")) next = os.path.dirname(here) if next == here: break # we've hit the root here = next toomany -= 1 # just in case if toomany == 0: print("I seem to have wandered up into the infinite glories " "of the heavens. Oops.") break searchpath.append(home) localDict = {} for d in searchpath: if os.path.isdir(d): if runtime.platformType != 'win32': if os.stat(d)[stat.ST_UID] != os.getuid(): print("skipping %s because you don't own it" % d) continue # security, skip other people's directories optfile = os.path.join(d, "options") if os.path.exists(optfile): try: with open(optfile, "r") as f: options = f.read() exec(options, localDict) except Exception: print("error while reading %s" % optfile) raise break for k in list(localDict.keys()): # pylint: disable=consider-iterating-dictionary if k.startswith("__"): del localDict[k] return localDict def postOptions(self): missing = [k for k in self.requiredOptions if self[k] is None] if missing: if len(missing) > 1: msg = 'Required arguments missing: ' + ', '.join(missing) else: msg = 'Required argument missing: ' + missing[0] raise usage.UsageError(msg) class BasedirMixin: """SubcommandOptions Mixin to handle subcommands that take a basedir argument""" # on tab completion, suggest directories as first argument if hasattr(usage, 'Completions'): # only set completion suggestion if running with # twisted version (>=11.1.0) that supports it compData = usage.Completions( extraActions=[usage.CompleteDirs(descr="buildbot base directory")]) def parseArgs(self, *args): if args: self['basedir'] = args[0] else: # Use the current directory if no basedir was specified. 
self['basedir'] = os.getcwd() if len(args) > 1: raise usage.UsageError("I wasn't expecting so many arguments") def postOptions(self): # get an unambiguous, expanded basedir, including expanding '~', which # may be useful in a .buildbot/config file self['basedir'] = os.path.abspath(os.path.expanduser(self['basedir'])) buildbot-2.6.0/master/buildbot/scripts/buildbot_tac.tmpl000066400000000000000000000024351361162603000234440ustar00rootroot00000000000000import os from twisted.application import service from buildbot.master import BuildMaster {% if relocatable -%} basedir = '.' {% else -%} basedir = {{ basedir|repr }} {%- endif %} {% if not no_logrotate -%} rotateLength = {{ '%d' | format(log_size) }} maxRotatedFiles = {{ ('%d' | format(log_count)) if log_count != None else 'None' }} {%- endif %} configfile = {{ config|repr }} # Default umask for server umask = None # if this is a relocatable tac file, get the directory containing the TAC if basedir == '.': basedir = os.path.abspath(os.path.dirname(__file__)) # note: this line is matched against to check that this is a buildmaster # directory; do not edit it. application = service.Application('buildmaster') {% if not no_logrotate -%} from twisted.python.logfile import LogFile from twisted.python.log import ILogObserver, FileLogObserver logfile = LogFile.fromFullPath(os.path.join(basedir, "twistd.log"), rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) application.setComponent(ILogObserver, FileLogObserver(logfile).emit) {%- endif %} m = BuildMaster(basedir, configfile, umask) m.setServiceParent(application) {% if not no_logrotate -%} m.log_rotation.rotateLength = rotateLength m.log_rotation.maxRotatedFiles = maxRotatedFiles {%- endif %} buildbot-2.6.0/master/buildbot/scripts/checkconfig.py000066400000000000000000000035261361162603000227320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from buildbot import config from buildbot.scripts.base import getConfigFileFromTac from buildbot.util import in_reactor def _loadConfig(basedir, configFile, quiet): try: config.FileLoader(basedir, configFile).loadConfig() except config.ConfigErrors as e: if not quiet: print("Configuration Errors:", file=sys.stderr) for e in e.errors: print(" " + e, file=sys.stderr) return 1 if not quiet: print("Config file is good!") return 0 @in_reactor def checkconfig(config): quiet = config.get('quiet') configFile = config.get('configFile', os.getcwd()) if os.path.isdir(configFile): basedir = configFile try: configFile = getConfigFileFromTac(basedir, quiet=quiet) except Exception: if not quiet: # the exception is already printed in base.py print("Unable to load 'buildbot.tac' from '%s':" % basedir) return 1 else: basedir = os.getcwd() return _loadConfig(basedir=basedir, configFile=configFile, quiet=quiet) __all__ = ['checkconfig'] buildbot-2.6.0/master/buildbot/scripts/cleanupdb.py000066400000000000000000000071501361162603000224210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys from twisted.internet import defer from buildbot import config as config_module from buildbot import monkeypatches from buildbot.master import BuildMaster from buildbot.scripts import base from buildbot.util import in_reactor @defer.inlineCallbacks def doCleanupDatabase(config, master_cfg): if not config['quiet']: print("cleaning database (%s)" % (master_cfg.db['db_url'])) master = BuildMaster(config['basedir']) master.config = master_cfg db = master.db yield db.setup(check_version=False, verbose=not config['quiet']) res = yield db.logs.getLogs() i = 0 percent = 0 saved = 0 for log in res: saved += yield db.logs.compressLog(log['id'], force=config['force']) i += 1 if not config['quiet'] and percent != i * 100 / len(res): percent = i * 100 / len(res) print(" {0}% {1} saved".format(percent, saved)) saved = 0 sys.stdout.flush() if master_cfg.db['db_url'].startswith("sqlite"): if not config['quiet']: print("executing sqlite vacuum function...") # sqlite vacuum function rebuild the whole database to claim # free disk space back def thd(engine): # In Python 3.6 and higher, sqlite3 no longer commits an # open transaction before DDL statements. # It is necessary to set the isolation_level to none # for auto-commit mode before doing a VACUUM. # See: https://bugs.python.org/issue28518 # Get the underlying sqlite connection from SQLAlchemy. 
sqlite_conn = engine.connection.connection # Set isolation_level to 'auto-commit mode' sqlite_conn.isolation_level = None sqlite_conn.execute("vacuum;").close() yield db.pool.do(thd) @in_reactor def cleanupDatabase(config, _noMonkey=False): # pragma: no cover # we separate the actual implementation to protect unit tests # from @in_reactor which stops the reactor if not _noMonkey: monkeypatches.patch_all() return _cleanupDatabase(config, _noMonkey=False) @defer.inlineCallbacks def _cleanupDatabase(config, _noMonkey=False): if not base.checkBasedir(config): return 1 config['basedir'] = os.path.abspath(config['basedir']) os.chdir(config['basedir']) with base.captureErrors((SyntaxError, ImportError), "Unable to load 'buildbot.tac' from '%s':" % (config['basedir'],)): configFile = base.getConfigFileFromTac(config['basedir']) with base.captureErrors(config_module.ConfigErrors, "Unable to load '%s' from '%s':" % (configFile, config['basedir'])): master_cfg = base.loadConfig(config, configFile) if not master_cfg: return 1 yield doCleanupDatabase(config, master_cfg) if not config['quiet']: print("cleanup complete") return 0 buildbot-2.6.0/master/buildbot/scripts/create_master.py000066400000000000000000000070701361162603000233030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members


import os

import jinja2

from twisted.internet import defer
from twisted.python import util

from buildbot import config as config_module
from buildbot import monkeypatches
from buildbot.master import BuildMaster
from buildbot.util import in_reactor


def makeBasedir(config):
    if os.path.exists(config['basedir']):
        if not config['quiet']:
            print("updating existing installation")
        return
    if not config['quiet']:
        print("mkdir", config['basedir'])
    os.mkdir(config['basedir'])


def makeTAC(config):
    # render buildbot_tac.tmpl using the config
    loader = jinja2.FileSystemLoader(os.path.dirname(__file__))
    env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined)
    env.filters['repr'] = repr
    tpl = env.get_template('buildbot_tac.tmpl')
    cxt = dict((k.replace('-', '_'), v) for k, v in config.items())
    contents = tpl.render(cxt)

    tacfile = os.path.join(config['basedir'], "buildbot.tac")
    if os.path.exists(tacfile):
        with open(tacfile, "rt") as f:
            oldcontents = f.read()
        if oldcontents == contents:
            if not config['quiet']:
                print("buildbot.tac already exists and is correct")
            return
        if not config['quiet']:
            print("not touching existing buildbot.tac")
            print("creating buildbot.tac.new instead")
        tacfile += ".new"
    with open(tacfile, "wt") as f:
        f.write(contents)


def makeSampleConfig(config):
    source = util.sibpath(__file__, "sample.cfg")
    target = os.path.join(config['basedir'], "master.cfg.sample")
    if not config['quiet']:
        print("creating %s" % target)
    with open(source, "rt") as f:
        config_sample = f.read()
    if config['db']:
        config_sample = config_sample.replace('sqlite:///state.sqlite',
                                              config['db'])
    with open(target, "wt") as f:
        f.write(config_sample)
    os.chmod(target, 0o600)


@defer.inlineCallbacks
def createDB(config, _noMonkey=False):
    # apply the db monkeypatches (and others - no harm)
    if not _noMonkey:  # pragma: no cover
        monkeypatches.patch_all()

    # create a master with the default configuration, but with db_url
    # overridden
    master_cfg = config_module.MasterConfig()
    master_cfg.db['db_url'] = config['db']
    master = BuildMaster(config['basedir'])
    master.config = master_cfg
    db = master.db
    yield db.setup(check_version=False, verbose=not config['quiet'])
    if not config['quiet']:
        print("creating database (%s)" % (master_cfg.db['db_url'],))
    yield db.model.upgrade()


@in_reactor
@defer.inlineCallbacks
def createMaster(config):
    makeBasedir(config)
    makeTAC(config)
    makeSampleConfig(config)
    yield createDB(config)

    if not config['quiet']:
        print("buildmaster configured in %s" % (config['basedir'],))
    return 0
buildbot-2.6.0/master/buildbot/scripts/dataspec.py000066400000000000000000000027231361162603000222510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import json import os import sys from twisted.internet import defer from buildbot.data import connector from buildbot.test.fake import fakemaster from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def dataspec(config): master = yield fakemaster.make_master(None, wantRealReactor=True) data = connector.DataConnector() yield data.setServiceParent(master) if config['out'] != '--': dirs = os.path.dirname(config['out']) if dirs and not os.path.exists(dirs): os.makedirs(dirs) f = open(config['out'], "w") else: f = sys.stdout if config['global'] is not None: f.write("window." + config['global'] + '=') f.write(json.dumps(data.allEndpoints(), indent=2)) f.close() return 0 buildbot-2.6.0/master/buildbot/scripts/devproxy.py000066400000000000000000000173331361162603000223500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import asyncio import json import logging import aiohttp # dev-proxy command requires aiohttp! run 'pip install aiohttp' import aiohttp.web import jinja2 from buildbot.plugins.db import get_plugins log = logging.getLogger(__name__) class DevProxy: MAX_CONNECTIONS = 10 def __init__(self, port, next_url, plugins, unsafe_ssl, auth_cookie): while next_url.endswith('/'): next_url = next_url[:-1] self.next_url = next_url self.app = app = aiohttp.web.Application() self.apps = get_plugins('www', None, load_now=True) self.unsafe_ssl = unsafe_ssl cookies = {} if auth_cookie: if "TWISTED_SESSION" in auth_cookie: # user pasted the whole document.cookie part! 
cookies = dict(c.split("=") for c in auth_cookie.split(";")) auth_cookie = cookies["TWISTED_SESSION"] cookies = {'TWISTED_SESSION': auth_cookie} logging.basicConfig(level=logging.DEBUG) if plugins is None: plugins = {} else: plugins = json.loads(plugins) self.plugins = plugins app.router.add_route('*', '/ws', self.ws_handler) for path in ['/api', '/auth', '/sse', '/avatar']: app.router.add_route('*', path + '{path:.*}', self.proxy_handler) app.router.add_route('*', '/', self.index_handler) for plugin in self.apps.names: if plugin != 'base': staticdir = self.apps.get(plugin).static_dir app.router.add_static('/' + plugin, staticdir) staticdir = self.staticdir = self.apps.get('base').static_dir loader = jinja2.FileSystemLoader(staticdir) self.jinja = jinja2.Environment( loader=loader, undefined=jinja2.StrictUndefined) app.router.add_static('/', staticdir) conn = aiohttp.TCPConnector( limit=self.MAX_CONNECTIONS, verify_ssl=(not self.unsafe_ssl)) self.session = aiohttp.ClientSession(connector=conn, trust_env=True, cookies=cookies) self.config = None self.buildbotURL = "http://localhost:{}/".format(port) app.on_startup.append(self.on_startup) app.on_cleanup.append(self.on_cleanup) aiohttp.web.run_app(app, host="localhost", port=port) async def on_startup(self, app): try: await self.fetch_config_from_upstream() except aiohttp.ClientConnectionError as e: raise RuntimeError("Unable to connect to buildbot master" + str(e)) async def on_cleanup(self, app): await self.session.close() async def ws_handler(self, req): # based on https://github.com/oetiker/aio-reverse-proxy/blob/master/paraview-proxy.py ws_server = aiohttp.web.WebSocketResponse() await ws_server.prepare(req) async with self.session.ws_connect( self.next_url + "/ws", headers=req.headers ) as ws_client: async def ws_forward(ws_from, ws_to): async for msg in ws_from: if ws_to.closed: await ws_to.close(code=ws_to.close_code, message=msg.extra) return if msg.type == aiohttp.WSMsgType.TEXT: await ws_to.send_str(msg.data) elif msg.type == aiohttp.WSMsgType.BINARY: await ws_to.send_bytes(msg.data) elif msg.type == aiohttp.WSMsgType.PING: await ws_to.ping() elif msg.type == aiohttp.WSMsgType.PONG: await ws_to.pong() else: raise ValueError('unexpected message type: %s' % msg) # keep forwarding websocket data in both directions await asyncio.wait( [ ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server) ], return_when=asyncio.FIRST_COMPLETED) return ws_server async def proxy_handler(self, req): method = getattr(self.session, req.method.lower()) upstream_url = self.next_url + req.path headers = req.headers.copy() query = req.query try: # note that req.content is a StreamReader, so the data is streamed # and not fully loaded in memory (unlike with python-requests) async with method(upstream_url, headers=headers, params=query, allow_redirects=False, data=req.content) as request: response = aiohttp.web.StreamResponse( status=request.status, headers=request.headers) writer = await response.prepare(req) while True: chunk = await request.content.readany() if not chunk: break # using writer.write instead of response.write saves a few checks await writer.write(chunk) return response except aiohttp.ClientConnectionError as e: return self.connection_error(e) def connection_error(self, error): return aiohttp.web.Response(text='Unable to connect to upstream server {} ({!s})'.format( self.next_url, error), status=502) async def fetch_config_from_upstream(self): async with self.session.get(self.next_url) as request: index = await 
request.content.read() if request.status != 200: raise RuntimeError("Unable to fetch buildbot config: " + index.decode()) # hack to parse the configjson from upstream buildbot config start_delimiter = b'angular.module("buildbot_config", []).constant("config", ' start_index = index.index(start_delimiter) last_index = index.index(b')') self.config = json.loads( index[start_index + len(start_delimiter):last_index].decode()) # keep the original config, but remove the plugins that we don't know for plugin in list(self.config['plugins'].keys()): if plugin not in self.apps: del self.config['plugins'][plugin] log.warn("warning: Missing plugin compared to original buildbot: %s", plugin) # add the plugins configs passed in cmdline for k, v in self.plugins.items(): self.config['plugins'][k] = v self.config['buildbotURL'] = self.buildbotURL self.config['buildbotURLs'] = [self.buildbotURL, self.next_url + "/"] async def index_handler(self, req): tpl = self.jinja.get_template('index.html') index = tpl.render(configjson=json.dumps(self.config), custom_templates={}, config=self.config) return aiohttp.web.Response(body=index, content_type='text/html') def devproxy(config): DevProxy(config['port'], config['buildbot_url'], config['plugins'], config['unsafe_ssl'], config['auth_cookie']) buildbot-2.6.0/master/buildbot/scripts/logwatcher.py000066400000000000000000000116351361162603000226260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import platform from twisted.internet import defer from twisted.internet import error from twisted.internet import protocol from twisted.internet import reactor from twisted.protocols.basic import LineOnlyReceiver from twisted.python.failure import Failure from buildbot.util import unicode2bytes class FakeTransport: disconnecting = False class BuildmasterTimeoutError(Exception): pass class BuildmasterStartupError(Exception): pass class ReconfigError(Exception): pass class TailProcess(protocol.ProcessProtocol): def outReceived(self, data): self.lw.dataReceived(data) def errReceived(self, data): print("ERR: '%s'" % (data,)) class LogWatcher(LineOnlyReceiver): POLL_INTERVAL = 0.1 TIMEOUT_DELAY = 10.0 delimiter = unicode2bytes(os.linesep) def __init__(self, logfile, timeout=None, _reactor=reactor): self.logfile = logfile self.in_reconfig = False self.transport = FakeTransport() self.pp = TailProcess() self.pp.lw = self self.timer = None self._reactor = _reactor self._timeout_delay = timeout or self.TIMEOUT_DELAY def start(self): # If the log file doesn't exist, create it now. if not os.path.exists(self.logfile): open(self.logfile, 'a').close() # return a Deferred that fires when the reconfig process has # finished. It errbacks with TimeoutError if the startup has not # progressed for 10 seconds, and with ReconfigError if the error # line was seen. 
If the logfile could not be opened, it errbacks with # an IOError. if platform.system().lower() == 'sunos' and os.path.exists('/usr/xpg4/bin/tail'): tailBin = "/usr/xpg4/bin/tail" else: tailBin = "/usr/bin/tail" args = ("tail", "-f", "-n", "0", self.logfile) self.p = self._reactor.spawnProcess(self.pp, tailBin, args, env=os.environ) self.running = True d = defer.maybeDeferred(self._start) return d def _start(self): self.d = defer.Deferred() self.startTimer() return self.d def startTimer(self): self.timer = self._reactor.callLater(self._timeout_delay, self.timeout) def timeout(self): # was the timeout set to be ignored? if so, restart it if not self.timer: self.startTimer() return self.timer = None e = BuildmasterTimeoutError() self.finished(Failure(e)) def finished(self, results): try: self.p.signalProcess("KILL") except error.ProcessExitedAlready: pass if self.timer: self.timer.cancel() self.timer = None self.running = False self.in_reconfig = False self.d.callback(results) def lineReceived(self, line): if not self.running: return if b"Log opened." in line: self.in_reconfig = True if b"beginning configuration update" in line: self.in_reconfig = True if self.in_reconfig: print(line.decode()) # certain lines indicate progress, so we "cancel" the timeout # and it will get re-added when it fires PROGRESS_TEXT = [b'Starting BuildMaster', b'Loading configuration from', b'added builder', b'adding scheduler', b'Loading builder', b'Starting factory'] for progressText in PROGRESS_TEXT: if progressText in line: self.timer = None break if b"message from master: attached" in line: return self.finished("worker") if b"reconfig aborted" in line or b'reconfig partially applied' in line: return self.finished(Failure(ReconfigError())) if b"Server Shut Down" in line: return self.finished(Failure(ReconfigError())) if b"configuration update complete" in line: return self.finished("buildmaster") if b"BuildMaster is running" in line: return self.finished("buildmaster") if b"BuildMaster startup failed" in line: return self.finished(Failure(BuildmasterStartupError())) buildbot-2.6.0/master/buildbot/scripts/reconfig.py000066400000000000000000000060401361162603000222550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import platform import signal from twisted.internet import reactor from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.util import in_reactor from buildbot.util import rewrap class Reconfigurator: rc = 0 def run(self, basedir, quiet): # Returns "Microsoft" for Vista and "Windows" for other versions if platform.system() in ("Windows", "Microsoft"): print("Reconfig (through SIGHUP) is not supported on Windows.") return with open(os.path.join(basedir, "twistd.pid"), "rt") as f: self.pid = int(f.read().strip()) if quiet: os.kill(self.pid, signal.SIGHUP) return # keep reading twistd.log. Display all messages between "loading # configuration from ..." and "configuration update complete" or # "I will keep using the previous config file instead.", or until # 10 seconds have elapsed. self.sent_signal = False reactor.callLater(0.2, self.sighup) lw = LogWatcher(os.path.join(basedir, "twistd.log")) d = lw.start() d.addCallbacks(self.success, self.failure) d.addBoth(lambda _: self.rc) return d def sighup(self): if self.sent_signal: return print("sending SIGHUP to process %d" % self.pid) self.sent_signal = True os.kill(self.pid, signal.SIGHUP) def success(self, res): print("Reconfiguration appears to have completed successfully") def failure(self, why): self.rc = 1 if why.check(BuildmasterTimeoutError): print("Never saw reconfiguration finish.") elif why.check(ReconfigError): print(rewrap("""\ Reconfiguration failed. Please inspect the master.cfg file for errors, correct them, then try 'buildbot reconfig' again. """)) elif why.check(IOError): # we were probably unable to open the file in the first place self.sighup() else: print("Error while following twistd.log: %s" % why) @in_reactor def reconfig(config): basedir = config['basedir'] quiet = config['quiet'] r = Reconfigurator() return r.run(basedir, quiet) buildbot-2.6.0/master/buildbot/scripts/restart.py000066400000000000000000000021321361162603000221430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.scripts import base from buildbot.scripts import start from buildbot.scripts import stop def restart(config): basedir = config['basedir'] quiet = config['quiet'] if not base.isBuildmasterDir(basedir): return 1 if stop.stop(config, wait=True) != 0: return 1 if not quiet: print("now restarting buildbot process..") return start.start(config) buildbot-2.6.0/master/buildbot/scripts/runner.py000066400000000000000000000712671361162603000220070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # N.B.: don't import anything that might pull in a reactor yet. Some of our # subcommands want to load modules that need the gtk reactor. # # Also don't forget to mirror your changes on command-line options in manual # pages and reStructuredText documentation. import getpass import sys import textwrap import sqlalchemy as sa from twisted.python import reflect from twisted.python import usage import buildbot from buildbot.scripts import base from buildbot.util import check_functional_environment # Note that the terms 'options' and 'config' are used interchangeably here - in # fact, they are interchanged several times. Caveat legator. def validateMasterOption(master): """ Validate master (-m, --master) command line option. Checks that option is a string of the 'hostname:port' form, otherwise raises an UsageError exception. @type master: string @param master: master option @raise usage.UsageError: on invalid master option """ try: hostname, port = master.split(":") port = int(port) except (TypeError, ValueError): raise usage.UsageError("master must have the form 'hostname:port'") class UpgradeMasterOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.upgrade_master.upgradeMaster" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["develop", "d", "link to buildbot dir rather than copy, with no " "JS optimization (UNIX only)"], ["replace", "r", "Replace any modified files without confirmation."], ] optParameters = [ ] def getSynopsis(self): return "Usage: buildbot upgrade-master [options] []" longdesc = textwrap.dedent(""" This command takes an existing buildmaster working directory and adds/modifies the files there to work with the current version of buildbot. When this command is finished, the buildmaster directory should look much like a brand-new one created by the 'create-master' command. Use this after you've upgraded your buildbot installation and before you restart the buildmaster to use the new version. If you have modified the files in your working directory, this command will leave them untouched, but will put the new recommended contents in a .new file (for example, if index.html has been modified, this command will create index.html.new). You can then look at the new version and decide how to merge its contents into your modified file. When upgrading the database, this command uses the database specified in the master configuration file. If you wish to use a database other than the default (sqlite), be sure to set that parameter before upgrading. 
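    For example, an upgrade of a master whose base directory is
    /var/lib/buildbot/master (an illustrative path only) might be run as:

        buildbot stop /var/lib/buildbot/master
        buildbot upgrade-master /var/lib/buildbot/master
        buildbot start /var/lib/buildbot/master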
""") class CreateMasterOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.create_master.createMaster" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["force", "f", "Re-use an existing directory (will not overwrite master.cfg file)"], ["relocatable", "r", "Create a relocatable buildbot.tac"], ["develop", "d", "link to buildbot dir rather than copy, with no " "JS optimization (UNIX only)"], ["no-logrotate", "n", "Do not permit buildmaster rotate logs by itself"] ] optParameters = [ ["config", "c", "master.cfg", "name of the buildmaster config file"], ["log-size", "s", 10000000, "size at which to rotate twisted log files", int], ["log-count", "l", 10, "limit the number of kept old twisted log files"], ["db", None, "sqlite:///state.sqlite", "which DB to use for scheduler/status state. See below for syntax."], ] def getSynopsis(self): return "Usage: buildbot create-master [options] []" longdesc = textwrap.dedent(""" This command creates a buildmaster working directory and buildbot.tac file. The master will live in (defaults to the current directory) and create various files there. If --relocatable is given, then the resulting buildbot.tac file will be written such that its containing directory is assumed to be the basedir. This is generally a good idea. At runtime, the master will read a configuration file (named 'master.cfg' by default) in its basedir. This file should contain python code which eventually defines a dictionary named 'BuildmasterConfig'. The elements of this dictionary are used to configure the Buildmaster. See doc/config.xhtml for details about what can be controlled through this interface. The --db string is evaluated to build the DB object, which specifies which database the buildmaster should use to hold scheduler state and status information. The default (which creates an SQLite database in BASEDIR/state.sqlite) is equivalent to: --db='sqlite:///state.sqlite' To use a remote MySQL database instead, use something like: --db='mysql://bbuser:bbpasswd@dbhost/bbdb' The --db string is stored verbatim in the buildbot.tac file, and evaluated at 'buildbot start' time to pass a DBConnector instance into the newly-created BuildMaster object. 
""") def postOptions(self): super().postOptions() # validate 'log-count' parameter if self['log-count'] == 'None': self['log-count'] = None else: try: self['log-count'] = int(self['log-count']) except ValueError: raise usage.UsageError( "log-count parameter needs to be an int or None") # validate 'db' parameter try: # check if sqlalchemy will be able to parse specified URL sa.engine.url.make_url(self['db']) except sa.exc.ArgumentError: raise usage.UsageError("could not parse database URL '%s'" % self['db']) class StopOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.stop.stop" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["clean", "c", "Clean shutdown master"], ["no-wait", None, "Don't wait for complete master shutdown"], ] def getSynopsis(self): return "Usage: buildbot stop []" class RestartOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.restart.restart" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, "Don't daemonize (stay in foreground)"], ["clean", "c", "Clean shutdown master"], ] optParameters = [ ['start_timeout', None, None, 'The amount of time the script waits for the master to restart until ' 'it declares the operation as failure'], ] def getSynopsis(self): return "Usage: buildbot restart []" class StartOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.start.start" optFlags = [ ['quiet', 'q', "Don't display startup log messages"], ['nodaemon', None, "Don't daemonize (stay in foreground)"], ] optParameters = [ ['start_timeout', None, None, 'The amount of time the script waits for the master to start until it ' 'declares the operation as failure'], ] def getSynopsis(self): return "Usage: buildbot start []" class ReconfigOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.reconfig.reconfig" optFlags = [ ['quiet', 'q', "Don't display log messages about reconfiguration"], ] def getSynopsis(self): return "Usage: buildbot reconfig []" class SendChangeOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.sendchange.sendchange" def __init__(self): super().__init__() self['properties'] = {} optParameters = [ ("master", "m", None, "Location of the buildmaster's PBChangeSource (host:port)"), # deprecated in 0.8.3; remove in 0.8.5 (bug #1711) ("auth", "a", 'change:changepw', "Authentication token - username:password, or prompt for password"), ("who", "W", None, "Author of the commit"), ("repository", "R", '', "Repository specifier"), ("vc", "s", None, "The VC system in use, one of: cvs, svn, darcs, hg, " "bzr, git, mtn, p4"), ("project", "P", '', "Project specifier"), ("branch", "b", None, "Branch specifier"), ("category", "C", None, "Category of repository"), ("codebase", None, None, "Codebase this change is in (requires 0.8.7 master or later)"), ("revision", "r", None, "Revision specifier"), ("revision_file", None, None, "Filename containing revision spec"), ("property", "p", None, "A property for the change, in the format: name:value"), ("comments", "c", None, "log message"), ("logfile", "F", None, "Read the log messages from this file (- for stdin)"), ("when", "w", None, "timestamp to use as the change time"), ("revlink", "l", '', "Revision link (revlink)"), ("encoding", "e", 'utf8', "Encoding of other parameters"), ] buildbotOptions = [ ['master', 'master'], ['who', 'who'], ['branch', 'branch'], ['category', 'category'], ['vc', 'vc'], ] 
requiredOptions = ['who', 'master'] def getSynopsis(self): return "Usage: buildbot sendchange [options] filenames.." def parseArgs(self, *args): self['files'] = args def opt_property(self, property): name, value = property.split(':', 1) self['properties'][name] = value def postOptions(self): super().postOptions() if self.get("revision_file"): with open(self["revision_file"], "r") as f: self['revision'] = f.read() if self.get('when'): try: self['when'] = float(self['when']) except (TypeError, ValueError): raise usage.UsageError('invalid "when" value %s' % (self['when'],)) else: self['when'] = None if not self.get('comments') and self.get('logfile'): if self['logfile'] == "-": self['comments'] = sys.stdin.read() else: with open(self['logfile'], "rt") as f: self['comments'] = f.read() if self.get('comments') is None: self['comments'] = "" # fix up the auth with a password if none was given auth = self.get('auth') if ':' not in auth: pw = getpass.getpass("Enter password for '%s': " % auth) auth = "%s:%s" % (auth, pw) self['auth'] = tuple(auth.split(':', 1)) vcs = ['cvs', 'svn', 'darcs', 'hg', 'bzr', 'git', 'mtn', 'p4'] if self.get('vc') and self.get('vc') not in vcs: raise usage.UsageError("vc must be one of %s" % (', '.join(vcs))) validateMasterOption(self.get('master')) class TryOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.trycmd.trycmd" optParameters = [ ["connect", "c", None, "How to reach the buildmaster, either 'ssh' or 'pb'"], # for ssh, use --host, --username, --jobdir and optionally # --ssh ["host", None, None, "Hostname (used by ssh) for the buildmaster"], ["port", None, None, "Port (used by ssh) for the buildmaster"], ["jobdir", None, None, "Directory (on the buildmaster host) where try jobs are deposited"], ["ssh", None, None, "Command to use instead of the default \"ssh\""], ["username", "u", None, "Username performing the try build"], # for PB, use --master, --username, and --passwd ["master", "m", None, "Location of the buildmaster's Try server (host:port)"], ["passwd", None, None, "Password for PB authentication"], ["who", "w", None, "Who is responsible for the try build"], ["comment", "C", None, "A comment which can be used in notifications for this build"], # for ssh to accommodate running in a virtualenv on the buildmaster ["buildbotbin", None, "buildbot", "buildbot binary to use on the buildmaster host"], ["diff", None, None, "Filename of a patch to use instead of scanning a local tree. " "Use '-' for stdin."], ["patchlevel", "p", 0, "Number of slashes to remove from patch pathnames, " "like the -p option to 'patch'"], ["baserev", None, None, "Base revision to use instead of scanning a local tree."], ["vc", None, None, "The VC system in use, one of: bzr, cvs, darcs, git, hg, " "mtn, p4, svn"], ["branch", None, None, "The branch in use, for VC systems that can't figure it out " "themselves"], ["repository", None, None, "Repository to use, instead of path to working directory."], ["builder", "b", None, "Run the trial build on this Builder. Can be used multiple times."], ["properties", None, None, "A set of properties made available in the build environment, " "format is --properties=prop1=value1,prop2=value2,.. " "option can be specified multiple times."], ["property", None, None, "A property made available in the build environment, " "format:prop=value. Can be used multiple times."], ["topfile", None, None, "Name of a file at the top of the tree, used to find the top. 
" "Only needed for SVN and CVS."], ["topdir", None, None, "Path to the top of the working copy. Only needed for SVN and CVS."], ] optFlags = [ ["wait", None, "wait until the builds have finished"], ["dryrun", 'n', "Gather info, but don't actually submit."], ["get-builder-names", None, "Get the names of available builders. Doesn't submit anything. " "Only supported for 'pb' connections."], ["quiet", "q", "Don't print status of current builds while waiting."], ] # Mapping of .buildbot/options names to command-line options buildbotOptions = [ ['try_connect', 'connect'], # [ 'try_builders', 'builders' ], <-- handled in postOptions ['try_vc', 'vc'], ['try_branch', 'branch'], ['try_repository', 'repository'], ['try_topdir', 'topdir'], ['try_topfile', 'topfile'], ['try_host', 'host'], ['try_username', 'username'], ['try_jobdir', 'jobdir'], ['try_ssh', 'ssh'], ['try_buildbotbin', 'buildbotbin'], ['try_passwd', 'passwd'], ['try_master', 'master'], ['try_who', 'who'], ['try_comment', 'comment'], # [ 'try_wait', 'wait' ], <-- handled in postOptions # [ 'try_quiet', 'quiet' ], <-- handled in postOptions # Deprecated command mappings from the quirky old days: ['try_masterstatus', 'master'], ['try_dir', 'jobdir'], ['try_password', 'passwd'], ] def __init__(self): super().__init__() self['builders'] = [] self['properties'] = {} def opt_builder(self, option): self['builders'].append(option) def opt_properties(self, option): # We need to split the value of this option # into a dictionary of properties propertylist = option.split(",") for prop in propertylist: splitproperty = prop.split("=", 1) self['properties'][splitproperty[0]] = splitproperty[1] def opt_property(self, option): name, _, value = option.partition("=") self['properties'][name] = value def opt_patchlevel(self, option): self['patchlevel'] = int(option) def getSynopsis(self): return "Usage: buildbot try [options]" def postOptions(self): super().postOptions() opts = self.optionsFile if not self['builders']: self['builders'] = opts.get('try_builders', []) if opts.get('try_wait', False): self['wait'] = True if opts.get('try_quiet', False): self['quiet'] = True # get the global 'masterstatus' option if it's set and no master # was specified otherwise if not self['master']: self['master'] = opts.get('masterstatus', None) if self['connect'] == 'pb': if not self['master']: raise usage.UsageError("master location must be specified" "for 'pb' connections") validateMasterOption(self['master']) class TryServerOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.tryserver.tryserver" optParameters = [ ["jobdir", None, None, "the jobdir (maildir) for submitting jobs"], ] requiredOptions = ['jobdir'] def getSynopsis(self): return "Usage: buildbot tryserver [options]" def postOptions(self): if not self['jobdir']: raise usage.UsageError('jobdir is required') class CheckConfigOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.checkconfig.checkconfig" optFlags = [ ['quiet', 'q', "Don't display error messages or tracebacks"], ] # on tab completion, suggest files as first argument if hasattr(usage, 'Completions'): # only set completion suggestion if running with # twisted version (>=11.1.0) that supports it compData = usage.Completions(extraActions=[usage.CompleteFiles()]) def getSynopsis(self): return "Usage:\t\tbuildbot checkconfig [configFile]\n" + \ "\t\tIf not specified, the config file specified in " + \ "'buildbot.tac' from the current directory will be used" def parseArgs(self, *args): if len(args) >= 1: 
self['configFile'] = args[0] class UserOptions(base.SubcommandOptions): subcommandFunction = "buildbot.scripts.user.user" optParameters = [ ["master", "m", None, "Location of the buildmaster's user service (host:port)"], ["username", "u", None, "Username for PB authentication"], ["passwd", "p", None, "Password for PB authentication"], ["op", None, None, "User management operation: add, remove, update, get"], ["bb_username", None, None, "Username to set for a given user. Only available on 'update', " "and bb_password must be given as well."], ["bb_password", None, None, "Password to set for a given user. Only available on 'update', " "and bb_username must be given as well."], ["ids", None, None, "User's identifiers, used to find users in 'remove' and 'get' " "Can be specified multiple times (--ids=id1,id2,id3)"], ["info", None, None, "User information in the form: --info=type=value,type=value,.. " "Used in 'add' and 'update', can be specified multiple times. " "Note that 'update' requires --info=id:type=value..."] ] buildbotOptions = [ ['master', 'master'], ['user_master', 'master'], ['user_username', 'username'], ['user_passwd', 'passwd'], ] requiredOptions = ['master'] longdesc = textwrap.dedent(""" Currently implemented types for --info= are:\n git, svn, hg, cvs, darcs, bzr, email """) def __init__(self): super().__init__() self['ids'] = [] self['info'] = [] def opt_ids(self, option): id_list = option.split(",") self['ids'].extend(id_list) def opt_info(self, option): # splits info into type/value dictionary, appends to info info_list = option.split(",") info_elem = {} if len(info_list) == 1 and '=' not in info_list[0]: info_elem["identifier"] = info_list[0] self['info'].append(info_elem) else: for info_item in info_list: split_info = info_item.split("=", 1) # pull identifier from update --info if ":" in split_info[0]: split_id = split_info[0].split(":") info_elem["identifier"] = split_id[0] split_info[0] = split_id[1] info_elem[split_info[0]] = split_info[1] self['info'].append(info_elem) def getSynopsis(self): return "Usage: buildbot user [options]" def _checkValidTypes(self, info): from buildbot.process.users import users valid = set(['identifier', 'email'] + users.srcs) for user in info: for attr_type in user: if attr_type not in valid: raise usage.UsageError( "Type not a valid attr_type, must be in: %s" % ', '.join(valid)) def postOptions(self): super().postOptions() validateMasterOption(self.get('master')) op = self.get('op') if not op: raise usage.UsageError("you must specify an operation: add, " "remove, update, get") if op not in ['add', 'remove', 'update', 'get']: raise usage.UsageError("bad op %r, use 'add', 'remove', 'update', " "or 'get'" % op) if not self.get('username') or not self.get('passwd'): raise usage.UsageError("A username and password must be given") bb_username = self.get('bb_username') bb_password = self.get('bb_password') if bb_username or bb_password: if op != 'update': raise usage.UsageError("bb_username and bb_password only work " "with update") if not bb_username or not bb_password: raise usage.UsageError("Must specify both bb_username and " "bb_password or neither.") info = self.get('info') ids = self.get('ids') # check for erroneous args if not info and not ids: raise usage.UsageError("must specify either --ids or --info") if op in ('add', 'update'): if ids: raise usage.UsageError("cannot use --ids with 'add' or " "'update'") self._checkValidTypes(info) if op == 'update': for user in info: if 'identifier' not in user: raise usage.UsageError("no ids found 
in update info; " "use: --info=id:type=value,type=value,..") if op == 'add': for user in info: if 'identifier' in user: raise usage.UsageError("identifier found in add info, " "use: --info=type=value,type=value,..") if op in ('remove', 'get'): if info: raise usage.UsageError("cannot use --info with 'remove' " "or 'get'") class DataSpecOption(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.dataspec.dataspec" optParameters = [ ['out', 'o', "dataspec.json", "output to specified path"], ['global', 'g', None, "output a js script, that sets a global, for inclusion in testsuite"], ] def getSynopsis(self): return "Usage: buildbot dataspec [options]" class DevProxyOptions(base.BasedirMixin, base.SubcommandOptions): """Run a fake web server serving the local ui frontend and a distant rest and websocket api. This command required aiohttp to be installed in the virtualenv""" subcommandFunction = "buildbot.scripts.devproxy.devproxy" optFlags = [ ["unsafe_ssl", None, "Bypass ssl certificate validation"], ] optParameters = [ ["port", "p", 8011, "http port to use"], ["plugins", None, None, "plugin config to use. As json string e.g: --plugins='{\"custom_plugin\": {\"option1\": true}}'"], ["auth_cookie", None, None, "TWISTED_SESSION cookie to be used for auth (taken in developer console: in document.cookie variable)"], ["buildbot_url", "b", "https://buildbot.buildbot.net", "real buildbot url to proxy to (can be http or https)"] ] class CleanupDBOptions(base.BasedirMixin, base.SubcommandOptions): subcommandFunction = "buildbot.scripts.cleanupdb.cleanupDatabase" optFlags = [ ["quiet", "q", "Do not emit the commands being run"], ["force", "f", "Force log recompression (useful when changing compression algorithm)"], # when this command has several maintenance jobs, we should make # them optional here. For now there is only one. ] optParameters = [ ] def getSynopsis(self): return "Usage: buildbot cleanupdb [options] []" longdesc = textwrap.dedent(""" This command takes an existing buildmaster working directory and do some optimization on the database. This command is frontend for various database maintenance jobs: - optimiselogs: This optimization groups logs into bigger chunks to apply higher level of compression. This command uses the database specified in the master configuration file. If you wish to use a database other than the default (sqlite), be sure to set that parameter before upgrading. 
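    For example, to force recompression of all build logs for a master whose
    base directory is /var/lib/buildbot/master (an illustrative path only),
    one might run:

        buildbot cleanupdb --force /var/lib/buildbot/master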
""") class Options(usage.Options): synopsis = "Usage: buildbot [command options]" subCommands = [ ['create-master', None, CreateMasterOptions, "Create and populate a directory for a new buildmaster"], ['upgrade-master', None, UpgradeMasterOptions, "Upgrade an existing buildmaster directory for the current version"], ['start', None, StartOptions, "Start a buildmaster"], ['stop', None, StopOptions, "Stop a buildmaster"], ['restart', None, RestartOptions, "Restart a buildmaster"], ['reconfig', None, ReconfigOptions, "SIGHUP a buildmaster to make it re-read the config file"], ['sighup', None, ReconfigOptions, "SIGHUP a buildmaster to make it re-read the config file"], ['sendchange', None, SendChangeOptions, "Send a change to the buildmaster"], ['try', None, TryOptions, "Run a build with your local changes"], ['tryserver', None, TryServerOptions, "buildmaster-side 'try' support function, not for users"], ['checkconfig', None, CheckConfigOptions, "test the validity of a master.cfg config file"], ['user', None, UserOptions, "Manage users in buildbot's database"], ['dataspec', None, DataSpecOption, "Output data api spec"], ['dev-proxy', None, DevProxyOptions, "Run a fake web server serving the local ui frontend and a distant rest and websocket api." ], ['cleanupdb', None, CleanupDBOptions, "cleanup the database" ] ] def opt_version(self): print("Buildbot version: %s" % buildbot.version) super().opt_version() def opt_verbose(self): from twisted.python import log log.startLogging(sys.stderr) def postOptions(self): if not hasattr(self, 'subOptions'): raise usage.UsageError("must specify a command") def run(): config = Options() check_functional_environment(buildbot.config) try: config.parseOptions(sys.argv[1:]) except usage.error as e: print("%s: %s" % (sys.argv[0], e)) print() c = getattr(config, 'subOptions', config) print(str(c)) sys.exit(1) subconfig = config.subOptions subcommandFunction = reflect.namedObject(subconfig.subcommandFunction) sys.exit(subcommandFunction(subconfig)) buildbot-2.6.0/master/buildbot/scripts/sample.cfg000066400000000000000000000077171361162603000220650ustar00rootroot00000000000000# -*- python -*- # ex: set filetype=python: from buildbot.plugins import * # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. # This is the dictionary that the buildmaster pays attention to. We also use # a shorter alias to save typing. c = BuildmasterConfig = {} ####### WORKERS # The 'workers' list defines the set of recognized workers. Each element is # a Worker object, specifying a unique worker name and password. The same # worker name and password must be configured on the worker. c['workers'] = [worker.Worker("example-worker", "pass")] # 'protocols' contains information about protocols which master will use for # communicating with workers. You must define at least 'port' option that workers # could connect to your master with this protocol. # 'port' must match the value configured into the workers (with their # --master option) c['protocols'] = {'pb': {'port': 9989}} ####### CHANGESOURCES # the 'change_source' setting tells the buildmaster how it should find out # about source code changes. Here we point to the buildbot version of a python hello-world project. 
c['change_source'] = [] c['change_source'].append(changes.GitPoller( 'git://github.com/buildbot/hello-world.git', workdir='gitpoller-workdir', branch='master', pollInterval=300)) ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. In this # case, just kick off a 'runtests' build c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=None, builderNames=["runtests"])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests"])) ####### BUILDERS # The 'builders' list defines the Builders, which tell Buildbot how to perform a build: # what steps, and which workers can execute them. Note that any particular build will # only take place on one worker. factory = util.BuildFactory() # check out the source factory.addStep(steps.Git(repourl='git://github.com/buildbot/hello-world.git', mode='incremental')) # run the tests (note that this will require that 'trial' is installed) factory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests", workernames=["example-worker"], factory=factory)) ####### BUILDBOT SERVICES # 'services' is a list of BuildbotService items like reporter targets. The # status of each build will be pushed to these targets. buildbot/reporters/*.py # has a variety to choose from, like IRC bots. c['services'] = [] ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). c['title'] = "Hello World CI" c['titleURL'] = "https://buildbot.github.io/hello-world/" # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. c['buildbotURL'] = "http://localhost:8010/" # minimalistic config to activate new web UI c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={})) ####### DB URL c['db'] = { # This specifies what database buildbot uses to store its state. # It's easy to start with sqlite, but it's recommended to switch to a dedicated # database, such as PostgreSQL or MySQL, for use in production environments. # http://docs.buildbot.net/current/manual/configuration/global.html#database-specification 'db_url' : "sqlite:///state.sqlite", } buildbot-2.6.0/master/buildbot/scripts/sendchange.py000066400000000000000000000040121361162603000225550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sys import traceback from twisted.internet import defer from buildbot.clients import sendchange as sendchange_client from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def sendchange(config): encoding = config.get('encoding', 'utf8') who = config.get('who') auth = config.get('auth') master = config.get('master') branch = config.get('branch') category = config.get('category') revision = config.get('revision') properties = config.get('properties', {}) repository = config.get('repository', '') vc = config.get('vc', None) project = config.get('project', '') revlink = config.get('revlink', '') when = config.get('when') comments = config.get('comments') files = config.get('files', ()) codebase = config.get('codebase', None) s = sendchange_client.Sender(master, auth, encoding=encoding) try: yield s.send(branch, revision, comments, files, who=who, category=category, when=when, properties=properties, repository=repository, vc=vc, project=project, revlink=revlink, codebase=codebase) except Exception: print("change not sent:") traceback.print_exc(file=sys.stdout) return 1 else: print("change sent successfully") return 0 buildbot-2.6.0/master/buildbot/scripts/start.py000066400000000000000000000143541361162603000216250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import textwrap from twisted.internet import protocol from twisted.internet import reactor from twisted.python.runtime import platformType from buildbot.scripts import base from buildbot.scripts.logwatcher import BuildmasterStartupError from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.util import rewrap class Follower: def follow(self, basedir, timeout=None): self.rc = 0 self._timeout = timeout if timeout else 10.0 print("Following twistd.log until startup finished..") lw = LogWatcher(os.path.join(basedir, "twistd.log"), timeout=self._timeout) d = lw.start() d.addCallbacks(self._success, self._failure) reactor.run() return self.rc def _success(self, _): print("The buildmaster appears to have (re)started correctly.") self.rc = 0 reactor.stop() def _failure(self, why): if why.check(BuildmasterTimeoutError): print(rewrap("""\ The buildmaster took more than {0} seconds to start, so we were unable to confirm that it started correctly. Please 'tail twistd.log' and look for a line that says 'BuildMaster is running' to verify correct startup. """.format(self._timeout))) elif why.check(ReconfigError): print(rewrap("""\ The buildmaster appears to have encountered an error in the master.cfg config file during startup. Please inspect and fix master.cfg, then restart the buildmaster. 
""")) elif why.check(BuildmasterStartupError): print(rewrap("""\ The buildmaster startup failed. Please see 'twistd.log' for possible reason. """)) else: print(rewrap("""\ Unable to confirm that the buildmaster started correctly. You may need to stop it, fix the config file, and restart. """)) print(why) self.rc = 1 reactor.stop() def launchNoDaemon(config): os.chdir(config['basedir']) sys.path.insert(0, os.path.abspath(config['basedir'])) argv = ["twistd", "--no_save", "--nodaemon", "--logfile=twistd.log", # windows doesn't use the same default "--python=buildbot.tac"] if platformType != 'win32': # windows doesn't use pidfile option. argv.extend(["--pidfile="]) sys.argv = argv # this is copied from bin/twistd. twisted-2.0.0 through 2.4.0 use # _twistw.run . Twisted-2.5.0 and later use twistd.run, even for # windows. from twisted.scripts import twistd twistd.run() def launch(config): os.chdir(config['basedir']) sys.path.insert(0, os.path.abspath(config['basedir'])) # see if we can launch the application without actually having to # spawn twistd, since spawning processes correctly is a real hassle # on windows. argv = [sys.executable, "-c", # this is copied from bin/twistd. twisted-2.0.0 through 2.4.0 use # _twistw.run . Twisted-2.5.0 and later use twistd.run, even for # windows. "from twisted.scripts import twistd; twistd.run()", "--no_save", "--logfile=twistd.log", # windows doesn't use the same default "--python=buildbot.tac"] # ProcessProtocol just ignores all output proc = reactor.spawnProcess( protocol.ProcessProtocol(), sys.executable, argv, env=os.environ) if platformType == "win32": with open("twistd.pid", "w") as pidfile: pidfile.write("{0}".format(proc.pid)) def py2Warning(config): if sys.version[0] == '2' and not config['quiet']: print(textwrap.dedent("""\ WARNING: You are running Buildbot with Python 2.7.x ! ----------------------------------------------------- Python 2 is going unmaintained as soon as 2020: https://pythonclock.org/ To prepare for that transition, we recommend upgrading your buildmaster to run on Python 3.6 now! Buildbot open source project is as well deprecating running buildmaster on Python 2 for better maintainability. Buildbot 2.0 going to be released in February 2019 will remove support for Python < 3.5 https://github.com/buildbot/buildbot/issues/4439 On most installations, switching to Python 3 can be accomplished by running the 2to3 tool over the master.cfg file. https://docs.python.org/3.7/library/2to3.html Note that the above applies only for the buildmaster. Workers will still support running under Python 2.7. Additionally, the buildmaster still supports workers using old versions of Buildbot. """)) def start(config): if not base.isBuildmasterDir(config['basedir']): return 1 py2Warning(config) if config['nodaemon']: launchNoDaemon(config) return 0 launch(config) # We don't have tail on windows if platformType == "win32" or config['quiet']: return 0 # this is the parent timeout = config.get('start_timeout', None) if timeout is not None: try: timeout = float(timeout) except ValueError: print('Start timeout must be a number') return 1 rc = Follower().follow(config['basedir'], timeout=timeout) return rc buildbot-2.6.0/master/buildbot/scripts/stop.py000066400000000000000000000044751361162603000214600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import errno import os import signal import time from twisted.python.runtime import platformType from buildbot.scripts import base def stop(config, signame="TERM", wait=None): basedir = config['basedir'] quiet = config['quiet'] if wait is None: wait = not config['no-wait'] if config['clean']: signame = 'USR1' if not base.isBuildmasterDir(config['basedir']): return 1 pidfile = os.path.join(basedir, 'twistd.pid') try: with open(pidfile, "rt") as f: pid = int(f.read().strip()) except Exception: if not config['quiet']: print("buildmaster not running") return 0 signum = getattr(signal, "SIG" + signame) try: os.kill(pid, signum) except OSError as e: if e.errno != errno.ESRCH and platformType != "win32": raise if not config['quiet']: print("buildmaster not running") try: os.unlink(pidfile) except OSError: pass return 0 if not wait: if not quiet: print("sent SIG%s to process" % signame) return 0 time.sleep(0.1) # poll once per second until twistd.pid goes away, up to 10 seconds, # unless we're doing a clean stop, in which case wait forever count = 0 while count < 10 or config['clean']: try: os.kill(pid, 0) except OSError: if not quiet: print("buildbot process %d is dead" % pid) return 0 time.sleep(1) count += 1 if not quiet: print("never saw process go away") return 1 buildbot-2.6.0/master/buildbot/scripts/trycmd.py000066400000000000000000000014711361162603000217660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def trycmd(config): from buildbot.clients import tryclient t = tryclient.Try(config) t.run() return 0 buildbot-2.6.0/master/buildbot/scripts/tryserver.py000066400000000000000000000027051361162603000225320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys import time from hashlib import md5 from buildbot.util import unicode2bytes def tryserver(config): jobdir = os.path.expanduser(config["jobdir"]) job = sys.stdin.read() # now do a 'safecat'-style write to jobdir/tmp, then move atomically to # jobdir/new . Rather than come up with a unique name randomly, I'm just # going to MD5 the contents and prepend a timestamp. timestring = "%d" % time.time() m = md5() job = unicode2bytes(job) m.update(job) jobhash = m.hexdigest() fn = "%s-%s" % (timestring, jobhash) tmpfile = os.path.join(jobdir, "tmp", fn) newfile = os.path.join(jobdir, "new", fn) with open(tmpfile, "wb") as f: f.write(job) os.rename(tmpfile, newfile) return 0 buildbot-2.6.0/master/buildbot/scripts/upgrade_master.py000066400000000000000000000120121361162603000234570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import signal import sys import traceback from twisted.internet import defer from twisted.python import util from buildbot import monkeypatches from buildbot.db import connector from buildbot.master import BuildMaster from buildbot.scripts import base from buildbot.util import in_reactor from buildbot.util import stripUrlPassword def installFile(config, target, source, overwrite=False): with open(source, "rt") as f: new_contents = f.read() if os.path.exists(target): with open(target, "rt") as f: old_contents = f.read() if old_contents != new_contents: if overwrite: if not config['quiet']: print("%s has old/modified contents" % target) print(" overwriting it with new contents") with open(target, "wt") as f: f.write(new_contents) else: if not config['quiet']: print("%s has old/modified contents" % target) print(" writing new contents to %s.new" % target) with open(target + ".new", "wt") as f: f.write(new_contents) # otherwise, it's up to date else: if not config['quiet']: print("creating %s" % target) with open(target, "wt") as f: f.write(new_contents) def upgradeFiles(config): if not config['quiet']: print("upgrading basedir") webdir = os.path.join(config['basedir'], "public_html") if os.path.exists(webdir): print("Notice: public_html is not used starting from Buildbot 0.9.0") print(" consider using third party HTTP server for serving " "static files") installFile(config, os.path.join(config['basedir'], "master.cfg.sample"), util.sibpath(__file__, "sample.cfg"), overwrite=True) @defer.inlineCallbacks def upgradeDatabase(config, master_cfg): if not config['quiet']: print("upgrading database (%s)" % (stripUrlPassword(master_cfg.db['db_url']))) print("Warning: Stopping this process might cause data loss") def sighandler(signum, frame): msg = " ".join(""" WARNING: ignoring signal %s. This process should not be interrupted to avoid database corruption. If you really need to terminate it, use SIGKILL. 
""".split()) print(msg % signum) prev_handlers = {} try: for signame in ("SIGTERM", "SIGINT", "SIGQUIT", "SIGHUP", "SIGUSR1", "SIGUSR2", "SIGBREAK"): if hasattr(signal, signame): signum = getattr(signal, signame) prev_handlers[signum] = signal.signal(signum, sighandler) master = BuildMaster(config['basedir']) master.config = master_cfg master.db.disownServiceParent() db = connector.DBConnector(basedir=config['basedir']) yield db.setServiceParent(master) yield db.setup(check_version=False, verbose=not config['quiet']) yield db.model.upgrade() yield db.masters.setAllMastersActiveLongTimeAgo() finally: # restore previous signal handlers for signum, handler in prev_handlers.items(): signal.signal(signum, handler) @in_reactor def upgradeMaster(config, _noMonkey=False): if not _noMonkey: # pragma: no cover monkeypatches.patch_all() if not base.checkBasedir(config): return defer.succeed(1) os.chdir(config['basedir']) try: configFile = base.getConfigFileFromTac(config['basedir']) except (SyntaxError, ImportError): print("Unable to load 'buildbot.tac' from '%s':" % config['basedir'], file=sys.stderr) e = traceback.format_exc() print(e, file=sys.stderr) return defer.succeed(1) master_cfg = base.loadConfig(config, configFile) if not master_cfg: return defer.succeed(1) return _upgradeMaster(config, master_cfg) @defer.inlineCallbacks def _upgradeMaster(config, master_cfg): try: upgradeFiles(config) yield upgradeDatabase(config, master_cfg) except Exception: e = traceback.format_exc() print("problem while upgrading!:\n" + e, file=sys.stderr) return 1 else: if not config['quiet']: print("upgrade complete") return 0 buildbot-2.6.0/master/buildbot/scripts/user.py000066400000000000000000000032101361162603000214330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.clients import usersclient from buildbot.process.users import users from buildbot.util import in_reactor @in_reactor @defer.inlineCallbacks def user(config): master = config.get('master') op = config.get('op') username = config.get('username') passwd = config.get('passwd') master, port = master.split(":") port = int(port) bb_username = config.get('bb_username') bb_password = config.get('bb_password') if bb_username or bb_password: bb_password = users.encrypt(bb_password) info = config.get('info') ids = config.get('ids') # find identifier if op == add if info and op == 'add': for user in info: user['identifier'] = sorted(user.values())[0] uc = usersclient.UsersClient(master, username, passwd, port) output = yield uc.send(op, bb_username, bb_password, ids, info) if output: print(output) return 0 buildbot-2.6.0/master/buildbot/scripts/windows_service.py000077500000000000000000000535651361162603000237140ustar00rootroot00000000000000# pylint: disable=import-outside-toplevel # # Runs the build-bot as a Windows service. 
# To use: # * Install and configure buildbot as per normal (i.e., running # 'setup.py install' from the source directory). # # * Configure any number of build-bot directories (workers or masters), as # per the buildbot instructions. Test these directories normally by # using the (possibly modified) "buildbot.bat" file and ensure everything # is working as expected. # # * Install the buildbot service. Execute the command: # % buildbot_windows_service # To see installation options. You probably want to specify: # + --username and --password options to specify the user to run the service as # + --startup auto to have the service start at boot time. # # For example: # % buildbot_windows_service --user mark --password secret \ # --startup auto install # Alternatively, you could execute: # % buildbot_windows_service install # to install the service with default options, then use Control Panel # to configure it. # # * Start the service specifying the name of all buildbot directories as # service args. This can be done in one of two ways: # - Execute the command: # % buildbot_windows_service start "dir_name1" "dir_name2" # or: # - Start Control Panel->Administrative Tools->Services # - Locate the previously installed buildbot service. # - Open the "properties" for the service. # - Enter the directory names into the "Start Parameters" textbox. The # directory names must be fully qualified, and surrounded in quotes if # they include spaces. # - Press the "Start" button. # Note that the service will automatically use the previously specified # directories if no arguments are specified. This means the directories # need only be specified when the directories to use have changed (and # therefore also the first time buildbot is configured). # # * The service should now be running. You should check the Windows # event log. If all goes well, you should see some information messages # telling you the buildbot has successfully started. # # * If you change the buildbot configuration, you must restart the service. # There is currently no way to ask a running buildbot to reload the # config. You can restart by executing: # % buildbot_windows_service restart # # Troubleshooting: # * Check the Windows event log for any errors. # * Check the "twistd.log" file in your buildbot directories - once each # bot has been started it just writes to this log as normal. # * Try executing: # % python buildbot_service.py debug # This will execute the buildbot service in "debug" mode, and allow you to # see all messages etc generated. If the service works in debug mode but # not as a real service, the error probably relates to the environment or # permissions of the user configured to run the service (debug mode runs as # the currently logged in user, not the service user). # * Ensure you have the latest pywin32 build available, at least version 206. # Written by Mark Hammond, 2006. import os import sys import threading import pywintypes import servicemanager import win32api import win32con import win32event import win32file import win32pipe import win32process import win32security import win32service import win32serviceutil import winerror # Are we running in a py2exe environment? is_frozen = hasattr(sys, "frozen") # Taken from the Zope service support - each "child" is run as a sub-process # (trying to run multiple twisted apps in the same process is likely to screw up # stdout redirection etc).
# Note that unlike the Zope service, we do *not* attempt to detect a failed # client and perform restarts - buildbot itself does a good job # at reconnecting, and Windows itself provides restart semantics should # everything go pear-shaped. # We execute a new thread that captures the tail of the output from our child # process. If the child fails, it is written to the event log. # This process is unconditional, and the output is never written to disk # (except obviously via the event log entry) # Size of the blocks we read from the child process's output. CHILDCAPTURE_BLOCK_SIZE = 80 # The number of BLOCKSIZE blocks we keep as process output. CHILDCAPTURE_MAX_BLOCKS = 200 class BBService(win32serviceutil.ServiceFramework): _svc_name_ = 'BuildBot' _svc_display_name_ = _svc_name_ _svc_description_ = 'Manages local buildbot workers and masters - ' \ 'see http://buildbot.net' def __init__(self, args): super().__init__(args) # Create an event which we will use to wait on. The "service stop" # request will set this event. # * We must make it inheritable so we can pass it to the child # process via the cmd-line # * Must be manual reset so each child process and our service # all get woken from a single set of the event. sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True self.hWaitStop = win32event.CreateEvent(sa, True, False, None) self.args = args self.dirs = None self.runner_prefix = None # Patch up the service messages file in a frozen exe. # (We use the py2exe option that magically bundles the .pyd files # into the .zip file - so servicemanager.pyd doesn't exist.) if is_frozen and servicemanager.RunningAsService(): msg_file = os.path.join(os.path.dirname(sys.executable), "buildbot.msg") if os.path.isfile(msg_file): servicemanager.Initialize("BuildBot", msg_file) else: self.warning("Strange - '%s' does not exist" % (msg_file, )) def _checkConfig(self): # Locate our child process runner (but only when run from source) if not is_frozen: # Running from source python_exe = os.path.join(sys.prefix, "python.exe") if not os.path.isfile(python_exe): # for ppl who build Python itself from source. python_exe = os.path.join(sys.prefix, "PCBuild", "python.exe") if not os.path.isfile(python_exe): # virtualenv support python_exe = os.path.join(sys.prefix, "Scripts", "python.exe") if not os.path.isfile(python_exe): self.error("Can not find python.exe to spawn subprocess") return False me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = me[:-1] self.runner_prefix = '"%s" "%s"' % (python_exe, me) else: # Running from a py2exe built executable - our child process is # us (but with the funky cmdline args!) self.runner_prefix = '"' + sys.executable + '"' # Now our arg processing - this may be better handled by a # twisted/buildbot style config file - but as of time of writing, # MarkH is clueless about such things! # Note that the "arguments" you type into Control Panel for the # service do *not* persist - they apply only when you click "start" # on the service. When started by Windows, args are never presented. # Thus, it is the responsibility of the service to persist any args. # so, when args are presented, we save them as a "custom option". If # they are not presented, we load them from the option. 
self.dirs = [] if len(self.args) > 1: dir_string = os.pathsep.join(self.args[1:]) save_dirs = True else: dir_string = win32serviceutil.GetServiceCustomOption(self, "directories") save_dirs = False if not dir_string: self.error("You must specify the buildbot directories as " "parameters to the service.\nStopping the service.") return False dirs = dir_string.split(os.pathsep) for d in dirs: d = os.path.abspath(d) sentinal = os.path.join(d, "buildbot.tac") if os.path.isfile(sentinal): self.dirs.append(d) else: msg = "Directory '%s' is not a buildbot dir - ignoring" \ % (d, ) self.warning(msg) if not self.dirs: self.error("No valid buildbot directories were specified.\n" "Stopping the service.") return False if save_dirs: dir_string = os.pathsep.join(self.dirs) win32serviceutil.SetServiceCustomOption(self, "directories", dir_string) return True def SvcStop(self): # Tell the SCM we are starting the stop process. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Set the stop event - the main loop takes care of termination. win32event.SetEvent(self.hWaitStop) # SvcStop only gets triggered when the user explicitly stops (or restarts) # the service. To shut the service down cleanly when Windows is shutting # down, we also need to hook SvcShutdown. SvcShutdown = SvcStop def SvcDoRun(self): if not self._checkConfig(): # stopped status set by caller. return self.logmsg(servicemanager.PYS_SERVICE_STARTED) child_infos = [] for bbdir in self.dirs: self.info("Starting BuildBot in directory '%s'" % (bbdir, )) hstop = self.hWaitStop cmd = '%s --spawn %d start --nodaemon %s' % ( self.runner_prefix, hstop, bbdir) # print "cmd is", cmd h, t, output = self.createProcess(cmd) child_infos.append((bbdir, h, t, output)) while child_infos: handles = [self.hWaitStop] + [i[1] for i in child_infos] rc = win32event.WaitForMultipleObjects(handles, 0, # bWaitAll win32event.INFINITE) if rc == win32event.WAIT_OBJECT_0: # user sent a stop service request break # A child process died. For now, just log the output # and forget the process. index = rc - win32event.WAIT_OBJECT_0 - 1 bbdir, dead_handle, _, output_blocks = \ child_infos[index] status = win32process.GetExitCodeProcess(dead_handle) output = "".join(output_blocks) if not output: output = ("The child process generated no output. " "Please check the twistd.log file in the " "indicated directory.") self.warning("BuildBot for directory %r terminated with " "exit code %d.\n%s" % (bbdir, status, output)) del child_infos[index] if not child_infos: self.warning("All BuildBot child processes have " "terminated. Service stopping.") # Either no child processes left, or stop event set. self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # The child processes should have also seen our stop signal # so wait for them to terminate. for bbdir, h, t, output in child_infos: for i in range(10): # 30 seconds to shutdown... self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) rc = win32event.WaitForSingleObject(h, 3000) if rc == win32event.WAIT_OBJECT_0: break # Process terminated - no need to try harder. if rc == win32event.WAIT_OBJECT_0: break self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # If necessary, kill it if win32process.GetExitCodeProcess(h) == win32con.STILL_ACTIVE: self.warning("BuildBot process at %r failed to terminate - " "killing it" % (bbdir, )) win32api.TerminateProcess(h, 3) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # Wait for the redirect thread - it should have died as the remote # process terminated. 
# As we are shutting down, we do the join with a little more care, # reporting progress as we wait (even though we never will ) for i in range(5): t.join(1) self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) if not t.isAlive(): break else: self.warning("Redirect thread did not stop!") # All done. self.logmsg(servicemanager.PYS_SERVICE_STOPPED) # # Error reporting/logging functions. # def logmsg(self, event): # log a service event using servicemanager.LogMsg try: servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, event, (self._svc_name_, " (%s)" % self._svc_display_name_)) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write INFO event", event, ":", details) except IOError: # No valid stdout! Ignore it. pass def _dolog(self, func, msg): try: func(msg) except win32api.error as details: # Failed to write a log entry - most likely problem is # that the event log is full. We don't want this to kill us try: print("FAILED to write event log entry:", details) print(msg) except IOError: pass def info(self, s): self._dolog(servicemanager.LogInfoMsg, s) def warning(self, s): self._dolog(servicemanager.LogWarningMsg, s) def error(self, s): self._dolog(servicemanager.LogErrorMsg, s) # Functions that spawn a child process, redirecting any output. # Although buildbot itself does this, it is very handy to debug issues # such as ImportErrors that happen before buildbot has redirected. def createProcess(self, cmd): hInputRead, hInputWriteTemp = self.newPipe() hOutReadTemp, hOutWrite = self.newPipe() pid = win32api.GetCurrentProcess() # This one is duplicated as inheritable. hErrWrite = win32api.DuplicateHandle(pid, hOutWrite, pid, 0, 1, win32con.DUPLICATE_SAME_ACCESS) # These are non-inheritable duplicates. hOutRead = self.dup(hOutReadTemp) hInputWrite = self.dup(hInputWriteTemp) # dup() closed hOutReadTemp, hInputWriteTemp si = win32process.STARTUPINFO() si.hStdInput = hInputRead si.hStdOutput = hOutWrite si.hStdError = hErrWrite si.dwFlags = win32process.STARTF_USESTDHANDLES | \ win32process.STARTF_USESHOWWINDOW si.wShowWindow = win32con.SW_HIDE # pass True to allow handles to be inherited. Inheritance is # problematic in general, but should work in the controlled # circumstances of a service process. create_flags = win32process.CREATE_NEW_CONSOLE # info is (hProcess, hThread, pid, tid) info = win32process.CreateProcess(None, cmd, None, None, True, create_flags, None, None, si) # (NOTE: these really aren't necessary for Python - they are closed # as soon as they are collected) hOutWrite.Close() hErrWrite.Close() hInputRead.Close() # We don't use stdin hInputWrite.Close() # start a thread collecting output blocks = [] t = threading.Thread(target=self.redirectCaptureThread, args=(hOutRead, blocks)) t.start() return info[0], t, blocks def redirectCaptureThread(self, handle, captured_blocks): # One of these running per child process we are watching. It # handles both stdout and stderr on a single handle. The read data is # never referenced until the thread dies - so no need for locks # around self.captured_blocks. # self.info("Redirect thread starting") while True: try: ec, data = win32file.ReadFile(handle, CHILDCAPTURE_BLOCK_SIZE) except pywintypes.error as err: # ERROR_BROKEN_PIPE means the child process closed the # handle - ie, it terminated. 
if err[0] != winerror.ERROR_BROKEN_PIPE: self.warning("Error reading output from process: %s" % err) break captured_blocks.append(data) del captured_blocks[CHILDCAPTURE_MAX_BLOCKS:] handle.Close() # self.info("Redirect capture thread terminating") def newPipe(self): sa = win32security.SECURITY_ATTRIBUTES() sa.bInheritHandle = True return win32pipe.CreatePipe(sa, 0) def dup(self, pipe): # create a duplicate handle that is not inherited, so that # it can be closed in the parent. close the original pipe in # the process. pid = win32api.GetCurrentProcess() dup = win32api.DuplicateHandle(pid, pipe, pid, 0, 0, win32con.DUPLICATE_SAME_ACCESS) pipe.Close() return dup # Service registration and startup def RegisterWithFirewall(exe_name, description): # Register our executable as an exception with Windows Firewall. # taken from http://msdn.microsoft.com/library/default.asp?url=\ # /library/en-us/ics/ics/wf_adding_an_application.asp from win32com.client import Dispatch # Scope NET_FW_SCOPE_ALL = 0 # IP Version - ANY is the only allowable setting for now NET_FW_IP_VERSION_ANY = 2 fwMgr = Dispatch("HNetCfg.FwMgr") # Get the current profile for the local firewall policy. profile = fwMgr.LocalPolicy.CurrentProfile app = Dispatch("HNetCfg.FwAuthorizedApplication") app.ProcessImageFileName = exe_name app.Name = description app.Scope = NET_FW_SCOPE_ALL # Use either Scope or RemoteAddresses, but not both # app.RemoteAddresses = "*" app.IpVersion = NET_FW_IP_VERSION_ANY app.Enabled = True # Use this line if you want to add the app, but disabled. # app.Enabled = False profile.AuthorizedApplications.Add(app) # A custom install function. def CustomInstall(opts): # Register this process with the Windows Firewall import pythoncom try: RegisterWithFirewall(sys.executable, "BuildBot") except pythoncom.com_error as why: print("FAILED to register with the Windows firewall") print(why) # Magic code to allow shutdown. Note that this code is executed in # the *child* process, by way of the service process executing us with # special cmdline args (which includes the service stop handle!) def _RunChild(runfn): del sys.argv[1] # The --spawn arg. # Create a new thread that just waits for the event to be signalled. t = threading.Thread(target=_WaitForShutdown, args=(int(sys.argv[1]), ) ) del sys.argv[1] # The stop handle # This child process will be sent a console handler notification as # users log off, or as the system shuts down. We want to ignore these # signals as the service parent is responsible for our shutdown. def ConsoleHandler(what): # We can ignore *everything* - ctrl+c will never be sent as this # process is never attached to a console the user can press the # key in! return True win32api.SetConsoleCtrlHandler(ConsoleHandler, True) t.setDaemon(True) # we don't want to wait for this to stop! t.start() if hasattr(sys, "frozen"): # py2exe sets this env vars that may screw our child process - reset del os.environ["PYTHONPATH"] # Start the buildbot/worker app runfn() print("Service child process terminating normally.") def _WaitForShutdown(h): win32event.WaitForSingleObject(h, win32event.INFINITE) print("Shutdown requested") from twisted.internet import reactor reactor.callLater(0, reactor.stop) def DetermineRunner(bbdir): '''Checks if the given directory is a worker or a master and returns the appropriate run function.''' tacfile = os.path.join(bbdir, 'buildbot.tac') if not os.path.exists(tacfile): # No tac-file - use master runner by default. 
import buildbot.scripts.runner return buildbot.scripts.runner.run with open(tacfile, 'r') as f: contents = f.read() try: if 'import Worker' in contents: import buildbot_worker.scripts.runner return buildbot_worker.scripts.runner.run except ImportError: # Not a worker. pass try: if 'import BuildSlave' in contents: import buildslave.scripts.runner return buildslave.scripts.runner.run except ImportError: # Not an old buildslave. pass # Treat as master by default. import buildbot.scripts.runner return buildbot.scripts.runner.run # This function is also called by the py2exe startup code. def HandleCommandLine(): if len(sys.argv) > 1 and sys.argv[1] == "--spawn": # Special command-line created by the service to execute the # child-process. # First arg is the handle to wait on # Fourth arg is the config directory to use for the buildbot/worker _RunChild(DetermineRunner(sys.argv[5])) else: win32serviceutil.HandleCommandLine(BBService, customOptionHandler=CustomInstall) if __name__ == '__main__': HandleCommandLine() buildbot-2.6.0/master/buildbot/secrets/000077500000000000000000000000001361162603000200705ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/secrets/__init__.py000066400000000000000000000013011361162603000221740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-2.6.0/master/buildbot/secrets/manager.py000066400000000000000000000027741361162603000220660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ manage providers and handle secrets """ from twisted.internet import defer from buildbot.secrets.secret import SecretDetails from buildbot.util import service class SecretManager(service.BuildbotServiceManager): """ Secret manager """ name = 'secrets' config_attr = "secretsProviders" @defer.inlineCallbacks def get(self, secret, *args, **kwargs): """ get secrets from the provider defined in the secret using args and kwargs @secrets: secrets keys @type: string @return type: SecretDetails """ for provider in self.services: value = yield provider.get(secret) source_name = provider.__class__.__name__ if value is not None: return SecretDetails(source_name, secret, value) buildbot-2.6.0/master/buildbot/secrets/providers/000077500000000000000000000000001361162603000221055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/secrets/providers/__init__.py000066400000000000000000000013011361162603000242110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-2.6.0/master/buildbot/secrets/providers/base.py000066400000000000000000000017731361162603000234010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ secret provider interface """ import abc from buildbot.util.service import BuildbotService class SecretProviderBase(BuildbotService): """ Secret provider base """ @abc.abstractmethod def get(self, *args, **kwargs): """ this should be an abstract method """ buildbot-2.6.0/master/buildbot/secrets/providers/file.py000066400000000000000000000063131361162603000234010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ file based provider """ import os import stat from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase class SecretInAFile(SecretProviderBase): """ secret is stored in a separate file under the given directory name """ name = "SecretInAFile" def checkFileIsReadOnly(self, dirname, secretfile): filepath = os.path.join(dirname, secretfile) obs_stat = stat.S_IMODE(os.stat(filepath).st_mode) if (obs_stat & 0o77) != 0 and os.name == "posix": config.error("Permissions %s on file %s are too open." " It is required that your secret files are NOT" " accessible by others!" % (oct(obs_stat), secretfile)) def checkSecretDirectoryIsAvailableAndReadable(self, dirname, suffixes): if not os.access(dirname, os.F_OK): config.error("directory %s does not exists" % dirname) for secretfile in os.listdir(dirname): for suffix in suffixes: if secretfile.endswith(suffix): self.checkFileIsReadOnly(dirname, secretfile) def loadSecrets(self, dirname, suffixes, strip): secrets = {} for secretfile in os.listdir(dirname): secretvalue = None for suffix in suffixes: if secretfile.endswith(suffix): with open(os.path.join(dirname, secretfile)) as source: secretvalue = source.read() if suffix: secretfile = secretfile[:-len(suffix)] if strip: secretvalue = secretvalue.rstrip("\r\n") secrets[secretfile] = secretvalue return secrets def checkConfig(self, dirname, suffixes=None, strip=True): self._dirname = dirname if suffixes is None: suffixes = [""] self.checkSecretDirectoryIsAvailableAndReadable(dirname, suffixes=suffixes) def reconfigService(self, dirname, suffixes=None, strip=True): self._dirname = dirname self.secrets = {} if suffixes is None: suffixes = [""] self.secrets = self.loadSecrets(self._dirname, suffixes=suffixes, strip=strip) def get(self, entry): """ get the value from the file identified by 'entry' """ return self.secrets.get(entry) buildbot-2.6.0/master/buildbot/secrets/providers/passwordstore.py000066400000000000000000000043761361162603000254100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ password store based provider """ import os from pathlib import Path from twisted.internet import defer from twisted.internet import utils from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase class SecretInPass(SecretProviderBase): """ secret is stored in a password store """ name = "SecretInPass" def checkPassIsInPath(self): if not any([(Path(p) / "pass").is_file() for p in os.environ["PATH"].split(":")]): config.error("pass does not exist in PATH") def checkPassDirectoryIsAvailableAndReadable(self, dirname): if not os.access(dirname, os.F_OK): config.error("directory %s does not exist" % dirname) def checkConfig(self, gpgPassphrase=None, dirname=None): self.checkPassIsInPath() if dirname: self.checkPassDirectoryIsAvailableAndReadable(dirname) def reconfigService(self, gpgPassphrase=None, dirname=None): self._env = {**os.environ} if gpgPassphrase: self._env["PASSWORD_STORE_GPG_OPTS"] = "--passphrase %s" % gpgPassphrase if dirname: self._env["PASSWORD_STORE_DIR"] = dirname @defer.inlineCallbacks def get(self, entry): """ get the value from pass identified by 'entry' """ try: output = yield utils.getProcessOutput( "pass", args=[entry], env=self._env ) return output.decode("utf-8", "ignore").splitlines()[0] except IOError: return None buildbot-2.6.0/master/buildbot/secrets/providers/vault.py000066400000000000000000000063051361162603000236160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members """ vault based providers """ from twisted.internet import defer from buildbot import config from buildbot.secrets.providers.base import SecretProviderBase from buildbot.util import httpclientservice class HashiCorpVaultSecretProvider(SecretProviderBase): """ basic provider where each secret is stored in Vault KV secret engine """ name = 'SecretInVault' def checkConfig(self, vaultServer=None, vaultToken=None, secretsmount=None, apiVersion=1): if not isinstance(vaultServer, str): config.error("vaultServer must be a string while it is %s" % (type(vaultServer,))) if not isinstance(vaultToken, str): config.error("vaultToken must be a string while it is %s" % (type(vaultToken,))) if apiVersion not in [1, 2]: config.error("apiVersion %s is not supported" % apiVersion) @defer.inlineCallbacks def reconfigService(self, vaultServer=None, vaultToken=None, secretsmount=None, apiVersion=1): if secretsmount is None: self.secretsmount = "secret" else: self.secretsmount = secretsmount self.vaultServer = vaultServer self.vaultToken = vaultToken self.apiVersion = apiVersion if vaultServer.endswith('/'): vaultServer = vaultServer[:-1] self._http = yield httpclientservice.HTTPClientService.getService( self.master, self.vaultServer, headers={'X-Vault-Token': self.vaultToken}) @defer.inlineCallbacks def get(self, entry): """ get the value from vault secret backend """ if self.apiVersion == 1: path = self.secretsmount + '/' + entry else: path = self.secretsmount + '/data/' + entry # note that the HTTP path contains v1 for both versions of the key-value # secret engine. Different versions of the key-value engine are # effectively separate secret engines in vault, with the same base HTTP # API, but with different paths within it. proj = yield self._http.get('/v1/{0}'.format(path)) code = yield proj.code if code != 200: raise KeyError("The key %s does not exist in Vault provider: request" " return code:%d." % (entry, code)) json = yield proj.json() if self.apiVersion == 1: ret = json.get('data', {}).get('value') else: ret = json.get('data', {}).get('data', {}).get('value') return ret buildbot-2.6.0/master/buildbot/secrets/secret.py000066400000000000000000000031331361162603000217270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class SecretDetails: """ A SecretDetails object has secrets attributes: - source: provider where the secret was retrieved - key: secret key identifier - value: secret value """ def __init__(self, source, key, value): self._source = source self._value = value self._key = key @property def source(self): """ source of the secret """ return self._source @property def value(self): """ secret value """ return self._value @property def key(self): """ secret value """ return self._key def __str__(self): return '{} {}: {!r}'.format(self._source, self._key, self.value) def __eq__(self, other): return (self._source == other._source and self.key == other.key and self.value == other.value) buildbot-2.6.0/master/buildbot/spec/000077500000000000000000000000001361162603000173525ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/spec/api.raml000066400000000000000000000643171361162603000210130ustar00rootroot00000000000000#%RAML 1.0 title: Buildbot Web API version: v2 mediaType: application/json traits: bbget: responses: 200: body: application/json: type: responseObjects.libraries.types.<> 404: body: text/plain: example: "not found" bbpost: body: type: <> responses: 200: body: application/json: type: <> 404: body: text/plain: example: "not found" bbgetraw: responses: 200: headers: content-disposition: description: content disposition header allows browser to save log file with proper filename example: attachment; filename=stdio body: text/html: description: "html data if the object is html" text/plain: description: "plain text data if the object is text" types: build: !include types/build.raml builder: !include types/builder.raml buildrequest: !include types/buildrequest.raml buildset: !include types/buildset.raml worker: !include types/worker.raml change: !include types/change.raml changesource: !include types/changesource.raml forcescheduler: !include types/forcescheduler.raml identifier: !include types/identifier.raml log: !include types/log.raml logchunk: !include types/logchunk.raml master: !include types/master.raml rootlink: !include types/rootlink.raml scheduler: !include types/scheduler.raml sourcedproperties: !include types/sourcedproperties.raml sourcestamp: !include types/sourcestamp.raml patch: !include types/patch.raml spec: !include types/spec.raml step: !include types/step.raml /: get: is: - bbget: {bbtype: rootlink} /application.spec: get: is: - bbget: {bbtype: spec} /builders: description: This path selects all builders get: is: - bbget: {bbtype: builder} /{builderid_or_buildername}: uriParameters: builderid_or_buildername: type: number | identifier description: the ID or name of the builder description: This path selects a builder by builderid get: is: - bbget: {bbtype: builder} /forceschedulers: description: This path selects all force-schedulers for a given builder get: is: - bbget: {bbtype: forcescheduler} /buildrequests: description: This path selects all buildrequests for a given builder (can return lots of data!) get: is: - bbget: {bbtype: buildrequest} /builds: description: This path selects all builds for a builder (can return lots of data!) get: is: - bbget: {bbtype: build} /{build_number}: uriParameters: build_number: type: number description: the number of the build within the builder description: This path selects a specific build by builderid, buildnumber get: is: - bbget: {bbtype: build} /actions/stop: post: description: | stops one build. 
body: application/json: properties: reason: type: string required: false description: The reason why the build was stopped results: type: integer required: false description: optionally results value override (default CANCELLED) /actions/rebuild: post: description: | rebuilds one build. body: application/json: description: no parameter are needed /steps: description: This path selects all steps for the given build. get: is: - bbget: {bbtype: step} /{step_name}: uriParameters: step_name: type: identifier description: the slug name of the step description: This path selects a specific step for the given build. get: is: - bbget: {bbtype: step} /logs: description: This path selects all logs for the given step. get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log get: description: | This path selects a specific log in the given step. is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This endpoint allows to get the raw logs for downloading into a file. This endpoint does not provide paging capabilities. For stream log types, the type line header characters are dropped. 'text/plain' is used as the mime type except for html logs, where 'text/html' is used. The 'slug' is used as the filename for the resulting download. Some browsers are appending ``".txt"`` or ``".html"`` to this filename according to the mime-type. is: - bbgetraw: /{step_number}: uriParameters: step_number: type: number description: the number of the step description: This path selects a specific step given its step number get: is: - bbget: {bbtype: step} /logs: description: This path selects all log of a a specific step get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log description: This path selects one log of a a specific step get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /workers: description: | This path selects all workers configured for a given builder get: is: - bbget: {bbtype: worker} /{name}: description: | This path selects a worker by name filtered by given builderid uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: | This path selects a worker by id filtered by given builderid uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /actions/stop: post: description: | gracefully shutdown one worker. body: application/json: properties: reason: type: string required: false description: The reason why the worker was stopped /actions/kill: post: description: | forcefully shutdown one worker. body: application/json: properties: reason: type: string required: false description: The reason why the worker was stopped /actions/pause: post: description: | Pause one worker. The worker will stop taking new build. body: application/json: properties: reason: type: string required: false description: The reason why the worker was paused /actions/unpause: post: description: | Unpause one worker. The worker will re-start taking builds. 
body: application/json: properties: reason: type: string required: false description: The reason why the worker was un-paused /masters: description: | This path selects all masters supporting a given builder get: is: - bbget: {bbtype: master} /{masterid}: uriParameters: masterid: type: number description: the id of the master description: | This path selects a master by id filtered by given builderid get: is: - bbget: {bbtype: master} /buildrequests: /{buildrequestid}: uriParameters: buildrequestid: type: number description: the id of the buildrequest get: is: - bbget: {bbtype: buildrequest} /builds: get: is: - bbget: {bbtype: build} /actions/cancel: post: description: | Cancel one buildrequest. If necessary, this will stop the builds generated by the buildrequest, including triggered builds. body: application/json: properties: reason: type: string required: false description: The reason why the buildrequest was cancelled get: is: - bbget: {bbtype: buildrequest} /builds: get: is: - bbget: {bbtype: build} /{buildid}: description: | This path selects a build by id uriParameters: buildid: type: number description: the id of the build get: is: - bbget: {bbtype: build} /actions/stop: post: description: | stops one build. body: application/json: properties: reason: type: string required: false description: The reason why the build was stopped /actions/rebuild: post: description: | rebuilds one build. body: application/json: description: no parameter are needed /changes: description: | This path selects all changes tested by a build get: is: - bbget: {bbtype: change} /properties: description: | This path selects all properties of a build get: is: - bbget: {bbtype: sourcedproperties} /steps: description: | This path selects all steps of a build get: is: - bbget: {bbtype: step} /{step_number_or_name}: uriParameters: step_number_or_name: type: identifier | number description: the name or number of the step description: | This path selects one step of a build get: is: - bbget: {bbtype: step} /logs: description: | This path selects all logs of a step of a build get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log description: This path selects one log of a a specific step get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /buildsets: description: this path selects all buildsets get: is: - bbget: {bbtype: buildset} /{bsid}: description: this path selects a buildset by id uriParameters: bsid: type: identifier description: the id of the buildset get: is: - bbget: {bbtype: buildset} /properties: description: | This path selects all properties of a buildset. Buildset properties is part of the initial properties of a build. get: is: - bbget: {bbtype: sourcedproperties} /workers: description: this path selects all workers get: is: - bbget: {bbtype: worker} /{name_or_id}: description: this path selects worker by name or id uriParameters: name_or_id: type: identifier | number description: the name or id of a worker get: is: - bbget: {bbtype: worker} /changes: description: | This path selects **all** changes. On a reasonably loaded master, this can quickly return a very large result, taking minutes to process. 
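As the note that follows points out, ordering by ``-changeid`` together with a ``limit`` keeps such a query cheap. A minimal client-side sketch using the ``requests`` library, assuming a master whose web server listens at ``http://localhost:8010`` (adjust the base URL for your installation)::

    import requests

    BASE_URL = "http://localhost:8010/api/v2"   # assumed master location

    def recent_changes(limit=50):
        # Fetch only the newest rows, so the master never has to serialize
        # the whole (potentially huge) changes collection.
        resp = requests.get(BASE_URL + "/changes",
                            params={"order": "-changeid", "limit": limit})
        resp.raise_for_status()
        return resp.json()["changes"]

    if __name__ == "__main__":
        for change in recent_changes(10):
            print(change["changeid"], change["author"])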
A specific query configuration is optimized which allows to get the recent changes: ``order:-changeid&limit=`` get: is: - bbget: {bbtype: change} /{changeid}: description: this path selects one change by id uriParameters: changeid: type: number description: the id of a change get: is: - bbget: {bbtype: change} /changesources: description: | This path selects all changesource. get: is: - bbget: {bbtype: changesource} /{changesourceid}: uriParameters: changesourceid: type: number description: the id of a changesource description: | This path selects one changesource given its id. get: is: - bbget: {bbtype: changesource} /forceschedulers: description: | This path selects all forceschedulers. get: is: - bbget: {bbtype: forcescheduler} /{schedulername}: description: | This path selects all changesource. uriParameters: schedulername: type: identifier description: the name of a scheduler get: is: - bbget: {bbtype: forcescheduler} /actions/force: post: description: | Triggers the forcescheduler body: application/json: properties: owner: type: string required: false description: The user who wants to create the buildrequest '[]': description: content of the forcescheduler parameter is dependent on the configuration of the forcescheduler /logs/{logid}: uriParameters: logid: type: number description: the id of the log description: This path selects one log get: is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: /masters: description: This path selects all masters get: is: - bbget: {bbtype: master} /{masterid}: description: This path selects one master given its id uriParameters: masterid: type: number description: the id of the master get: is: - bbget: {bbtype: master} /builders: description: This path selects all builders of a given master get: is: - bbget: {bbtype: builder} /{builderid}: description: This path selects one builder by id of a given master uriParameters: builderid: type: number description: the id of the builder get: is: - bbget: {bbtype: builder} /workers: description: This path selects all workers for a given builder and a given master get: is: - bbget: {bbtype: worker} /{name}: description: This path selects one workers by name for a given builder and a given master uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: This path selects one workers by name for a given builder and a given master uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /workers: description: This path selects all workers for a given master get: is: - bbget: {bbtype: worker} /{name}: description: This path selects one worker by name for a given master uriParameters: name: type: identifier description: the name of the worker get: is: - bbget: {bbtype: worker} /{workerid}: description: This path selects one worker by id for a given master uriParameters: workerid: type: number description: the id of the worker get: is: - bbget: {bbtype: worker} /changesources: description: This path selects all changesources for a given master get: is: - bbget: {bbtype: changesource} /{changesourceid}: description: This path selects one changesource by id for a given master get: is: - bbget: {bbtype: changesource} /schedulers: description: This path selects all schedulers for a given master get: is: - bbget: {bbtype: scheduler} 
/{schedulerid}: description: This path selects one scheduler by id for a given master uriParameters: schedulerid: type: number description: the id of the scheduler get: is: - bbget: {bbtype: scheduler} /schedulers: description: This path selects all schedulers get: is: - bbget: {bbtype: scheduler} /{schedulerid}: uriParameters: schedulerid: type: number description: the id of the scheduler description: This path selects one scheduler by id get: is: - bbget: {bbtype: scheduler} /sourcestamps: description: This path selects all sourcestamps (can return lots of data!) get: is: - bbget: {bbtype: sourcestamp} /{ssid}: description: This path selects one sourcestamp by id uriParameters: ssid: type: number description: the id of the sourcestamp get: is: - bbget: {bbtype: sourcestamp} /changes: description: This path selects all changes associated to one sourcestamp get: is: - bbget: {bbtype: change} /steps: /{stepid}: description: This path selects one step by id uriParameters: stepid: type: number description: the id of the step /logs: description: This path selects all logs for the given step. get: is: - bbget: {bbtype: log} /{log_slug}: uriParameters: log_slug: type: identifier description: the slug name of the log get: description: | This path selects a specific log in the given step. is: - bbget: {bbtype: log} /contents: get: description: | This path selects chunks from a specific log in the given step. is: - bbget: {bbtype: logchunk} /raw: get: description: | This path downloads the whole log is: - bbgetraw: buildbot-2.6.0/master/buildbot/spec/indent.py000066400000000000000000000014021361162603000212020ustar00rootroot00000000000000#!/usr/bin/python import re import sys spaces = re.compile("^ +") for fn in sys.argv[1:]: lines = [] with open(fn, 'r') as f: for line in f: lines.append(line) def getIndent(i): res = spaces.match(lines[i]) if res is None: return 0 return len(res.group(0)) def IndentBlock(i, numspaces): initIndent = getIndent(i) while i < len(lines) and initIndent <= getIndent(i): lines[i] = " " * numspaces + lines[i] i += 1 for i, line in enumerate(lines): missingIndent = 4 - (getIndent(i) % 4) if missingIndent != 4: IndentBlock(i, missingIndent) with open(fn, 'w') as f: for line in lines: f.write(line) buildbot-2.6.0/master/buildbot/spec/types/000077500000000000000000000000001361162603000205165ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/spec/types/build.raml000066400000000000000000000073731361162603000225040ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: Build description: | This resource type describes completed and in-progress builds. Much of the contextual data for a build is associated with the build request, and through it the buildset. .. note:: *properties* This properties dict is only filled out if the `properties filterspec` is set. Meaning that, `property filter` allows one to request the Builds DATA API like so: * api/v2/builds?property=propKey1&property=propKey2 (returns Build's properties which match given keys) * api/v2/builds?property=* (returns all Build's properties) * api/v2/builds?propKey1&property=propKey2&limit=30 (filters combination) .. important:: When combined with ``field`` filter, to get properties, one should ensure **properties** ``field`` is set. * api/v2/builds?field=buildid&field=properties&property=workername&property=user Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.builds.Build .. 
py:method:: newBuild(builderid, buildrequestid, workerid) :param integer builderid: builder performing this build :param integer buildrequstid: build request being built :param integer workerid: worker on which this build is performed :returns: (buildid, number) via Deferred Create a new build resource and return its ID. The state strings for the new build will be set to 'starting'. .. py:method:: setBuildStateString(buildid, state_string) :param integer buildid: the build to modify :param unicode state_string: new state string for this build Replace the existing state strings for a build with a new list. .. py:method:: finishBuild(buildid, results) :param integer buildid: the build to modify :param integer results: the build's results Mark the build as finished at the current time, with the given results. properties: buildid: description: the unique ID of this build type: integer number: description: the number of this build (sequential for a given builder) type: integer builderid: description: id of the builder for this build type: integer buildrequestid: description: build request for which this build was performed, or None if no such request exists type: integer workerid: description: the worker this build ran on type: integer masterid: description: the master this build ran on type: integer started_at: description: time at which this build started type: date complete: description: | true if this build is complete Note that this is a calculated field (from complete_at != None). Ordering by this field is not optimized by the database layer. type: boolean complete_at?: description: time at which this build was complete, or None if it's still running type: date properties?: description: a dictionary of properties attached to build. type: sourcedproperties results?: description: the results of the build (see :ref:`Build-Result-Codes`), or None if not complete type: integer state_string: description: a string giving detail on the state of the build. type: string type: object example: 'builderid': 10 buildid: 100 buildrequestid: 13 workerid: 20 complete: false complete_at: null masterid: 824 number: 1 results: null started_at: 1451001600 state_string: created properties: {} buildbot-2.6.0/master/buildbot/spec/types/builder.raml000066400000000000000000000022111361162603000230150ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a builder. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.builders.Builder .. py:method:: updateBuilderList(masterid, builderNames) :param integer masterid: this master's master ID :param list builderNames: list of names of currently-configured builders (unicode strings) :returns: Deferred Record the given builders as the currently-configured set of builders on this master. Masters should call this every time the list of configured builders changes. properties: builderid: description: the ID of this builder type: integer description?: description: The description for that builder type: string masterids[]: description: the ID of the masters this builder is running on type: integer name: description: builder name type: identifier tags[]: description: list of tags for this builder type: string type: object buildbot-2.6.0/master/buildbot/spec/types/buildrequest.raml000066400000000000000000000064371361162603000241150ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes completed and in-progress buildrequests. 
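The update methods listed under ``master.data.updates`` below (``claimBuildRequests``, ``completeBuildRequests``) are what a master-side component calls to take ownership of a request and record its outcome. A rough sketch under the assumption that it runs inside a service holding a ``master`` reference, with ``SUCCESS`` taken from ``buildbot.process.results``::

    from twisted.internet import defer

    from buildbot.process.results import SUCCESS


    @defer.inlineCallbacks
    def run_one_request(master, brid):
        # Claim the request so that no other master picks it up ...
        claimed = yield master.data.updates.claimBuildRequests([brid])
        if not claimed:
            return  # another master got there first

        # ... the actual work (starting a build, waiting for it) happens here ...

        # ... and record the final status once the work is done.
        yield master.data.updates.completeBuildRequests([brid], SUCCESS)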
Much of the contextual data for a buildrequest is associated with the buildset that contains this buildrequest. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.buildrequests.BuildRequest .. py:method:: claimBuildRequests(brids, claimed_at=None) :param list(integer) brids: list of buildrequest id to claim :param datetime claimed_at: date and time when the buildrequest is claimed :returns: (boolean) whether claim succeeded or not Claim a list of buildrequests .. py:method:: unclaimBuildRequests(brids) :param list(integer) brids: list of buildrequest id to unclaim Unclaim a list of buildrequests .. py:method:: completeBuildRequests(brids, results, complete_at=None) :param list(integer) brids: list of buildrequest id to complete :param integer results: the results of the buildrequest (see :ref:`Build-Result-Codes`) :param datetime complete_at: date and time when the buildrequest is completed Complete a list of buildrequest with the ``results`` status properties: buildrequestid: description: the unique ID of this buildrequest type: integer builderid: description: the id of the builder linked to this buildrequest type: integer buildsetid: description: the id of the buildset that contains this buildrequest type: integer claimed: description: | True if this buildrequest has been claimed. Note that this is a calculated field (from claimed_at != None). Ordering by this field is not optimized by the database layer. type: boolean claimed_at?: description: | time at which this build has last been claimed. None if this buildrequest has never been claimed or has been unclaimed type: date claimed_by_masterid?: description: | the id of the master that claimed this buildrequest. None if this buildrequest has never been claimed or has been unclaimed type: integer complete: description: true if this buildrequest is complete type: boolean complete_at?: description: | time at which this buildrequest was completed, or None if it's still running type: date priority: description: the priority of this buildrequest type: integer properties?: description: a dictionary of properties corresponding to buildrequest. type: sourcedproperties results?: description: | the results of this buildrequest (see :ref:`Build-Result-Codes`), or None if not complete type: integer submitted_at: description: time at which this buildrequest were submitted type: date waited_for: description: | True if the entity that triggered this buildrequest is waiting for it to complete. Should be used by an (unimplemented so far) clean shutdown to only start br that are waited_for. type: boolean type: object buildbot-2.6.0/master/buildbot/spec/types/buildset.raml000066400000000000000000000073451361162603000232170ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A buildset gathers build requests that were scheduled at the same time, and which share a source stamp, properties, and so on. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.buildsets.Buildset .. 
py:method:: addBuildset(scheduler=None, sourcestamps=[], reason='', properties={}, builderids=[], external_idstring=None, parent_buildid=None, parent_relationship=None) :param string scheduler: the name of the scheduler creating this buildset :param list sourcestamps: sourcestamps for the new buildset; see below :param unicode reason: the reason for this build :param properties: properties to set on this buildset :type properties: dictionary with unicode keys and (source, property value) values :param list builderids: names of the builderids for which build requests should be created :param unicode external_idstring: arbitrary identifier to recognize this buildset later :param int parent_buildid: optional build id that is the parent for this buildset :param unicode parent_relationship: relationship identifier for the parent, this is a configured relationship between the parent build, and the childs buildsets :returns: (buildset id, dictionary mapping builder ids to build request ids) via Deferred .. warning: The ``scheduler`` parameter will be replaced with a ``schedulerid`` parameter in future releases. Create a new buildset and corresponding buildrequests based on the given parameters. This is the low-level interface for scheduling builds. Each sourcestamp in the list of sourcestamps can be given either as an integer, assumed to be a sourcestamp ID, or a dictionary of keyword arguments to be passed to :py:meth:`~buildbot.db.sourcestamps.SourceStampsConnectorComponent.findSourceStampId`. .. py:method:: maybeBuildsetComplete(bsid) :param integer bsid: buildset that may be complete :returns: Deferred This method should be called when a build request is finished. It checks the given buildset to see if all of its buildrequests are finished. If so, it updates the status of the buildset and send the appropriate messages. properties: bsid: description: the ID of this buildset type: integer complete: description: true if all of the build requests in this buildset are complete type: boolean complete_at?: description: the time this buildset was completed, or None if not complete type: integer external_idstring?: description: | an identifier that external applications can use to identify a submitted buildset; can be None type: string parent_buildid?: description: optional build id that is the parent for this buildset type: integer parent_relationship?: description: | relationship identifier for the parent, this is a configured relationship between the parent build, and the childs buildsets type: string reason: description: the reason this buildset was scheduled type: string results?: description: the results of the buildset (see :ref:`Build-Result-Codes`), or None if not complete type: integer sourcestamps[]: description: | the sourcestamps for this buildset; each element is a valid :bb:rtype:`sourcestamp` entity type: sourcestamp submitted_at: description: the time this buildset was submitted type: integer type: object buildbot-2.6.0/master/buildbot/spec/types/change.raml000066400000000000000000000074411361162603000226260ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A change resource represents a change to the source code monitored by Buildbot. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.changes.Change .. 
py:method:: addChange(files=None, comments=None, author=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties={}, repository='', codebase=None, project='', src=None) :param files: a list of filenames that were changed :type files: list of unicode strings :param unicode comments: user comments on the change :param unicode author: the author of this change :param unicode revision: the revision identifier for this change :param integer when_timestamp: when this change occurred (seconds since the epoch), or the current time if None :param unicode branch: the branch on which this change took place :param unicode category: category for this change :param string revlink: link to a web view of this revision :param properties: properties to set on this change. Note that the property source is *not* included in this dictionary. :type properties: dictionary with unicode keys and simple values (JSON-able). :param unicode repository: the repository in which this change took place :param unicode project: the project this change is a part of :param unicode src: source of the change (vcs or other) :returns: the ID of the new change, via Deferred Add a new change to Buildbot. This method is the interface between change sources and the rest of Buildbot. All parameters should be passed as keyword arguments. All parameters labeled 'unicode' must be unicode strings and not bytestrings. Filenames in ``files``, and property names, must also be unicode strings. This is tested by the fake implementation. properties: changeid: description: the ID of this change type: integer author: description: | the author of the change in "name", "name " or just "email" (with @) format type: string branch?: description: | branch on which the change took place, or none for the "default branch", whatever that might mean type: string category?: description: user-defined category of this change, or none type: string codebase: description: codebase in this repository type: string comments: description: | user comments for this change (aka commit) type: string files[]: description: list of source-code filenames changed type: string parent_changeids[]: description: | The ID of the parents. The data api allow for several parents, but the core buildbot does not yet support type: integer project: description: user-defined project to which this change corresponds type: string properties: description: user-specified properties for this change, represented as an object mapping keys to tuple (value, source) type: sourcedproperties repository: description: repository where this change occurred type: string revision?: description: revision for this change, or none if unknown type: string revlink?: description: link to a web view of this change type: string sourcestamp: description: the sourcestamp resource for this change type: sourcestamp when_timestamp: description: time of the change type: integer type: object buildbot-2.6.0/master/buildbot/spec/types/changesource.raml000066400000000000000000000025331361162603000240440ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A changesource generates change objects, for example in response to an update in some repository. A particular changesource (by name) runs on at most one master at a time. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.changesources.ChangeSource .. 
py:method:: findChangeSourceId(name) :param string name: changesource name :returns: changesource ID via Deferred Get the ID for the given changesource name, inventing one if necessary. .. py:method:: trySetChangeSourceMaster(changesourceid, masterid) :param integer changesourceid: changesource ID to try to claim :param integer masterid: this master's master ID :returns: ``True`` or ``False``, via Deferred Try to claim the given scheduler for the given master and return ``True`` if the scheduler is to be activated on that master. properties: changesourceid: description: the ID of this changesource type: integer master?: description: the master on which this worker is running, or None if it is inactive type: master name: description: name of this changesource type: string type: object buildbot-2.6.0/master/buildbot/spec/types/forcescheduler.raml000066400000000000000000000015751361162603000244000ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A forcescheduler initiates builds, via a formular in the web UI. At the moment, forceschedulers must be defined on all the masters where a web ui is configured. A particular forcescheduler runs on the master where the web request was sent. .. note:: This datatype and associated endpoints will be deprecated when :bug:`2673` will be resolved. properties: all_fields[]: description: '' type: object builder_names[]: description: names of the builders that this scheduler can trigger type: identifier button_name: description: label of the button to use in the UI type: string label: description: label of this scheduler to be displayed in the ui type: string name: description: name of this scheduler type: identifier type: object buildbot-2.6.0/master/buildbot/spec/types/identifier.raml000066400000000000000000000001051361162603000235110ustar00rootroot00000000000000#%RAML 1.0 DataType pattern: ^[a-zA-Z_-][a-zA-Z0-9_-]*$ type: string buildbot-2.6.0/master/buildbot/spec/types/log.raml000066400000000000000000000062041361162603000221560ustar00rootroot00000000000000#%RAML 1.0 DataType displayName: Logs description: | A log represents a stream of textual output from a step. The actual output is encoded as a sequence of :bb:rtype:`logchunk` resources. In-progress logs append logchunks as new data is added to the end, and event subscription allows a client to "follow" the log. Each log has a "slug" which is unique within the step, and which can be used in paths. The slug is generated by :py:meth:`~buildbot.data.logs.Log.addLog` based on the name, using :py:func:`~buildbot.util.identifiers.forceIdentifier` and :py:func:`~buildbot.util.identifiers.incrementIdentifier` to guarantee uniqueness. .. todo:: .. bb:event:: build.$buildid.step.$number.log.$logid.newlog The log has just started. Logs are started when they are created, so this also indicates the creation of a new log. .. bb:event:: build.$buildid.step.$number.log.$logid.complete The log is complete. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.logs.Log .. py:method:: addLog(stepid, name, type) :param integer stepid: stepid containing this log :param string name: name for the log :raises KeyError: if a log by the given name already exists :returns: logid via Deferred Create a new log and return its ID. The name need not be unique. This method will generate a unique slug based on the name. .. 
py:method:: appendLog(logid, content): :param integer logid: the log to which content should be appended :param unicode content: the content to append Append the given content to the given log. The content must end with a newline. All newlines in the content should be UNIX-style (``\n``). .. py:method:: finishLog(logid) :param integer logid: the log to finish Mark the log as complete. .. py:method:: compressLog(logid) :param integer logid: the log to compress Compress the given log, after it is finished. This operation may take some time. properties: complete: description: true if this log is complete and will not generate additional logchunks type: boolean logid: description: the unique ID of this log type: integer name: description: the name of this log (e.g., ``err.html``) type: string num_lines: description: total number of line of this log type: integer slug: description: the "slug", suitable for use in a URL, of this log (e.g., ``err_html``) type: identifier stepid: description: id of the step containing this log type: integer type: description: log type, identified by a single ASCII letter; see :bb:rtype:`logchunk` for details. type: identifier type: object example: 'logid': 60 'name': 'stdio' 'slug': 'stdio' 'stepid': 50 'complete': false 'num_lines': 0 'type': 's' buildbot-2.6.0/master/buildbot/spec/types/logchunk.raml000066400000000000000000000053731361162603000232150ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A logchunk represents a contiguous sequence of lines in a logfile. Logs are not individually addressable in the data API; instead, they must be requested by line number range. In a strict REST sense, many logchunk resources will contain the same line. The chunk contents is represented as a single unicode string. This string is the concatenation of each newline terminated-line. Each log has a type, as identified by the "type" field of the corresponding :bb:rtype:`log`. While all logs are sequences of unicode lines, the type gives additional information fo interpreting the contents. The defined types are: * ``t`` -- text, a simple sequence of lines of text * ``s`` -- stdio, like text but with each line tagged with a stream * ``h`` -- HTML, represented as plain text * ``d`` -- Deleted, logchunks for this log have been deleted by the Janitor In the stream type, each line is prefixed by a character giving the stream type for that line. The types are ``i`` for input, ``o`` for stdout, ``e`` for stderr, and ``h`` for header. The first three correspond to normal UNIX standard streams, while the header stream contains metadata produced by Buildbot itself. The ``offset`` and ``limit`` parameters can be used to select the desired lines. These are specified as query parameters via the REST interface, or as arguments to the :py:meth:`~buildbot.data.connector.DataConnector.get` method in Python. The result will begin with line ``offset`` (so the resulting ``firstline`` will be equal to the given ``offset``), and will contain up to ``limit`` lines. Following example will get the first 100 lines of a log:: from buildbot.data import resultspec first_100_lines = yield self.master.data.get(("logs", log['logid'], "contents"), resultSpec=resultspec.ResultSpec(limit=100)) Following example will get the last 100 lines of a log:: from buildbot.data import resultspec last_100_lines = yield self.master.data.get(("logs", log['logid'], "contents"), resultSpec=resultspec.ResultSpec(offset=log['num_lines']-100)) .. note:: There is no event for a new chunk. 
Instead, the log resource is updated when new chunks are added, with the new number of lines. Consumers can then request those lines, if desired. Update Methods -------------- Log chunks are updated via :bb:rtype:`log`. properties: content: description: content of the chunk type: string firstline: description: zero-based line number of the first line in this chunk type: integer logid: description: the ID of log containing this chunk type: integer type: object buildbot-2.6.0/master/buildbot/spec/types/master.raml000066400000000000000000000035241361162603000226720ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes buildmasters in the buildmaster cluster. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.masters.Master .. py:method:: masterActive(name, masterid) :param unicode name: the name of this master (generally ``hostname:basedir``) :param integer masterid: this master's master ID :returns: Deferred Mark this master as still active. This method should be called at startup and at least once per minute. The master ID is acquired directly from the database early in the master startup process. .. py:method:: expireMasters() :returns: Deferred Scan the database for masters that have not checked in for ten minutes. This method should be called about once per minute. .. py:method:: masterStopped(name, masterid) :param unicode name: the name of this master :param integer masterid: this master's master ID :returns: Deferred Mark this master as inactive. Masters should call this method before completing an expected shutdown, and on startup. This method will take care of deactivating or removing configuration resources like builders and schedulers as well as marking lost builds and build requests for retry. properties: active: description: true if the master is active type: boolean last_active: description: time this master was last marked active type: date masterid: description: the ID of this master type: integer name: description: master name (in the form "hostname:basedir") type: string type: object buildbot-2.6.0/master/buildbot/spec/types/patch.raml000066400000000000000000000016721361162603000225000ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a patch. Patches have unique IDs, but only appear embedded in sourcestamps, so those IDs are not especially useful. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.patches.Patch (no update methods) properties: patchid: description: the unique ID of this patch type: integer body: description: patch body as a binary string type: string level: description: patch level - the number of directory names to strip from filenames in the patch type: integer subdir: description: subdirectory in which patch should be applied type: string author?: description: patch author, or None type: string comment?: description: patch comment, or None type: string buildbot-2.6.0/master/buildbot/spec/types/rootlink.raml000066400000000000000000000001441361162603000232330ustar00rootroot00000000000000#%RAML 1.0 DataType properties: name: description: '' type: string type: object buildbot-2.6.0/master/buildbot/spec/types/scheduler.raml000066400000000000000000000026231361162603000233540ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A scheduler initiates builds, often in response to changes from change sources. 
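The update methods documented just below (``findSchedulerId`` and ``trySetSchedulerMaster``) are how a master claims a scheduler for itself. A rough sketch of that activation check, assuming access to the ``master`` object and its ``masterid`` attribute::

    from twisted.internet import defer


    @defer.inlineCallbacks
    def try_activate_scheduler(master, name):
        # Look up (or create) the ID for this scheduler name ...
        schedulerid = yield master.data.updates.findSchedulerId(name)

        # ... then try to claim it for this master; only one master wins.
        active = yield master.data.updates.trySetSchedulerMaster(
            schedulerid, master.masterid)
        return active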
A particular scheduler (by name) runs on at most one master at a time. .. note:: This data type and associated endpoints is planned to be merged with forcescheduler data type when :bug:`2673` will be resolved. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.schedulers.Scheduler .. py:method:: findSchedulerId(name) :param string name: scheduler name :returns: scheduler ID via Deferred Get the ID for the given scheduler name, inventing one if necessary. .. py:method:: trySetSchedulerMaster(schedulerid, masterid) :param integer schedulerid: scheduler ID to try to claim :param integer masterid: this master's master ID :returns: ``True`` or ``False``, via Deferred Try to claim the given scheduler for the given master and return ``True`` if the scheduler is to be activated on that master. properties: master?: description: the master on which this scheduler is running, or None if it is inactive type: master name: description: name of this scheduler type: string schedulerid: description: the ID of this scheduler type: integer type: object buildbot-2.6.0/master/buildbot/spec/types/sourcedproperties.raml000066400000000000000000000034441361162603000251610ustar00rootroot00000000000000#%RAML 1.0 DataType description: | user-specified properties for this change, represented as an object mapping keys to tuple (value, source) Properties are present in several data resources, but have a separate endpoints, because they can represent a large dataset. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.properties.Properties .. py:method:: setBuildProperty(buildid, name, value, source) :param integer buildid: build ID :param unicode name: Name of the property to set :param value: Value of the property :type value: Any JSON-able type is accepted (lists, dicts, strings and numbers) :param unicode source: Source of the property to set Set a build property. If no property with that name exists in that build, a new property will be created. .. py:method:: setBuildProperties(buildid, props) :param integer buildid: build ID :param IProperties props: Name of the property to set Synchronise build properties with the db. This sends only one event in the end of the sync, and only if properties changed. The event contains only the updated properties, for network efficiency reasons. properties: '[]': description: | Each key of this map is the name of a defined property The value consist on a couple (source, value) properties: 1: type: string description: source of the property 2: type: integer | string | object | array | boolean description: value of the property type: object type: object buildbot-2.6.0/master/buildbot/spec/types/sourcestamp.raml000066400000000000000000000035151361162603000237440ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A source stamp represents a particular version of the source code. Absolute sourcestamps specify this completely, while relative sourcestamps (with revision = None) specify the latest source at the current time. Source stamps can also have patches; such stamps describe the underlying revision with the given patch applied. Note that, depending on the underlying version-control system, the same revision may describe different code in different branches (e.g., SVN) or may be independent of the branch (e.g., Git). 
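In practice, sourcestamps are rarely created directly; they are usually passed as keyword dictionaries to ``addBuildset`` (see the buildset update methods earlier in this specification). A sketch of such a call, following the signature documented above, with illustrative branch and repository values::

    from twisted.internet import defer


    @defer.inlineCallbacks
    def schedule_tip_of_branch(master, builderids):
        # A relative sourcestamp: revision=None means "latest source at build time".
        sourcestamp = {
            'branch': 'master',                         # illustrative values
            'revision': None,
            'repository': 'https://example.com/repo.git',
            'project': 'example',
            'codebase': '',
        }
        bsid, brids = yield master.data.updates.addBuildset(
            scheduler='manual',                         # name reported as the creator
            sourcestamps=[sourcestamp],
            reason='triggered from a custom service',
            builderids=builderids,
        )
        return bsid, brids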
The ``created_at`` timestamp can be used to indicate the first time a sourcestamp was seen by Buildbot. This provides a reasonable default ordering for sourcestamps when more reliable information is not available. properties: ssid: description: | the ID of this sourcestamp .. note:: For legacy reasons, the abbreviated name ``ssid`` is used instead of canonical ``sourcestampid``. This might change in the future (:bug:`3509`). type: integer branch?: description: code branch, or none for the "default branch", whatever that might mean type: string codebase: description: revision for this sourcestamp, or none if unknown type: string created_at: description: the timestamp when this sourcestamp was created type: date patch?: description: the patch for this sourcestamp, or none type: patch project: description: user-defined project to which this sourcestamp corresponds type: string repository: description: repository where this sourcestamp occurred type: string revision?: description: revision for this sourcestamp, or none if unknown type: string type: object buildbot-2.6.0/master/buildbot/spec/types/spec.raml000066400000000000000000000004201361162603000223210ustar00rootroot00000000000000#%RAML 1.0 DataType properties: path: description: '' type: string plural: description: '' type: string type: description: '' type: string type_spec: description: '' type: object type: object buildbot-2.6.0/master/buildbot/spec/types/step.raml000066400000000000000000000063361361162603000223560ustar00rootroot00000000000000#%RAML 1.0 DataType description: | This resource type describes a step in a build. Steps have unique IDs, but are most commonly accessed by name in the context of their containing builds. Update Methods -------------- All update methods are available as attributes of ``master.data.updates``. .. py:class:: buildbot.data.steps.Step .. py:method:: newStep(buildid, name) :param integer buildid: buildid containing this step :param name: name for the step :type name: 50-character :ref:`identifier ` :returns: (stepid, number, name) via Deferred Create a new step and return its ID, number, and name. Note that the name may be different from the requested name, if that name was already in use. The state strings for the new step will be set to 'pending'. .. py:method:: startStep(stepid) :param integer stepid: the step to modify Start the step. .. py:method:: setStepStateString(stepid, state_string) :param integer stepid: the step to modify :param unicode state_string: new state strings for this step Replace the existing state string for a step with a new list. .. py:method:: addStepURL(stepid, name, url): :param integer stepid: the step to modify :param string name: the url name :param string url: the actual url :returns: None via deferred Add a new url to a step. The new url is added to the list of urls. .. py:method:: finishStep(stepid, results, hidden) :param integer stepid: the step to modify :param integer results: the step's results :param boolean hidden: true if the step should not be displayed Mark the step as finished at the current time, with the given results. 
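Taken together, these update methods describe the life cycle of a step as the master records it. A condensed sketch, assuming a component holding a ``master`` reference and an existing ``buildid``; the step name and URL are illustrative, and ``SUCCESS`` comes from ``buildbot.process.results``::

    from twisted.internet import defer

    from buildbot.process.results import SUCCESS


    @defer.inlineCallbacks
    def record_step(master, buildid):
        # Create the step (the returned name may differ if 'compile' was taken).
        stepid, number, name = yield master.data.updates.newStep(buildid, 'compile')
        yield master.data.updates.startStep(stepid)

        # Attach a URL and keep the UI informed while the work runs.
        yield master.data.updates.addStepURL(stepid, 'logs', 'https://example.com/log')
        yield master.data.updates.setStepStateString(stepid, 'compiling')

        # ... the actual work would happen here ...

        # Mark it finished, visible (hidden=False), with its result code.
        yield master.data.updates.finishStep(stepid, SUCCESS, False)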
properties: stepid: description: the unique ID of this step type: integer buildid: description: id of the build containing this step type: integer complete: description: true if this step is complete type: boolean complete_at?: description: time at which this step was complete, or None if it's still running type: date hidden: description: true if the step should not be displayed type: boolean name: description: the step name, unique within the build type: identifier number: description: the number of this step (sequential within the build) type: integer results?: description: the results of the step (see :ref:`Build-Result-Codes`), or None if not complete type: integer started_at?: description: time at which this step started, or None if it hasn't started yet type: date state_string: description: | a string giving detail on the state of the build. The first is usually one word or phrase; the remainder are sized for one-line display. type: string urls[]: description: a list of URLs associated with this step. properties: name: string url: string type: object buildbot-2.6.0/master/buildbot/spec/types/worker.raml000066400000000000000000000031211361162603000227010ustar00rootroot00000000000000#%RAML 1.0 DataType description: | A worker resource represents a worker to the source code monitored by Buildbot. The contents of the ``connected_to`` and ``configured_on`` attributes are sensitive to the context of the request. If a builder or master is specified in the path, then only the corresponding connections and configurations are included in the result. properties: workerid: description: the ID of this worker type: integer configured_on[]: description: list of builders on masters this worker is configured on properties: builderid: integer masterid: integer connected_to[]: description: list of masters this worker is attached to properties: masterid: integer name: description: the name of the worker type: string paused: description: the worker is paused if it is connected but doesn't accept new builds. type: bool graceful: description: the worker is graceful if it doesn't accept new builds, and will shutdown when builds are finished. type: bool workerinfo: description: | information about the worker. The worker information can be any JSON-able object. In practice, it contains the following keys, based on information provided by the worker: * ``admin`` (the admin information) * ``host`` (the name of the host) * ``access_uri`` (the access URI) * ``version`` (the version on the worker) type: object type: object buildbot-2.6.0/master/buildbot/statistics/000077500000000000000000000000001361162603000206125ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/statistics/__init__.py000066400000000000000000000035011361162603000227220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.statistics.capture import CaptureBuildDuration from buildbot.statistics.capture import CaptureBuildDurationAllBuilders from buildbot.statistics.capture import CaptureBuildEndTime from buildbot.statistics.capture import CaptureBuildEndTimeAllBuilders from buildbot.statistics.capture import CaptureBuildStartTime from buildbot.statistics.capture import CaptureBuildStartTimeAllBuilders from buildbot.statistics.capture import CaptureData from buildbot.statistics.capture import CaptureDataAllBuilders from buildbot.statistics.capture import CaptureProperty from buildbot.statistics.capture import CapturePropertyAllBuilders from buildbot.statistics.stats_service import StatsService from buildbot.statistics.storage_backends.influxdb_client import InfluxStorageService __all__ = [ 'CaptureBuildDuration', 'CaptureBuildDurationAllBuilders', 'CaptureBuildEndTime', 'CaptureBuildEndTimeAllBuilders', 'CaptureBuildStartTime', 'CaptureBuildStartTimeAllBuilders', 'CaptureData', 'CaptureDataAllBuilders', 'CaptureProperty', 'CapturePropertyAllBuilders', 'InfluxStorageService', 'StatsService' ] buildbot-2.6.0/master/buildbot/statistics/capture.py000066400000000000000000000304771361162603000226420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import abc import re from twisted.internet import defer from twisted.internet import threads from buildbot import config from buildbot.errors import CaptureCallbackError class Capture: """ Base class for all Capture* classes. """ __metaclass__ = abc.ABCMeta def __init__(self, routingKey, callback): self.routingKey = routingKey self._callback = callback # parent service and buildmaster to be set when StatsService # initialized self.parent_svcs = [] self.master = None def _defaultContext(self, msg, builder_name): return { "builder_name": builder_name, "build_number": str(msg['number']) } @abc.abstractmethod def consume(self, routingKey, msg): pass @defer.inlineCallbacks def _store(self, post_data, series_name, context): for svc in self.parent_svcs: yield threads.deferToThread(svc.thd_postStatsValue, post_data, series_name, context) class CapturePropertyBase(Capture): """ A base class for CaptureProperty* classes. """ def __init__(self, property_name, callback=None, regex=False): self._property_name = property_name self._regex = regex routingKey = ("builders", None, "builds", None, "finished") def default_callback(props, property_name): # index: 0 - prop_value, 1 - prop_source return props[property_name][0] if not callback: callback = default_callback super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for this (CaptureProperty) class. Gets the properties from data api and send them to the storage backends. 
""" builder_info = yield self.master.data.get(("builders", msg['builderid'])) if self._builder_name_matches(builder_info): properties = yield self.master.data.get(("builds", msg['buildid'], "properties")) if self._regex: filtered_prop_names = [ pn for pn in properties if re.match(self._property_name, pn)] else: filtered_prop_names = [self._property_name] for pn in filtered_prop_names: try: ret_val = self._callback(properties, pn) except KeyError: raise CaptureCallbackError("CaptureProperty failed." " The property %s not found for build number %s on" " builder %s." % (pn, msg['number'], builder_info['name'])) context = self._defaultContext(msg, builder_info['name']) series_name = "%s-%s" % (builder_info['name'], pn) post_data = { "name": pn, "value": ret_val } yield self._store(post_data, series_name, context) else: yield defer.succeed(None) @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureProperty(CapturePropertyBase): """ Convenience wrapper for getting statistics for filtering. Filters out build properties specifies in the config file. """ def __init__(self, builder_name, property_name, callback=None, regex=False): self._builder_name = builder_name super().__init__(property_name, callback, regex) def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CapturePropertyAllBuilders(CapturePropertyBase): """ Capture class for filtering out build properties for all builds. """ def _builder_name_matches(self, builder_info): # Since we need to match all builders, we simply return True here. return True class CaptureBuildTimes(Capture): """ Capture methods for capturing build start times. """ def __init__(self, builder_name, callback, time_type): self._builder_name = builder_name routingKey = ("builders", None, "builds", None, "finished") self._time_type = time_type super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for CaptureBuildStartTime. Gets the build start time. """ builder_info = yield self.master.data.get(("builders", msg['builderid'])) if self._builder_name_matches(builder_info): try: ret_val = self._callback(*self._retValParams(msg)) except Exception as e: # catching generic exceptions is okay here since we propagate # it raise CaptureCallbackError("%s Exception raised: %s with message: %s" % (self._err_msg(msg, builder_info['name']), type(e).__name__, str(e))) context = self._defaultContext(msg, builder_info['name']) post_data = { self._time_type: ret_val } series_name = "%s-build-times" % builder_info['name'] yield self._store(post_data, series_name, context) else: yield defer.succeed(None) def _err_msg(self, build_data, builder_name): msg = "%s failed on build %s on builder %s." % (self.__class__.__name__, build_data['number'], builder_name) return msg @abc.abstractmethod def _retValParams(self, msg): pass @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureBuildStartTime(CaptureBuildTimes): """ Capture methods for capturing build start times. 
""" def __init__(self, builder_name, callback=None): def default_callback(start_time): return start_time.isoformat() if not callback: callback = default_callback super().__init__(builder_name, callback, "start-time") def _retValParams(self, msg): return [msg['started_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildStartTimeAllBuilders(CaptureBuildStartTime): """ Capture methods for capturing build start times for all builders. """ def __init__(self, callback=None): super().__init__(None, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureBuildEndTime(CaptureBuildTimes): """ Capture methods for capturing build end times. """ def __init__(self, builder_name, callback=None): def default_callback(end_time): return end_time.isoformat() if not callback: callback = default_callback super().__init__(builder_name, callback, "end-time") def _retValParams(self, msg): return [msg['complete_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildEndTimeAllBuilders(CaptureBuildEndTime): """ Capture methods for capturing build end times on all builders. """ def __init__(self, callback=None): super().__init__(None, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureBuildDuration(CaptureBuildTimes): """ Capture methods for capturing build start times. """ def __init__(self, builder_name, report_in='seconds', callback=None): if report_in not in ['seconds', 'minutes', 'hours']: config.error("Error during initialization of class %s." " `report_in` parameter must be one of 'seconds', 'minutes' or 'hours'" % (self.__class__.__name__)) def default_callback(start_time, end_time): divisor = 1 # it's a closure if report_in == 'minutes': divisor = 60 elif report_in == 'hours': divisor = 60 * 60 duration = end_time - start_time # cannot use duration.total_seconds() on Python 2.6 duration = ((duration.microseconds + (duration.seconds + duration.days * 24 * 3600) * 1e6) / 1e6) return duration / divisor if not callback: callback = default_callback super().__init__(builder_name, callback, "duration") def _retValParams(self, msg): return [msg['started_at'], msg['complete_at']] def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureBuildDurationAllBuilders(CaptureBuildDuration): """ Capture methods for capturing build durations on all builders. """ def __init__(self, report_in='seconds', callback=None): super().__init__(None, report_in, callback) def _builder_name_matches(self, builder_info): # Match all builders so simply return True return True class CaptureDataBase(Capture): """ Base class for CaptureData methods. """ def __init__(self, data_name, callback=None): self._data_name = data_name def identity(x): return x if not callback: callback = identity # this is the routing key which is used to register consumers on to mq layer # this following key created in StatsService.yieldMetricsValue and used # here routingKey = ("stats-yieldMetricsValue", "stats-yield-data") super().__init__(routingKey, callback) @defer.inlineCallbacks def consume(self, routingKey, msg): """ Consumer for this (CaptureData) class. Gets the data sent from yieldMetricsValue and sends it to the storage backends. 
""" build_data = msg['build_data'] builder_info = yield self.master.data.get(("builders", build_data['builderid'])) if self._builder_name_matches(builder_info) and self._data_name == msg['data_name']: try: ret_val = self._callback(msg['post_data']) except Exception as e: raise CaptureCallbackError("CaptureData failed for build %s of builder %s." " Exception generated: %s with message %s" % (build_data['number'], builder_info['name'], type(e).__name__, str(e))) post_data = ret_val series_name = '%s-%s' % (builder_info['name'], self._data_name) context = self._defaultContext(build_data, builder_info['name']) yield self._store(post_data, series_name, context) @abc.abstractmethod def _builder_name_matches(self, builder_info): pass class CaptureData(CaptureDataBase): """ Capture methods for arbitrary data that may not be stored in the Buildbot database. """ def __init__(self, data_name, builder_name, callback=None): self._builder_name = builder_name super().__init__(data_name, callback) def _builder_name_matches(self, builder_info): return self._builder_name == builder_info['name'] class CaptureDataAllBuilders(CaptureDataBase): """ Capture methods for arbitrary data that may not be stored in the Buildbot database. """ def _builder_name_matches(self, builder_info): return True buildbot-2.6.0/master/buildbot/statistics/stats_service.py000066400000000000000000000066221361162603000240500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.statistics.storage_backends.base import StatsStorageBase from buildbot.util import service class StatsService(service.BuildbotService): """ A middleware for passing on statistics data to all storage backends. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.consumers = [] def checkConfig(self, storage_backends): for wfb in storage_backends: if not isinstance(wfb, StatsStorageBase): raise TypeError("Invalid type of stats storage service {0!r}. 
" "Should be of type StatsStorageBase, " "is: {0!r}".format(type(StatsStorageBase))) def reconfigService(self, storage_backends): log.msg( "Reconfiguring StatsService with config: {0!r}".format(storage_backends)) self.checkConfig(storage_backends) self.registeredStorageServices = [] for svc in storage_backends: self.registeredStorageServices.append(svc) self.removeConsumers() self.registerConsumers() @defer.inlineCallbacks def registerConsumers(self): self.consumers = [] for svc in self.registeredStorageServices: for cap in svc.captures: cap.parent_svcs.append(svc) cap.master = self.master consumer = yield self.master.mq.startConsuming(cap.consume, cap.routingKey) self.consumers.append(consumer) @defer.inlineCallbacks def stopService(self): yield super().stopService() self.removeConsumers() @defer.inlineCallbacks def removeConsumers(self): for consumer in self.consumers: yield consumer.stopConsuming() self.consumers = [] @defer.inlineCallbacks def yieldMetricsValue(self, data_name, post_data, buildid): """ A method to allow posting data that is not generated and stored as build-data in the database. This method generates the `stats-yield-data` event to the mq layer which is then consumed in self.postData. @params data_name: (str) The unique name for identifying this data. post_data: (dict) A dictionary of key-value pairs that'll be sent for storage. buildid: The buildid of the current Build. """ build_data = yield self.master.data.get(('builds', buildid)) routingKey = ("stats-yieldMetricsValue", "stats-yield-data") msg = { 'data_name': data_name, 'post_data': post_data, 'build_data': build_data } self.master.mq.produce(routingKey, msg) buildbot-2.6.0/master/buildbot/statistics/storage_backends/000077500000000000000000000000001361162603000241105ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/statistics/storage_backends/__init__.py000066400000000000000000000013011361162603000262140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members buildbot-2.6.0/master/buildbot/statistics/storage_backends/base.py000066400000000000000000000017451361162603000254030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import abc class StatsStorageBase: """ Base class for sub service responsible for passing on stats data to a storage backend """ __metaclass__ = abc.ABCMeta @abc.abstractmethod def thd_postStatsValue(self, post_data, series_name, context=None): pass buildbot-2.6.0/master/buildbot/statistics/storage_backends/influxdb_client.py000066400000000000000000000041351361162603000276360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from buildbot import config from buildbot.statistics.storage_backends.base import StatsStorageBase try: from influxdb import InfluxDBClient except ImportError: InfluxDBClient = None class InfluxStorageService(StatsStorageBase): """ Delegates data to InfluxDB """ def __init__(self, url, port, user, password, db, captures, name="InfluxStorageService"): if not InfluxDBClient: config.error("Python client for InfluxDB not installed.") return self.url = url self.port = port self.user = user self.password = password self.db = db self.name = name self.captures = captures self.client = InfluxDBClient(self.url, self.port, self.user, self.password, self.db) self._inited = True def thd_postStatsValue(self, post_data, series_name, context=None): if not self._inited: log.err("Service {0} not initialized".format(self.name)) return data = { 'measurement': series_name, 'fields': post_data } log.msg("Sending data to InfluxDB") log.msg("post_data: {0!r}".format(post_data)) if context: log.msg("context: {0!r}".format(context)) data['tags'] = context self.client.write_points([data]) buildbot-2.6.0/master/buildbot/status/000077500000000000000000000000001361162603000177435ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/status/__init__.py000066400000000000000000000013121361162603000220510ustar00rootroot00000000000000from buildbot.status import build from buildbot.status import builder from buildbot.status import buildrequest from buildbot.status import buildset from buildbot.status import master # styles.Versioned requires this, as it keys the version numbers on the fully # qualified class name; see master/buildbot/test/regressions/test_unpickling.py build.BuildStatus.__module__ = 'buildbot.status.builder' # add all of these classes to builder; this is a form of late binding to allow # circular module references among the status modules builder.BuildSetStatus = buildset.BuildSetStatus builder.Status = master.Status builder.BuildStatus = build.BuildStatus builder.BuildRequestStatus = buildrequest.BuildRequestStatus buildbot-2.6.0/master/buildbot/status/base.py000066400000000000000000000052111361162603000212260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
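# (editor's example) A sketch of wiring the InfluxDB backend above into a
# master configuration.  A BuildbotService is configured through keyword
# arguments matching its checkConfig() signature, so StatsService takes
# storage_backends plus a name; host, credentials, database and builder name
# below are placeholders, and the 'influxdb' Python client must be installed.
from buildbot.statistics.capture import CaptureBuildDuration
from buildbot.statistics.stats_service import StatsService
from buildbot.statistics.storage_backends.influxdb_client import InfluxStorageService

captures = [CaptureBuildDuration('runtests', report_in='minutes')]
influx = InfluxStorageService(
    'influxdb.example.org', 8086, 'buildbot', 'secret', 'buildbot_stats',
    captures)
stats_service = StatsService(storage_backends=[influx], name='StatsService')
# in master.cfg: c['services'].append(stats_service)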
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from zope.interface import implementer from buildbot import pbutil from buildbot import util from buildbot.interfaces import IStatusReceiver from buildbot.util import service @implementer(IStatusReceiver) class StatusReceiverBase: def requestSubmitted(self, request): pass def requestCancelled(self, builder, request): pass def buildsetSubmitted(self, buildset): pass def builderAdded(self, builderName, builder): pass def builderChangedState(self, builderName, state): pass def buildStarted(self, builderName, build): pass def buildETAUpdate(self, build, ETA): pass def changeAdded(self, change): pass def stepStarted(self, build, step): pass def stepTextChanged(self, build, step, text): pass def stepText2Changed(self, build, step, text2): pass def logStarted(self, build, step, log): pass def logChunk(self, build, step, log, channel, text): pass def logFinished(self, build, step, log): pass def stepFinished(self, build, step, results): pass def buildFinished(self, builderName, build, results): pass def builderRemoved(self, builderName): pass def workerConnected(self, workerName): pass def workerDisconnected(self, workerName): pass def workerPaused(self, name): pass def workerUnpaused(self, name): pass def checkConfig(self, otherStatusReceivers): pass class StatusReceiverMultiService(StatusReceiverBase, service.AsyncMultiService, util.ComparableMixin): pass class StatusReceiverService(StatusReceiverBase, service.AsyncService, util.ComparableMixin): pass StatusReceiver = StatusReceiverService @implementer(IStatusReceiver) class StatusReceiverPerspective(StatusReceiver, pbutil.NewCredPerspective): pass buildbot-2.6.0/master/buildbot/status/build.py000066400000000000000000000220351361162603000214160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from zope.interface import implementer from buildbot import interfaces from buildbot import util @implementer(interfaces.IBuildStatus, interfaces.IStatusEvent) class BuildStatus(): sources = None reason = None changes = [] blamelist = [] progress = None started = None finished = None currentStep = None text = [] results = None # these lists/dicts are defined here so that unserialized instances have # (empty) values. They are set in __init__ to new objects to make sure # each instance gets its own copy. 
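# (editor's example) The base classes above are collections of no-op hooks, so
# a listener built on this (legacy) status API only overrides the events it
# cares about.  A minimal sketch:
from twisted.python import log
from buildbot.status.base import StatusReceiverBase

class LogFinishedBuilds(StatusReceiverBase):
    def buildFinished(self, builderName, build, results):
        log.msg("build #%d on %s finished with results %r"
                % (build.getNumber(), builderName, results))

    def workerDisconnected(self, workerName):
        log.msg("worker %s disconnected" % workerName)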
watchers = [] updates = {} finishedWatchers = [] testResults = {} def __init__(self, parent, master, number): """ @type parent: L{BuilderStatus} @type number: int """ assert interfaces.IBuilderStatus(parent) self.builder = parent self.master = master self.number = number self.watchers = [] self.updates = {} self.finishedWatchers = [] self.steps = [] self.testResults = {} self.workername = "???" def __repr__(self): return "<%s #%s>" % (self.__class__.__name__, self.number) # IBuildStatus def getBuilder(self): """ @rtype: L{BuilderStatus} """ return self.builder def getNumber(self): return self.number def getPreviousBuild(self): if self.number == 0: return None return self.builder.getBuild(self.number - 1) def getSourceStamps(self, absolute=False): return {} def getReason(self): return self.reason def getChanges(self): return self.changes def getRevisions(self): revs = [] for c in self.changes: rev = str(c.revision) if rev > 7: # for long hashes rev = rev[:7] revs.append(rev) return ", ".join(revs) def getResponsibleUsers(self): return self.blamelist def getSteps(self): """Return a list of dictionary objects, each of which describes a step. For invariant builds (those which always use the same set of Steps), this should be the complete list, however some of the steps may not have started yet (step.getTimes()[0] will be None). For variant builds, this may not be complete (asking again later may give you more of them).""" return self.steps def getTimes(self): return (self.started, self.finished) _sentinel = [] # used as a sentinel to indicate unspecified initial_value def isFinished(self): return (self.finished is not None) def waitUntilFinished(self): if self.finished: d = defer.succeed(self) else: d = defer.Deferred() self.finishedWatchers.append(d) return d # while the build is running, the following methods make sense. # Afterwards they return None def getETA(self): return None def getCurrentStep(self): return self.currentStep # Once you know the build has finished, the following methods are legal. # Before this build has finished, they all return None. 
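# (editor's example) waitUntilFinished() above returns a Deferred that fires
# with this BuildStatus once buildFinished() runs, so callers can chain work
# onto completion instead of polling isFinished().
def report_when_finished(build_status):
    def report(finished_build):
        started, finished = finished_build.getTimes()
        print("build #%d finished in %.1fs with results %r" % (
            finished_build.getNumber(), finished - started,
            finished_build.getResults()))
    return build_status.waitUntilFinished().addCallback(report)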
def getText(self): text = [] text.extend(self.text) for s in self.steps: text.extend(s.text2) return text def getResults(self): return self.results def getWorkername(self): return self.workername def getTestResults(self): return self.testResults def getLogs(self): logs = [] for s in self.steps: for loog in s.getLogs(): logs.append(loog) return logs # subscription interface def subscribe(self, receiver, updateInterval=None): # will receive stepStarted and stepFinished messages # and maybe buildETAUpdate self.watchers.append(receiver) if updateInterval is not None: self.sendETAUpdate(receiver, updateInterval) def sendETAUpdate(self, receiver, updateInterval): self.updates[receiver] = None # they might have unsubscribed during buildETAUpdate if receiver in self.watchers: self.updates[receiver] = reactor.callLater(updateInterval, self.sendETAUpdate, receiver, updateInterval) def unsubscribe(self, receiver): if receiver in self.watchers: self.watchers.remove(receiver) if receiver in self.updates: if self.updates[receiver] is not None: self.updates[receiver].cancel() del self.updates[receiver] # methods for the base.Build to invoke def addTestResult(self, result): self.testResults[result.getName()] = result def setSourceStamps(self, sourceStamps): self.sources = sourceStamps self.changes = [] for source in self.sources: self.changes.extend(source.changes) def setReason(self, reason): self.reason = reason def setBlamelist(self, blamelist): self.blamelist = blamelist def setProgress(self, progress): self.progress = progress def buildStarted(self, build): """The Build has been set up and is about to be started. It can now be safely queried, so it is time to announce the new build.""" self.started = util.now() # now that we're ready to report status, let the BuilderStatus tell # the world about us self.builder.buildStarted(self) def setWorkername(self, workername): self.workername = workername def setText(self, text): assert isinstance(text, (list, tuple)) self.text = text def setResults(self, results): self.results = results def buildFinished(self): self.currentStep = None self.finished = util.now() for update in self.updates: if self.updates[update] is not None: self.updates[update].cancel() del self.updates[update] watchers = self.finishedWatchers self.finishedWatchers = [] for w in watchers: w.callback(self) # methods previously called by our now-departed BuildStepStatus children def stepStarted(self, step): self.currentStep = step for w in self.watchers: receiver = w.stepStarted(self, step) if receiver: if isinstance(receiver, type(())): step.subscribe(receiver[0], receiver[1]) else: step.subscribe(receiver) d = step.waitUntilFinished() # TODO: This actually looks like a bug, but this code # will be removed anyway. 
# pylint: disable=cell-var-from-loop d.addCallback(lambda step: step.unsubscribe(receiver)) step.waitUntilFinished().addCallback(self._stepFinished) def _stepFinished(self, step): results = step.getResults() for w in self.watchers: w.stepFinished(self, step, results) # methods called by our BuilderStatus parent def pruneSteps(self): # this build is very old: remove the build steps too self.steps = [] def asDict(self): result = {} # Constant result['builderName'] = self.builder.name result['number'] = self.getNumber() result['sourceStamps'] = [ss.asDict() for ss in self.getSourceStamps()] result['reason'] = self.getReason() result['blame'] = self.getResponsibleUsers() # Transient result['times'] = self.getTimes() result['text'] = self.getText() result['results'] = self.getResults() result['worker'] = self.getWorkername() # TODO(maruel): Add. # result['test_results'] = self.getTestResults() result['logs'] = [[l.getName(), self.builder.status.getURLForThing(l)] for l in self.getLogs()] result['eta'] = None result['steps'] = [bss.asDict() for bss in self.steps] if self.getCurrentStep(): result['currentStep'] = self.getCurrentStep().asDict() else: result['currentStep'] = None return result buildbot-2.6.0/master/buildbot/status/builder.py000066400000000000000000000330401361162603000217430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import itertools import os from twisted.persisted import styles from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot import util # user modules expect these symbols to be present here from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.process.results import worst_status from buildbot.status.build import BuildStatus from buildbot.status.buildrequest import BuildRequestStatus from buildbot.status.event import Event from buildbot.util.lru import LRUCache _hush_pyflakes = [SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, CANCELLED, Results, worst_status] @implementer(interfaces.IBuilderStatus, interfaces.IEventSource) class BuilderStatus(styles.Versioned): """I handle status information for a single process.build.Builder object. That object sends status changes to me (frequently as Events), and I provide them on demand to the various status recipients, like the HTML waterfall display and the live status clients. It also sends build summaries to me, which I log and provide to status clients who aren't interested in seeing details of the individual build steps. 
I am responsible for maintaining the list of historic Events and Builds, pruning old ones, and loading them from / saving them to disk. I live in the buildbot.process.build.Builder object, in the .builder_status attribute. @type tags: None or list of strings @ivar tags: user-defined "tag" this builder has; can be used to filter on in status clients """ persistenceVersion = 2 persistenceForgets = ('wasUpgraded', ) tags = None currentBigState = "offline" # or idle/waiting/interlocked/building basedir = None # filled in by our parent def __init__(self, buildername, tags, master, description): self.name = buildername self.tags = tags self.description = description self.master = master self.workernames = [] self.events = [] # these three hold Events, and are used to retrieve the current # state of the boxes. self.lastBuildStatus = None self.currentBuilds = [] self.nextBuild = None self.watchers = [] self.buildCache = LRUCache(self.cacheMiss) # build cache management def setCacheSize(self, size): self.buildCache.set_max_size(size) def getBuildByNumber(self, number): return self.buildCache.get(number) def cacheMiss(self, number, **kwargs): # If kwargs['val'] exists, this is a new value being added to # the cache. Just return it. if 'val' in kwargs: return kwargs['val'] # first look in currentBuilds for b in self.currentBuilds: if b.number == number: return b # Otherwise it is in the database and thus inaccessible. return None def prune(self, events_only=False): pass # IBuilderStatus methods def getName(self): # if builderstatus page does show not up without any reason then # str(self.name) may be a workaround return self.name def setDescription(self, description): # used during reconfig self.description = description def getDescription(self): return self.description def getState(self): return (self.currentBigState, self.currentBuilds) def getWorkers(self): return [self.status.getWorker(name) for name in self.workernames] def getPendingBuildRequestStatuses(self): # just assert 0 here. According to dustin the whole class will go away # soon. assert 0 db = self.status.master.db d = db.buildrequests.getBuildRequests(claimed=False, buildername=self.name) @d.addCallback def make_statuses(brdicts): return [BuildRequestStatus(self.name, brdict['brid'], self.status, brdict=brdict) for brdict in brdicts] return d def getCurrentBuilds(self): return self.currentBuilds def getLastFinishedBuild(self): b = self.getBuild(-1) if not (b and b.isFinished()): b = self.getBuild(-2) return b def getTags(self): return self.tags def setTags(self, tags): # used during reconfig self.tags = tags def matchesAnyTag(self, tags): # Need to guard against None with the "or []". 
return bool(set(self.tags or []) & set(tags)) def getBuildByRevision(self, rev): number = self.nextBuildNumber - 1 while number > 0: build = self.getBuildByNumber(number) got_revision = build.getAllGotRevisions().get("") if rev == got_revision: return build number -= 1 return None def getBuild(self, number, revision=None): if revision is not None: return self.getBuildByRevision(revision) if number < 0: number = self.nextBuildNumber + number if number < 0 or number >= self.nextBuildNumber: return None try: return self.getBuildByNumber(number) except IndexError: return None def getEvent(self, number): return None def _getBuildBranches(self, build): return {ss.branch for ss in build.getSourceStamps()} def generateFinishedBuilds(self, branches=None, num_builds=None, max_buildnum=None, finished_before=None, results=None, max_search=200, filter_fn=None): got = 0 if branches is None: branches = set() else: branches = set(branches) for Nb in itertools.count(1): if Nb > self.nextBuildNumber: break if Nb > max_search: break build = self.getBuild(-Nb) if build is None: continue if max_buildnum is not None: if build.getNumber() > max_buildnum: continue if not build.isFinished(): continue if finished_before is not None: start, end = build.getTimes() if end >= finished_before: continue # if we were asked to filter on branches, and none of the # sourcestamps match, skip this build if branches and not branches & self._getBuildBranches(build): continue if results is not None: if build.getResults() not in results: continue if filter_fn is not None: if not filter_fn(build): continue got += 1 yield build if num_builds is not None: if got >= num_builds: return def eventGenerator(self, branches=None, categories=None, committers=None, projects=None, minTime=0): """ Not implemented """ def subscribe(self, receiver): # will get builderChangedState, buildStarted, buildFinished, # requestSubmitted, requestCancelled. Note that a request which is # resubmitted (due to a worker disconnect) will cause requestSubmitted # to be invoked multiple times. self.watchers.append(receiver) self.publishState(receiver) # our parent Status provides requestSubmitted and requestCancelled self.status._builder_subscribe(self.name, receiver) def unsubscribe(self, receiver): self.watchers.remove(receiver) self.status._builder_unsubscribe(self.name, receiver) # Builder interface (methods called by the Builder which feeds us) def setWorkernames(self, names): self.workernames = names def addEvent(self, text=None): # this adds a duration event. When it is done, the user should call # e.finish(). They can also mangle it by modifying .text e = Event() e.started = util.now() if text is None: text = [] e.text = text return e # they are free to mangle it further def addPointEvent(self, text=None): # this adds a point event, one which occurs as a single atomic # instant of time. 
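# (editor's example) generateFinishedBuilds() above is a generator that walks
# recent builds newest-first and applies the requested filters, so callers
# simply iterate it.  The branch name and the extra predicate are
# illustrative.
from buildbot.process.results import FAILURE

def recent_failure_numbers(builder_status, limit=10):
    return [
        b.getNumber()
        for b in builder_status.generateFinishedBuilds(
            branches=['master'],      # only builds touching these branches
            num_builds=limit,         # stop after this many matches
            results=(FAILURE,),       # only failed builds
            filter_fn=lambda b: b.getWorkername() != '???')
    ]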
e = Event() e.started = util.now() e.finished = 0 if text is None: text = [] e.text = text return e # for consistency, but they really shouldn't touch it def setBigState(self, state): needToUpdate = state != self.currentBigState self.currentBigState = state if needToUpdate: self.publishState() def publishState(self, target=None): state = self.currentBigState if target is not None: # unicast target.builderChangedState(self.name, state) return for w in self.watchers: try: w.builderChangedState(self.name, state) except Exception: log.msg("Exception caught publishing state to %r" % w) log.err() def newBuild(self): s = BuildStatus(self, self.master, 0) return s # buildStarted is called by our child BuildStatus instances def buildStarted(self, s): """Now the BuildStatus object is ready to go (it knows all of its Steps, its ETA, etc), so it is safe to notify our watchers.""" assert s.builder is self # paranoia assert s not in self.currentBuilds self.currentBuilds.append(s) self.buildCache.get(s.number, val=s) # now that the BuildStatus is prepared to answer queries, we can # announce the new build to all our watchers for w in self.watchers: # TODO: maybe do this later? callLater(0)? try: receiver = w.buildStarted(self.getName(), s) if receiver: if isinstance(receiver, type(())): s.subscribe(receiver[0], receiver[1]) else: s.subscribe(receiver) d = s.waitUntilFinished() # TODO: This actually looks like a bug, but this code # will be removed anyway. # pylint: disable=cell-var-from-loop d.addCallback(lambda s: s.unsubscribe(receiver)) except Exception: log.msg( "Exception caught notifying %r of buildStarted event" % w) log.err() def _buildFinished(self, s): assert s in self.currentBuilds self.currentBuilds.remove(s) name = self.getName() results = s.getResults() for w in self.watchers: try: w.buildFinished(name, s, results) except Exception: log.msg( "Exception caught notifying %r of buildFinished event" % w) log.err() def asDict(self): # Collect build numbers. # Important: Only grab the *cached* builds numbers to reduce I/O. current_builds = [b.getNumber() for b in self.currentBuilds] cached_builds = sorted(set(list(self.buildCache) + current_builds)) result = { # Constant # TODO(maruel): Fix me. We don't want to leak the full path. 'basedir': os.path.basename(self.basedir), 'tags': self.getTags(), 'workers': self.workernames, 'schedulers': [s.name for s in self.status.master.allSchedulers() if self.name in s.builderNames], # TODO(maruel): Add cache settings? Do we care? # Transient 'cachedBuilds': cached_builds, 'currentBuilds': current_builds, 'state': self.getState()[0], # lies, but we don't have synchronous access to this info; use # asDict_async instead 'pendingBuilds': 0 } return result def asDict_async(self): """Just like L{asDict}, but with a nonzero pendingBuilds.""" result = self.asDict() d = self.getPendingBuildRequestStatuses() @d.addCallback def combine(statuses): result['pendingBuilds'] = len(statuses) return result return d def getMetrics(self): return self.botmaster.parent.metrics # vim: set ts=4 sts=4 sw=4 et: buildbot-2.6.0/master/buildbot/status/buildrequest.py000066400000000000000000000113621361162603000230300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
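# (editor's example) The build cache used by the class above is a plain
# buildbot.util.lru.LRUCache.  Its miss function receives the key plus any
# keyword arguments given to get(), which is how buildStarted() pre-seeds the
# cache via get(number, val=build_status).  The values below are stand-ins.
from buildbot.util.lru import LRUCache

def miss(number, **kwargs):
    if 'val' in kwargs:        # pre-seeding a freshly started build
        return kwargs['val']
    return None                # otherwise the build only lives in the database

cache = LRUCache(miss)
cache.set_max_size(15)         # what setCacheSize() above does
build = object()               # stand-in for a BuildStatus
cache.get(7, val=build)        # insert, as buildStarted() does
assert cache.get(7) is build   # later lookups are served from the cache
# list(cache) yields the cached keys, which asDict() above uses for
# 'cachedBuilds'.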
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot.util.eventual import eventually @implementer(interfaces.IBuildRequestStatus) class BuildRequestStatus: def __init__(self, buildername, brid, status, brdict=None): self.buildername = buildername self.brid = brid self.status = status self.master = status.master self._brdict = brdict self._buildrequest = None self._buildrequest_lock = defer.DeferredLock() @defer.inlineCallbacks def _getBuildRequest(self): """ Get the underlying BuildRequest object for this status. This is a slow operation! @returns: BuildRequest instance or None, via Deferred """ # late binding to avoid an import cycle from buildbot.process import buildrequest # this is only set once, so no need to lock if we already have it if self._buildrequest: return self._buildrequest yield self._buildrequest_lock.acquire() try: if not self._buildrequest: if self._brdict is None: self._brdict = ( yield self.master.db.buildrequests.getBuildRequest( self.brid)) br = yield buildrequest.BuildRequest.fromBrdict(self.master, self._brdict) self._buildrequest = br finally: self._buildrequest_lock.release() self._buildrequest_lock.release() return self._buildrequest def buildStarted(self, build): self.status._buildrequest_buildStarted(build.status) self.builds.append(build.status) # methods called by our clients @defer.inlineCallbacks def getBsid(self): br = yield self._getBuildRequest() return br.bsid @defer.inlineCallbacks def getBuildProperties(self): br = yield self._getBuildRequest() return br.properties def getSourceStamp(self): # TODO.. 
return defer.succeed(None) def getBuilderName(self): return self.buildername @defer.inlineCallbacks def getBuilds(self): builder = self.status.getBuilder(self.getBuilderName()) builds = [] bdicts = yield self.master.db.builds.getBuilds(buildrequestid=self.brid) buildnums = sorted([bdict['number'] for bdict in bdicts]) for buildnum in buildnums: bs = builder.getBuild(buildnum) if bs: builds.append(bs) return builds def subscribe(self, observer): d = self.getBuilds() @d.addCallback def notify_old(oldbuilds): for bs in oldbuilds: eventually(observer, bs) d.addCallback(lambda _: self.status._buildrequest_subscribe(self.brid, observer)) d.addErrback(log.err, 'while notifying subscribers') def unsubscribe(self, observer): self.status._buildrequest_unsubscribe(self.brid, observer) @defer.inlineCallbacks def getSubmitTime(self): br = yield self._getBuildRequest() return br.submittedAt def asDict(self): result = {} # Constant result['source'] = None # not available sync, sorry result['builderName'] = self.buildername result['submittedAt'] = None # not available sync, sorry # Transient result['builds'] = [] # not available async, sorry return result @defer.inlineCallbacks def asDict_async(self): result = {} ss = yield self.getSourceStamp() result['source'] = ss.asDict() props = yield self.getBuildProperties() result['properties'] = props.asList() result['builderName'] = self.getBuilderName() result['submittedAt'] = yield self.getSubmitTime() builds = yield self.getBuilds() result['builds'] = [build.asDict() for build in builds] return result buildbot-2.6.0/master/buildbot/status/buildset.py000066400000000000000000000106311361162603000221310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
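# (editor's example) The "load once, under a DeferredLock" pattern used by
# _getBuildRequest() above, reduced to its essentials.  'load_object' is a
# hypothetical callable returning a Deferred for the expensive lookup.
from twisted.internet import defer

class CachedLoader:
    def __init__(self, load_object):
        self._load = load_object
        self._cached = None
        self._lock = defer.DeferredLock()

    @defer.inlineCallbacks
    def get(self):
        if self._cached is not None:       # fast path: already loaded
            return self._cached
        yield self._lock.acquire()         # serialize concurrent first calls
        try:
            if self._cached is None:
                self._cached = yield self._load()
        finally:
            self._lock.release()
        return self._cached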
# # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot import interfaces from buildbot.data import resultspec from buildbot.status.buildrequest import BuildRequestStatus @implementer(interfaces.IBuildSetStatus) class BuildSetStatus: def __init__(self, bsdict, status): self.id = bsdict['bsid'] self.bsdict = bsdict self.status = status self.master = status.master # methods for our clients def getReason(self): return self.bsdict['reason'] def getResults(self): return self.bsdict['results'] def getID(self): return self.bsdict['external_idstring'] def isFinished(self): return self.bsdict['complete'] def getBuilderNamesAndBuildRequests(self): # returns a Deferred; undocumented method that may be removed # without warning d = self.master.data.get(('buildrequests', ), filters=[resultspec.Filter('buildsetid', 'eq', [self.id])]) @d.addCallback def get_objects(brdicts): return { brd['buildername']: BuildRequestStatus(brd['buildername'], brd['brid'], self.status) for brd in brdicts} return d def getBuilderNames(self): d = self.master.data.get(('buildrequests', ), filters=[resultspec.Filter('buildsetid', 'eq', [self.id])]) @d.addCallback def get_names(brdicts): return sorted([brd['buildername'] for brd in brdicts]) return d def waitUntilFinished(self): return self.status._buildset_waitUntilFinished(self.id) def asDict(self): d = dict(self.bsdict) d["submitted_at"] = str(self.bsdict["submitted_at"]) return d class BuildSetSummaryNotifierMixin: _buildsetCompleteConsumer = None def summarySubscribe(self): startConsuming = self.master.mq.startConsuming self._buildsetCompleteConsumer = yield startConsuming( self._buildsetComplete, ('buildsets', None, 'complete')) def summaryUnsubscribe(self): if self._buildsetCompleteConsumer is not None: self._buildsetCompleteConsumer.stopConsuming() self._buildsetCompleteConsumer = None def sendBuildSetSummary(self, buildset, builds): raise NotImplementedError @defer.inlineCallbacks def _buildsetComplete(self, key, msg): bsid = msg['bsid'] # first, just get the buildset and all build requests for our buildset # id dl = [self.master.db.buildsets.getBuildset(bsid=bsid), self.master.db.buildrequests.getBuildRequests(bsid=bsid)] (buildset, breqs) = yield defer.gatherResults(dl) # next, get the bdictlist for each build request dl = [] for breq in breqs: d = self.master.db.builds.getBuilds( buildrequestid=breq['buildrequestid']) dl.append(d) buildinfo = yield defer.gatherResults(dl) # next, get the builder for each build request, and for each bdict, # look up the actual build object, using the bdictlist retrieved above builds = [] for (breq, bdictlist) in zip(breqs, buildinfo): builder = self.master_status.getBuilder(breq['buildername']) for bdict in bdictlist: build = builder.getBuild(bdict['number']) if build is not None: builds.append(build) if builds: # We've received all of the information about the builds in this # buildset; now send out the summary self.sendBuildSetSummary(buildset, builds) buildbot-2.6.0/master/buildbot/status/client.py000066400000000000000000000020701361162603000215720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
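# (editor's example) The message-queue subscription pattern used by
# summarySubscribe()/_buildsetComplete() above.  startConsuming() takes a
# callback(key, msg) and a routing-key tuple (None acts as a wildcard) and
# returns a consumer whose stopConsuming() undoes the registration.  'master'
# is assumed to be the BuildMaster instance.
from twisted.internet import defer
from twisted.python import log

def _on_buildset_complete(key, msg):
    log.msg("buildset %s completed: %r" % (msg['bsid'], msg.get('results')))

@defer.inlineCallbacks
def watch_buildsets(master):
    consumer = yield master.mq.startConsuming(
        _on_buildset_complete, ('buildsets', None, 'complete'))
    # keep the consumer around and call consumer.stopConsuming() to stop
    return consumer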
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from buildbot.status import base class PBListener(base.StatusReceiverBase): # This class is still present in users' configs, so keep it here. def __init__(self, port, user="statusClient", passwd="clientpw"): log.msg("The PBListener status listener is unused and can be removed " "from the configuration") buildbot-2.6.0/master/buildbot/status/event.py000066400000000000000000000022061361162603000214360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from zope.interface import implementer from buildbot import interfaces from buildbot import util @implementer(interfaces.IStatusEvent) class Event: started = None finished = None text = [] # IStatusEvent methods def getTimes(self): return (self.started, self.finished) def getText(self): return self.text def getLogs(self): return [] def finish(self): self.finished = util.now() buildbot-2.6.0/master/buildbot/status/master.py000066400000000000000000000403311361162603000216110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from urllib.parse import quote as urlquote from twisted.internet import defer from twisted.python import log from zope.interface import implementer from buildbot import interfaces from buildbot import util from buildbot.changes import changes from buildbot.status import builder from buildbot.status import buildrequest from buildbot.status import buildset from buildbot.util import bbcollections from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util.eventual import eventually @implementer(interfaces.IStatus) class Status(service.ReconfigurableServiceMixin, service.AsyncMultiService): def __init__(self): super().__init__() self.watchers = [] # No default limit to the log size self.logMaxSize = None self._builder_observers = bbcollections.KeyedSets() self._buildreq_observers = bbcollections.KeyedSets() self._buildset_finished_waiters = bbcollections.KeyedSets() self._buildset_completion_sub = None self._buildset_sub = None self._build_request_sub = None self._change_sub = None @property def botmaster(self): return self.master.botmaster @property def workers(self): return self.master.workers @property def basedir(self): return self.master.basedir # service management @defer.inlineCallbacks def startService(self): # subscribe to the things we need to know about self._buildset_new_consumer = yield self.master.mq.startConsuming( self.bs_new_consumer_cb, ('buildsets', None, 'new')) self._buildset_complete_consumer = yield self.master.mq.startConsuming( self.bs_complete_consumer_cb, ('buildsets', None, 'complete')) self._br_consumer = yield self.master.mq.startConsuming( self.br_consumer_cb, ('buildrequests', None, 'new')) self._change_consumer = yield self.master.mq.startConsuming( self.change_consumer_cb, ('changes', None, 'new')) yield super().startService() @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # remove the old listeners, then add the new for sr in list(self): yield sr.disownServiceParent() for sr in new_config.status: yield sr.setServiceParent(self) # reconfig any newly-added change sources, as well as existing yield super().reconfigServiceWithBuildbotConfig(new_config) def stopService(self): if self._buildset_complete_consumer: self._buildset_complete_consumer.stopConsuming() self._buildset_complete_consumer = None if self._buildset_new_consumer: self._buildset_new_consumer.stopConsuming() self._buildset_new_consumer = None if self._change_consumer: self._change_consumer.stopConsuming() self._change_consumer = None return super().stopService() # clean shutdown @property def shuttingDown(self): return self.botmaster.shuttingDown def cleanShutdown(self): return self.botmaster.cleanShutdown() def cancelCleanShutdown(self): return self.botmaster.cancelCleanShutdown() # methods called by our clients def getTitle(self): return self.master.config.title def getTitleURL(self): return self.master.config.titleURL def getBuildbotURL(self): return self.master.config.buildbotURL def getStatus(self): # some listeners expect their .parent to be a BuildMaster object, and # use this method to get the Status object. This is documented, so for # now keep it working. return self def getMetrics(self): return self.master.metrics def getURLForBuild(self, builderid, build_number): prefix = self.getBuildbotURL() return prefix + "#builders/%d/builds/%d" % ( builderid, build_number) def _getURLForBuildWithBuildername(self, builder_name, build_number): # don't use this API. 
this URL is not supported # its here waiting for getURLForThing removal or switch to deferred prefix = self.getBuildbotURL() return prefix + "#builders/%s/builds/%d" % ( urlquote(builder_name, safe=''), build_number) def getURLForBuildrequest(self, buildrequestid): prefix = self.getBuildbotURL() return prefix + "#buildrequests/%d" % (buildrequestid,) def getURLForThing(self, thing): prefix = self.getBuildbotURL() if not prefix: return None if interfaces.IStatus.providedBy(thing): return prefix if interfaces.ISchedulerStatus.providedBy(thing): pass if interfaces.IBuilderStatus.providedBy(thing): bldr = thing return prefix + "#builders/%s" % ( urlquote(bldr.getName(), safe=''), ) if interfaces.IBuildStatus.providedBy(thing): build = thing bldr = build.getBuilder() # should be: # builderid = yield bldr.getBuilderId() # return self.getURLForBuild(self, builderid, build.getNumber()) return self._getURLForBuildWithBuildername(bldr.getName(), build.getNumber()) if interfaces.IBuildStepStatus.providedBy(thing): step = thing build = step.getBuild() bldr = build.getBuilder() return prefix + "#builders/%s/builds/%d/steps/%s" % ( urlquote(bldr.getName(), safe=''), build.getNumber(), urlquote(step.getName(), safe='')) # IBuildSetStatus # IBuildRequestStatus # IWorkerStatus if interfaces.IWorkerStatus.providedBy(thing): worker = thing return prefix + "#workers/%s" % ( urlquote(worker.getName(), safe=''), ) # IStatusEvent if interfaces.IStatusEvent.providedBy(thing): # TODO: this is goofy, create IChange or something if isinstance(thing, changes.Change): change = thing return "%s#changes/%d" % (prefix, change.number) def getChangeSources(self): return list(self.master.change_svc) def getChange(self, number): """Get a Change object; returns a deferred""" d = self.master.db.changes.getChange(number) @d.addCallback def chdict2change(chdict): if not chdict: return None return changes.Change.fromChdict(self.master, chdict) return d def getSchedulers(self): return self.master.allSchedulers() def getBuilderNames(self, tags=None, categories=None): if categories is not None: # Categories is deprecated; pretend they said "tags". tags = categories if tags is None: # don't let them break it return util.naturalSort(self.botmaster.builderNames) ret = [] # respect addition order for name in self.botmaster.builderNames: bldr = self.getBuilder(name) if bldr.matchesAnyTag(tags): ret.append(name) return util.naturalSort(ret) def getBuilder(self, name): """ @rtype: L{BuilderStatus} """ return self.botmaster.builders[name].builder_status def getWorkerNames(self): return list(self.workers.workers.items()) def getWorker(self, workername): return self.workers.workers[workername].worker_status def getBuildSets(self): d = self.master.db.buildsets.getBuildsets(complete=False) @d.addCallback def make_status_objects(bsdicts): return [buildset.BuildSetStatus(bsdict, self) for bsdict in bsdicts] return d def generateFinishedBuilds(self, builders=None, branches=None, num_builds=None, finished_before=None, max_search=200): if builders is None: builders = [] if branches is None: branches = [] def want_builder(bn): if builders: return bn in builders return True builder_names = [bn for bn in self.getBuilderNames() if want_builder(bn)] # 'sources' is a list of generators, one for each Builder we're # using. When the generator is exhausted, it is replaced in this list # with None. 
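# (editor's note) The loop below keeps one pending build per source in
# next_build; refill() lazily advances each generator, and every iteration
# emits the candidate with the latest finish time, i.e. a k-way merge yielding
# finished builds newest-first across all selected builders.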
sources = [] for bn in builder_names: bldr = self.getBuilder(bn) g = bldr.generateFinishedBuilds(branches, finished_before=finished_before, max_search=max_search) sources.append(g) # next_build the next build from each source next_build = [None] * len(sources) def refill(): for i, g in enumerate(sources): if next_build[i]: # already filled continue if not g: # already exhausted continue try: next_build[i] = next(g) except StopIteration: next_build[i] = None sources[i] = None got = 0 while True: refill() # find the latest build among all the candidates candidates = [(i, b, b.getTimes()[1]) for i, b in enumerate(next_build) if b is not None] candidates.sort(key=lambda x: x[2]) if not candidates: return # and remove it from the list i, build, finshed_time = candidates[-1] next_build[i] = None got += 1 yield build if num_builds is not None: if got >= num_builds: return def subscribe(self, target): self.watchers.append(target) for name in self.botmaster.builderNames: self.announceNewBuilder(target, name, self.getBuilder(name)) def unsubscribe(self, target): self.watchers.remove(target) # methods called by upstream objects def announceNewBuilder(self, target, name, builder_status): t = target.builderAdded(name, builder_status) if t: builder_status.subscribe(t) def builderAdded(self, name, basedir, tags=None, description=None): """ @rtype: L{BuilderStatus} """ builder_status = builder.BuilderStatus(name, tags, self.master, description) builder_status.setTags(tags) builder_status.description = description builder_status.master = self.master builder_status.basedir = os.path.join(bytes2unicode(self.basedir), bytes2unicode(basedir)) builder_status.name = name # it might have been updated builder_status.status = self builder_status.setBigState("offline") return builder_status def builderRemoved(self, name): for t in self.watchers: if hasattr(t, 'builderRemoved'): t.builderRemoved(name) def workerConnected(self, name): for t in self.watchers: if hasattr(t, 'workerConnected'): t.workerConnected(name) def workerDisconnected(self, name): for t in self.watchers: if hasattr(t, 'workerDisconnected'): t.workerDisconnected(name) def workerPaused(self, name): for t in self.watchers: if hasattr(t, 'workerPaused'): t.workerPaused(name) def workerUnpaused(self, name): for t in self.watchers: if hasattr(t, 'workerUnpaused'): t.workerUnpaused(name) def changeAdded(self, change): for t in self.watchers: if hasattr(t, 'changeAdded'): t.changeAdded(change) @defer.inlineCallbacks def br_consumer_cb(self, key, msg): builderid = msg['builderid'] buildername = None # convert builderid to buildername for b in self.botmaster.builders.values(): if builderid == (yield b.getBuilderId()): buildername = b.name break if buildername in self._builder_observers: brs = buildrequest.BuildRequestStatus(buildername, msg['buildrequestid'], self) for observer in self._builder_observers[buildername]: if hasattr(observer, 'requestSubmitted'): eventually(observer.requestSubmitted, brs) @defer.inlineCallbacks def change_consumer_cb(self, key, msg): # get a list of watchers - no sense querying the change # if nobody's listening interested = [t for t in self.watchers if hasattr(t, 'changeAdded')] if not interested: return chdict = yield self.master.db.changes.getChange(msg['changeid']) change = yield changes.Change.fromChdict(self.master, chdict) for t in interested: t.changeAdded(change) def asDict(self): result = { # Constant 'title': self.getTitle(), 'titleURL': self.getTitleURL(), 'buildbotURL': self.getBuildbotURL(), # TODO: 
self.getSchedulers() # self.getChangeSources() } return result def build_started(self, brid, buildername, build_status): if brid in self._buildreq_observers: for o in self._buildreq_observers[brid]: eventually(o, build_status) def _buildrequest_subscribe(self, brid, observer): self._buildreq_observers.add(brid, observer) def _buildrequest_unsubscribe(self, brid, observer): self._buildreq_observers.discard(brid, observer) def _buildset_waitUntilFinished(self, bsid): d = defer.Deferred() self._buildset_finished_waiters.add(bsid, d) self._maybeBuildsetFinished(bsid) return d def _maybeBuildsetFinished(self, bsid): # check bsid to see if it's successful or finished, and notify anyone # who cares if bsid not in self._buildset_finished_waiters: return d = self.master.db.buildsets.getBuildset(bsid) @d.addCallback def do_notifies(bsdict): bss = buildset.BuildSetStatus(bsdict, self) if bss.isFinished(): for d in self._buildset_finished_waiters.pop(bsid): eventually(d.callback, bss) d.addErrback(log.err, 'while notifying for buildset finishes') def _builder_subscribe(self, buildername, watcher): # should get requestSubmitted and requestCancelled self._builder_observers.add(buildername, watcher) def _builder_unsubscribe(self, buildername, watcher): self._builder_observers.discard(buildername, watcher) def bs_new_consumer_cb(self, key, msg): bsid = msg['bsid'] d = self.master.db.buildsets.getBuildset(bsid) @d.addCallback def do_notifies(bsdict): bss = buildset.BuildSetStatus(bsdict, self) for t in self.watchers: if hasattr(t, 'buildsetSubmitted'): t.buildsetSubmitted(bss) return d def bs_complete_consumer_cb(self, key, msg): self._maybeBuildsetFinished(msg['bsid']) buildbot-2.6.0/master/buildbot/status/web/000077500000000000000000000000001361162603000205205ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/status/web/__init__.py000066400000000000000000000000001361162603000226170ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/status/worker.py000066400000000000000000000114301361162603000216250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
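# (editor's example) The merge performed by generateFinishedBuilds() above,
# reduced to plain iterators: each source yields items newest-first, one
# candidate per source is kept topped up, and the largest key wins each round,
# mirroring the refill()/candidates loop over build finish times.
def merge_newest_first(sources, key):
    sources = list(sources)
    heads = [None] * len(sources)
    while True:
        for i, src in enumerate(sources):
            if heads[i] is None and src is not None:
                try:
                    heads[i] = next(src)
                except StopIteration:
                    sources[i] = None
        candidates = [(key(item), i) for i, item in enumerate(heads)
                      if item is not None]
        if not candidates:
            return
        _, i = max(candidates)
        item, heads[i] = heads[i], None
        yield item

# merging two descending sequences by value:
#   list(merge_newest_first([iter([9, 5, 1]), iter([8, 6, 2])], key=lambda x: x))
#   -> [9, 8, 6, 5, 2, 1]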
# # Copyright Buildbot Team Members import time from zope.interface import implementer from buildbot import interfaces from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util.eventual import eventually @implementer(interfaces.IWorkerStatus) class WorkerStatus: admin = None host = None access_uri = None version = None connected = False graceful_shutdown = False paused = False def __init__(self, name): self.name = name self._lastMessageReceived = 0 self.runningBuilds = [] self.graceful_callbacks = [] self.pause_callbacks = [] self.connect_times = [] self.info = Properties() def getName(self): return self.name def getAdmin(self): return self.admin def getHost(self): return self.host def getAccessURI(self): return self.access_uri def getVersion(self): return self.version def isConnected(self): return self.connected def isPaused(self): return self.paused def lastMessageReceived(self): return self._lastMessageReceived def getRunningBuilds(self): return self.runningBuilds def getConnectCount(self): then = time.time() - 3600 return len([t for t in self.connect_times if t > then]) def setAdmin(self, admin): self.admin = bytes2unicode(admin) def setHost(self, host): self.host = bytes2unicode(host) def setAccessURI(self, access_uri): self.access_uri = access_uri def setVersion(self, version): self.version = version def setConnected(self, isConnected): self.connected = isConnected def setLastMessageReceived(self, when): self._lastMessageReceived = when def setPaused(self, isPaused): self.paused = isPaused for cb in self.pause_callbacks: eventually(cb, isPaused) def addPauseWatcher(self, watcher): """Add watcher to the list of watchers to be notified when the pause flag is changed.""" if watcher not in self.pause_callbacks: self.pause_callbacks.append(watcher) def removePauseWatcher(self, watcher): """Remove watcher from the list of watchers to be notified when the pause shutdown flag is changed.""" if watcher in self.pause_callbacks: self.pause_callbacks.remove(watcher) def recordConnectTime(self): # record this connect, and keep data for the last hour now = time.time() self.connect_times = [ t for t in self.connect_times if t > now - 3600] + [now] def buildStarted(self, build): self.runningBuilds.append(build) def buildFinished(self, build): self.runningBuilds.remove(build) def getGraceful(self): """Return the graceful shutdown flag""" return self.graceful_shutdown def setGraceful(self, graceful): """Set the graceful shutdown flag, and notify all the watchers""" self.graceful_shutdown = graceful for cb in self.graceful_callbacks: eventually(cb, graceful) def addGracefulWatcher(self, watcher): """Add watcher to the list of watchers to be notified when the graceful shutdown flag is changed.""" if watcher not in self.graceful_callbacks: self.graceful_callbacks.append(watcher) def removeGracefulWatcher(self, watcher): """Remove watcher from the list of watchers to be notified when the graceful shutdown flag is changed.""" if watcher in self.graceful_callbacks: self.graceful_callbacks.remove(watcher) def asDict(self): result = { # Constant 'name': self.getName(), 'access_uri': self.getAccessURI(), # Transient (since it changes when the worker reconnects) 'host': self.getHost(), 'admin': self.getAdmin(), 'version': self.getVersion(), 'connected': self.isConnected(), 'runningBuilds': [b.asDict() for b in self.getRunningBuilds()] } return result 
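# (editor's example) Observing the flags on the class above.  Watchers are
# plain callables invoked via eventually(), i.e. on a later reactor turn,
# whenever setGraceful()/setPaused() change the corresponding flag.
from twisted.python import log

def log_graceful_changes(worker_status):
    def on_change(graceful):
        log.msg("graceful shutdown flag for %s is now %r"
                % (worker_status.getName(), graceful))
    worker_status.addGracefulWatcher(on_change)
    return on_change     # keep it around to removeGracefulWatcher() later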
buildbot-2.6.0/master/buildbot/steps/000077500000000000000000000000001361162603000175565ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/__init__.py000066400000000000000000000000001361162603000216550ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/cmake.py000066400000000000000000000052031361162603000212100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import config from buildbot.interfaces import IRenderable from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import ShellMixin class CMake(ShellMixin, BuildStep): DEFAULT_CMAKE = 'cmake' name = 'cmake' description = ['running', 'cmake'] descriptionDone = ['cmake'] renderables = ( 'cmake', 'definitions', 'generator', 'options', 'path' ) haltOnFailure = True def __init__(self, path=None, generator=None, definitions=None, options=None, cmake=DEFAULT_CMAKE, **kwargs): self.path = path self.generator = generator if not (definitions is None or isinstance(definitions, dict) or IRenderable.providedBy(definitions)): config.error('definitions must be a dictionary or implement IRenderable') self.definitions = definitions if not (options is None or isinstance(options, (list, tuple)) or IRenderable.providedBy(options)): config.error('options must be a list, a tuple or implement IRenderable') self.options = options self.cmake = cmake kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super(CMake, self).__init__(**kwargs) @defer.inlineCallbacks def run(self): """ run CMake """ command = [self.cmake] if self.generator: command.extend([ '-G', self.generator ]) if self.path: command.append(self.path) if self.definitions is not None: for item in self.definitions.items(): command.append('-D%s=%s' % item) if self.options is not None: command.extend(self.options) cmd = yield self.makeRemoteShellCommand(command=command) yield self.runCommand(cmd) return cmd.results() buildbot-2.6.0/master/buildbot/steps/cppcheck.py000066400000000000000000000066431361162603000217210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
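# (editor's example) Using the step above in a build factory.  run() assembles
#   <cmake> -G <generator> <path> -D<name>=<value> ... <options>
# and runs it on the worker; the generator, source path and definitions below
# are placeholders.
from buildbot.plugins import util
from buildbot.steps.cmake import CMake

f = util.BuildFactory()
f.addStep(CMake(
    path='../src',
    generator='Ninja',
    definitions={'CMAKE_BUILD_TYPE': 'Release'},
    options=['-Wno-dev'],
    workdir='build'))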
# # Copyright Buildbot Team Members import re from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.shell import ShellCommand class Cppcheck(ShellCommand): # Highly inspired from the Pylint step. name = "cppcheck" description = ["running", "cppcheck"] descriptionDone = ["cppcheck"] flunkingIssues = ('error',) MESSAGES = ( 'error', 'warning', 'style', 'performance', 'portability', 'information') renderables = ('binary', 'source', 'extra_args') def __init__(self, *args, **kwargs): for name, default in [('binary', 'cppcheck'), ('source', ['.']), ('enable', []), ('inconclusive', False), ('extra_args', [])]: setattr(self, name, kwargs.pop(name, default)) super().__init__(*args, **kwargs) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) command = [self.binary] command.extend(self.source) if self.enable: command.append('--enable=%s' % ','.join(self.enable)) if self.inconclusive: command.append('--inconclusive') command.extend(self.extra_args) self.setCommand(command) counts = self.counts = {} summaries = self.summaries = {} for m in self.MESSAGES: counts[m] = 0 summaries[m] = [] def logConsumer(self): line_re = re.compile( r'(?:\[.+\]: )?\((?P%s)\) .+' % '|'.join(self.MESSAGES)) while True: stream, line = yield m = line_re.match(line) if m is not None: msgsev = m.group('severity') self.summaries[msgsev].append(line) self.counts[msgsev] += 1 def createSummary(self, log): self.descriptionDone = self.descriptionDone[:] for msg in self.MESSAGES: self.setProperty('cppcheck-%s' % msg, self.counts[msg], 'Cppcheck') if not self.counts[msg]: continue self.descriptionDone.append("%s=%d" % (msg, self.counts[msg])) self.addCompleteLog(msg, '\n'.join(self.summaries[msg])) self.setProperty('cppcheck-total', sum(self.counts.values()), 'Cppcheck') def evaluateCommand(self, cmd): """ cppcheck always return 0, unless a special parameter is given """ for msg in self.flunkingIssues: if self.counts[msg] != 0: return FAILURE if self.getProperty('cppcheck-total') != 0: return WARNINGS return SUCCESS buildbot-2.6.0/master/buildbot/steps/download_secret_to_worker.py000066400000000000000000000054121361162603000254010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import stat from twisted.internet import defer from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.process.results import worst_status from buildbot.steps.worker import CompositeStepMixin class DownloadSecretsToWorker(BuildStep, CompositeStepMixin): renderables = ['secret_to_be_populated'] def __init__(self, populated_secret_list, **kwargs): super(DownloadSecretsToWorker, self).__init__(**kwargs) self.secret_to_be_populated = populated_secret_list @defer.inlineCallbacks def runPopulateSecrets(self): result = SUCCESS for path, secretvalue in self.secret_to_be_populated: if not isinstance(path, str): raise ValueError("Secret path %s is not a string" % path) self.secret_to_be_interpolated = secretvalue res = yield self.downloadFileContentToWorker(path, self.secret_to_be_interpolated, mode=stat.S_IRUSR | stat.S_IWUSR) result = worst_status(result, res) return result @defer.inlineCallbacks def run(self): self._start_deferred = None res = yield self.runPopulateSecrets() return res class RemoveWorkerFileSecret(BuildStep, CompositeStepMixin): def __init__(self, populated_secret_list, logEnviron=False, **kwargs): self.paths = [] for path, secret in populated_secret_list: self.paths.append(path) self.logEnviron = logEnviron super(RemoveWorkerFileSecret, self).__init__(**kwargs) @defer.inlineCallbacks def runRemoveWorkerFileSecret(self): all_results = [] for path in self.paths: res = yield self.runRmFile(path, abandonOnFailure=False) all_results.append(res) if FAILURE in all_results: result = FAILURE else: result = SUCCESS return result @defer.inlineCallbacks def run(self): self._start_deferred = None res = yield self.runRemoveWorkerFileSecret() return res buildbot-2.6.0/master/buildbot/steps/http.py000066400000000000000000000141701361162603000211120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from buildbot import config from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep # use the 'requests' lib: http://python-requests.org try: import txrequests import requests except ImportError: txrequests = None # This step uses a global Session object, which encapsulates a thread pool as # well as state such as cookies and authentication. This state may pose # problems for users, where one step may get a cookie that is subsequently used # by another step in a different build. 
_session = None def getSession(): global _session if _session is None: _session = txrequests.Session() reactor.addSystemEventTrigger("before", "shutdown", closeSession) return _session def setSession(session): global _session _session = session def closeSession(): global _session if _session is not None: _session.close() _session = None class HTTPStep(BuildStep): name = 'HTTPStep' description = 'Requesting' descriptionDone = 'Requested' requestsParams = ["params", "data", "json", "headers", "cookies", "files", "auth", "timeout", "allow_redirects", "proxies", "hooks", "stream", "verify", "cert"] renderables = requestsParams + ["method", "url"] session = None def __init__(self, url, method, **kwargs): if txrequests is None: config.error( "Need to install txrequest to use this step:\n\n pip install txrequests") if method not in ('POST', 'GET', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'): config.error("Wrong method given: '%s' is not known" % method) self.method = method self.url = url for param in HTTPStep.requestsParams: setattr(self, param, kwargs.pop(param, None)) super().__init__(**kwargs) def start(self): d = self.doRequest() d.addErrback(self.failed) @defer.inlineCallbacks def doRequest(self): # create a new session if it doesn't exist self.session = getSession() requestkwargs = { 'method': self.method, 'url': self.url } for param in self.requestsParams: value = getattr(self, param, None) if value is not None: requestkwargs[param] = value log = self.addLog('log') # known methods already tested in __init__ log.addHeader('Performing %s request to %s\n' % (self.method, self.url)) if self.params: log.addHeader('Parameters:\n') params = requestkwargs.get("params", {}) if params: params = sorted(params.items(), key=lambda x: x[0]) requestkwargs['params'] = params for k, v in params: log.addHeader('\t%s: %s\n' % (k, v)) data = requestkwargs.get("data", None) if data: log.addHeader('Data:\n') if isinstance(data, dict): for k, v in data.items(): log.addHeader('\t%s: %s\n' % (k, v)) else: log.addHeader('\t%s\n' % data) try: r = yield self.session.request(**requestkwargs) except requests.exceptions.ConnectionError as e: log.addStderr( 'An exception occurred while performing the request: %s' % e) self.finished(FAILURE) return if r.history: log.addStdout('\nRedirected %d times:\n\n' % len(r.history)) for rr in r.history: self.log_response(rr) log.addStdout('=' * 60 + '\n') self.log_response(r) log.finish() self.descriptionDone = ["Status code: %d" % r.status_code] if (r.status_code < 400): self.finished(SUCCESS) else: self.finished(FAILURE) def log_response(self, response): log = self.getLog('log') log.addHeader('Request Header:\n') for k, v in response.request.headers.items(): log.addHeader('\t%s: %s\n' % (k, v)) log.addStdout('URL: %s\n' % response.url) if response.status_code == requests.codes.ok: log.addStdout('Status: %s\n' % response.status_code) else: log.addStderr('Status: %s\n' % response.status_code) log.addHeader('Response Header:\n') for k, v in response.headers.items(): log.addHeader('\t%s: %s\n' % (k, v)) log.addStdout(' ------ Content ------\n%s' % response.text) self.addLog('content').addStdout(response.text) class POST(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='POST', **kwargs) class GET(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='GET', **kwargs) class PUT(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='PUT', **kwargs) class DELETE(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, 
method='DELETE', **kwargs) class HEAD(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='HEAD', **kwargs) class OPTIONS(HTTPStep): def __init__(self, url, **kwargs): super().__init__(url, method='OPTIONS', **kwargs) buildbot-2.6.0/master/buildbot/steps/master.py000066400000000000000000000203621361162603000214260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import pprint import re from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.internet.protocol import ProcessProtocol from twisted.python import runtime from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep class MasterShellCommand(BuildStep): """ Run a shell command locally - on the buildmaster. The shell command COMMAND is specified just as for a RemoteShellCommand. Note that extra logfiles are not supported. """ name = 'MasterShellCommand' description = 'Running' descriptionDone = 'Ran' descriptionSuffix = None renderables = ['command', 'env'] haltOnFailure = True flunkOnFailure = True def __init__(self, command, **kwargs): self.env = kwargs.pop('env', None) self.usePTY = kwargs.pop('usePTY', 0) self.interruptSignal = kwargs.pop('interruptSignal', 'KILL') self.logEnviron = kwargs.pop('logEnviron', True) super().__init__(**kwargs) self.command = command self.masterWorkdir = self.workdir class LocalPP(ProcessProtocol): def __init__(self, step): self.step = step def outReceived(self, data): self.step.stdio_log.addStdout(data) def errReceived(self, data): self.step.stdio_log.addStderr(data) def processEnded(self, status_object): if status_object.value.exitCode is not None: self.step.stdio_log.addHeader( "exit status %d\n" % status_object.value.exitCode) if status_object.value.signal is not None: self.step.stdio_log.addHeader( "signal %s\n" % status_object.value.signal) self.step.processEnded(status_object) def start(self): # render properties command = self.command # set up argv if isinstance(command, (str, bytes)): if runtime.platformType == 'win32': # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += [command] else: # for posix, use /bin/sh. 
for other non-posix, well, doesn't # hurt to try argv = ['/bin/sh', '-c', command] else: if runtime.platformType == 'win32': # allow %COMSPEC% to have args argv = os.environ['COMSPEC'].split() if '/c' not in argv: argv += ['/c'] argv += list(command) else: argv = command self.stdio_log = stdio_log = self.addLog("stdio") if isinstance(command, (str, bytes)): stdio_log.addHeader(command.strip() + "\n\n") else: stdio_log.addHeader(" ".join(command) + "\n\n") stdio_log.addHeader("** RUNNING ON BUILDMASTER **\n") stdio_log.addHeader(" in dir %s\n" % os.getcwd()) stdio_log.addHeader(" argv: %s\n" % (argv,)) self.step_status.setText(self.describe()) if self.env is None: env = os.environ else: assert isinstance(self.env, dict) env = self.env for key, v in self.env.items(): if isinstance(v, list): # Need to do os.pathsep translation. We could either do that # by replacing all incoming ':'s with os.pathsep, or by # accepting lists. I like lists better. # If it's not a string, treat it as a sequence to be # turned in to a string. self.env[key] = os.pathsep.join(self.env[key]) # do substitution on variable values matching pattern: ${name} p = re.compile(r'\${([0-9a-zA-Z_]*)}') def subst(match): return os.environ.get(match.group(1), "") newenv = {} for key, v in env.items(): if v is not None: if not isinstance(v, (str, bytes)): raise RuntimeError("'env' values must be strings or " "lists; key '%s' is incorrect" % (key,)) newenv[key] = p.sub(subst, env[key]) env = newenv if self.logEnviron: stdio_log.addHeader(" env: %r\n" % (env,)) # TODO add a timeout? self.process = reactor.spawnProcess(self.LocalPP(self), argv[0], argv, path=self.masterWorkdir, usePTY=self.usePTY, env=env) # (the LocalPP object will call processEnded for us) def processEnded(self, status_object): if status_object.value.signal is not None: self.descriptionDone = ["killed (%s)" % status_object.value.signal] self.step_status.setText(self.describe(done=True)) self.finished(FAILURE) elif status_object.value.exitCode != 0: self.descriptionDone = [ "failed (%d)" % status_object.value.exitCode] self.step_status.setText(self.describe(done=True)) self.finished(FAILURE) else: self.step_status.setText(self.describe(done=True)) self.finished(SUCCESS) def interrupt(self, reason): try: self.process.signalProcess(self.interruptSignal) except KeyError: # Process not started yet pass except error.ProcessExitedAlready: pass super().interrupt(reason) class SetProperty(BuildStep): name = 'SetProperty' description = ['Setting'] descriptionDone = ['Set'] renderables = ['property', 'value'] def __init__(self, property, value, **kwargs): super().__init__(**kwargs) self.property = property self.value = value def run(self): properties = self.build.getProperties() properties.setProperty( self.property, self.value, self.name, runtime=True) return defer.succeed(SUCCESS) class SetProperties(BuildStep): name = 'SetProperties' description = ['Setting Properties..'] descriptionDone = ['Properties Set'] renderables = ['properties'] def __init__(self, properties=None, **kwargs): super().__init__(**kwargs) self.properties = properties def run(self): if self.properties is None: return defer.succeed(SUCCESS) for k, v in self.properties.items(): self.setProperty(k, v, self.name, runtime=True) return defer.succeed(SUCCESS) class Assert(BuildStep): name = 'Assert' description = ['Checking..'] descriptionDone = ["checked"] renderables = ['check'] def __init__(self, check, **kwargs): super().__init__(**kwargs) self.check = check self.descriptionDone = ["checked 
{}".format(repr(self.check))] def run(self): if self.check: return defer.succeed(SUCCESS) return defer.succeed(FAILURE) class LogRenderable(BuildStep): name = 'LogRenderable' description = ['Logging'] descriptionDone = ['Logged'] renderables = ['content'] def __init__(self, content, **kwargs): super().__init__(**kwargs) self.content = content def start(self): content = pprint.pformat(self.content) self.addCompleteLog(name='Output', text=content) self.step_status.setText(self.describe(done=True)) self.finished(SUCCESS) buildbot-2.6.0/master/buildbot/steps/maxq.py000066400000000000000000000040111361162603000210720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot import config from buildbot.process import buildstep from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.shell import ShellCommand class MaxQObserver(buildstep.LogLineObserver): def __init__(self): super().__init__() self.failures = 0 def outLineReceived(self, line): if line.startswith('TEST FAILURE:'): self.failures += 1 class MaxQ(ShellCommand): flunkOnFailure = True name = "maxq" def __init__(self, testdir=None, **kwargs): if not testdir: config.error("please pass testdir") kwargs['command'] = 'run_maxq.py %s' % (testdir,) super().__init__(**kwargs) self.observer = MaxQObserver() self.addLogObserver('stdio', self.observer) def commandComplete(self, cmd): self.failures = self.observer.failures def evaluateCommand(self, cmd): # treat a nonzero exit status as a failure, if no other failures are # detected if not self.failures and cmd.didFail(): self.failures = 1 if self.failures: return FAILURE return SUCCESS def getResultSummary(self): if self.failures: return {'step': "%d maxq failures" % self.failures} return {'step': 'success'} buildbot-2.6.0/master/buildbot/steps/mswin.py000066400000000000000000000067241361162603000212760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.python import log from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.shell import ShellCommand class Robocopy(ShellCommand): """ Robocopy build step. This is just a wrapper around the standard shell command that will handle arguments and return codes accordingly for Robocopy. """ renderables = [ 'custom_opts', 'destination', 'exclude_dirs', 'exclude_files', 'files', 'source' ] # Robocopy exit flags (they are combined to make up the exit code) # See: http://ss64.com/nt/robocopy-exit.html return_flags = { FAILURE: [8, 16], WARNINGS: [2, 4], SUCCESS: [0, 1] } def __init__(self, source, destination, exclude=None, exclude_files=None, **kwargs): self.source = source self.destination = destination self.files = kwargs.pop('files', None) self.recursive = kwargs.pop('recursive', False) self.mirror = kwargs.pop('mirror', False) self.move = kwargs.pop('move', False) self.exclude_files = exclude_files if exclude and not exclude_files: self.exclude_files = exclude self.exclude_dirs = kwargs.pop('exclude_dirs', None) self.custom_opts = kwargs.pop('custom_opts', None) self.verbose = kwargs.pop('verbose', False) super().__init__(**kwargs) def start(self): command = ['robocopy', self.source, self.destination] if self.files: command += self.files if self.recursive: command.append('/E') if self.mirror: command.append('/MIR') if self.move: command.append('/MOVE') if self.exclude_files: command.append('/XF') command += self.exclude_files if self.exclude_dirs: command.append('/XD') command += self.exclude_dirs if self.verbose: command += ['/V', '/TS', '/FP'] if self.custom_opts: command += self.custom_opts command += ['/TEE', '/NP'] self.setCommand(command) super().start() def evaluateCommand(self, cmd): # If we have a "clean" return code, it's good. # Otherwise, look for errors first, warnings second. if cmd.rc == 0 or cmd.rc == 1: return SUCCESS for result in [FAILURE, WARNINGS]: for flag in self.return_flags[result]: if (cmd.rc & flag) == flag: return result log.msg("Unknown return code for Robocopy: %s" % cmd.rc) return EXCEPTION buildbot-2.6.0/master/buildbot/steps/mtrlogobserver.py000066400000000000000000000431201361162603000232040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import sys from twisted.enterprise import adbapi from twisted.internet import defer from twisted.python import log from buildbot.process.buildstep import LogLineObserver from buildbot.steps.shell import Test class EqConnectionPool(adbapi.ConnectionPool): """This class works the same way as twisted.enterprise.adbapi.ConnectionPool. But it adds the ability to compare connection pools for equality (by comparing the arguments passed to the constructor). 
This is useful when passing the ConnectionPool to a BuildStep, as otherwise Buildbot will consider the buildstep (and hence the containing buildfactory) to have changed every time the configuration is reloaded. It also sets some defaults differently from adbapi.ConnectionPool that are more suitable for use in MTR. """ def __init__(self, *args, **kwargs): self._eqKey = (args, kwargs) super().__init__(cp_reconnect=True, cp_min=1, cp_max=3, *args, **kwargs) def __eq__(self, other): if isinstance(other, EqConnectionPool): return self._eqKey == other._eqKey return False def __ne__(self, other): return not self.__eq__(other) class MtrTestFailData: def __init__(self, testname, variant, result, info, text, callback): self.testname = testname self.variant = variant self.result = result self.info = info self.text = text self.callback = callback def add(self, line): self.text += line def fireCallback(self): return self.callback(self.testname, self.variant, self.result, self.info, self.text) class MtrLogObserver(LogLineObserver): """ Class implementing a log observer (can be passed to BuildStep.addLogObserver(). It parses the output of mysql-test-run.pl as used in MySQL, MariaDB, Drizzle, etc. It counts number of tests run and uses it to provide more accurate completion estimates. It parses out test failures from the output and summarizes the results on the Waterfall page. It also passes the information to methods that can be overridden in a subclass to do further processing on the information.""" _line_re = re.compile( r"^([-._0-9a-zA-z]+)( '[-_ a-zA-Z]+')?\s+(w[0-9]+\s+)?\[ (fail|pass) \]\s*(.*)$") _line_re2 = re.compile( r"^[-._0-9a-zA-z]+( '[-_ a-zA-Z]+')?\s+(w[0-9]+\s+)?\[ [-a-z]+ \]") _line_re3 = re.compile( r"^\*\*\*Warnings generated in error logs during shutdown after running tests: (.*)") _line_re4 = re.compile(r"^The servers were restarted [0-9]+ times$") _line_re5 = re.compile(r"^Only\s+[0-9]+\s+of\s+[0-9]+\s+completed.$") def __init__(self, textLimit=5, testNameLimit=16, testType=None): self.textLimit = textLimit self.testNameLimit = testNameLimit self.testType = testType self.numTests = 0 self.testFail = None self.failList = [] self.warnList = [] super().__init__() def setLog(self, loog): super().setLog(loog) d = loog.waitUntilFinished() d.addCallback(lambda l: self.closeTestFail()) def outLineReceived(self, line): stripLine = line.strip("\r\n") m = self._line_re.search(stripLine) if m: testname, variant, worker, result, info = m.groups() self.closeTestFail() self.numTests += 1 self.step.setProgress('tests', self.numTests) if result == "fail": if variant is None: variant = "" else: variant = variant[2:-1] self.openTestFail( testname, variant, result, info, stripLine + "\n") else: m = self._line_re3.search(stripLine) # pylint: disable=too-many-boolean-expressions if m: stuff = m.group(1) self.closeTestFail() testList = stuff.split(" ") self.doCollectWarningTests(testList) elif (self._line_re2.search(stripLine) or self._line_re4.search(stripLine) or self._line_re5.search(stripLine) or stripLine == "Test suite timeout! Terminating..." 
or stripLine.startswith("mysql-test-run: *** ERROR: Not all tests completed") or (stripLine.startswith("-" * 60) and self.testFail is not None)): self.closeTestFail() else: self.addTestFailOutput(stripLine + "\n") def openTestFail(self, testname, variant, result, info, line): self.testFail = MtrTestFailData( testname, variant, result, info, line, self.doCollectTestFail) def addTestFailOutput(self, line): if self.testFail is not None: self.testFail.add(line) def closeTestFail(self): if self.testFail is not None: self.testFail.fireCallback() self.testFail = None def addToText(self, src, dst): lastOne = None count = 0 for t in src: if t != lastOne: dst.append(t) count += 1 if count >= self.textLimit: break def makeText(self, done): if done: text = ["test"] else: text = ["testing"] if self.testType: text.append(self.testType) fails = sorted(self.failList[:]) self.addToText(fails, text) warns = sorted(self.warnList[:]) self.addToText(warns, text) return text # Update waterfall status. def updateText(self): self.step.step_status.setText(self.makeText(False)) strip_re = re.compile(r"^[a-z]+\.") def displayTestName(self, testname): displayTestName = self.strip_re.sub("", testname) if len(displayTestName) > self.testNameLimit: displayTestName = displayTestName[ :(self.testNameLimit - 2)] + "..." return displayTestName def doCollectTestFail(self, testname, variant, result, info, text): self.failList.append("F:" + self.displayTestName(testname)) self.updateText() self.collectTestFail(testname, variant, result, info, text) def doCollectWarningTests(self, testList): for t in testList: self.warnList.append("W:" + self.displayTestName(t)) self.updateText() self.collectWarningTests(testList) # These two methods are overridden to actually do something with the data. def collectTestFail(self, testname, variant, result, info, text): pass def collectWarningTests(self, testList): pass class MTR(Test): """ Build step that runs mysql-test-run.pl, as used in MySQL, Drizzle, MariaDB, etc. It uses class MtrLogObserver to parse test results out from the output of mysql-test-run.pl, providing better completion time estimates and summarizing test failures on the waterfall page. It also provides access to mysqld server error logs from the test run to help debugging any problems. Optionally, it can insert into a database data about the test run, including details of any test failures. Parameters: textLimit Maximum number of test failures to show on the waterfall page (to not flood the page in case of a large number of test failures. Defaults to 5. testNameLimit Maximum length of test names to show unabbreviated in the waterfall page, to avoid excessive column width. Defaults to 16. parallel Value of --parallel option used for mysql-test-run.pl (number of processes used to run the test suite in parallel). Defaults to 4. This is used to determine the number of server error log files to download from the worker. Specifying a too high value does not hurt (as nonexisting error logs will be ignored), however if using --parallel value greater than the default it needs to be specified, or some server error logs will be missing. dbpool An instance of twisted.enterprise.adbapi.ConnectionPool, or None. Defaults to None. If specified, results are inserted into the database using the ConnectionPool. 
The class process.mtrlogobserver.EqConnectionPool subclass of ConnectionPool can be useful to pass as value for dbpool, to avoid having config reloads think the Buildstep is changed just because it gets a new ConnectionPool instance (even though connection parameters are unchanged). autoCreateTables Boolean, defaults to False. If True (and dbpool is specified), the necessary database tables will be created automatically if they do not exist already. Alternatively, the tables can be created manually from the SQL statements found in the mtrlogobserver.py source file. test_type test_info Two descriptive strings that will be inserted in the database tables if dbpool is specified. The test_type string, if specified, will also appear on the waterfall page.""" renderables = ['mtr_subdir', 'parallel'] def __init__(self, dbpool=None, test_type=None, test_info="", description=None, descriptionDone=None, autoCreateTables=False, textLimit=5, testNameLimit=16, parallel=4, logfiles=None, lazylogfiles=True, warningPattern="MTR's internal check of the test case '.*' failed", mtr_subdir="mysql-test", **kwargs): if logfiles is None: logfiles = {} if description is None: description = ["testing"] if test_type: description.append(test_type) if descriptionDone is None: descriptionDone = ["test"] if test_type: descriptionDone.append(test_type) super().__init__(logfiles=logfiles, lazylogfiles=lazylogfiles, description=description, descriptionDone=descriptionDone, warningPattern=warningPattern, **kwargs) self.dbpool = dbpool self.test_type = test_type self.test_info = test_info self.autoCreateTables = autoCreateTables self.textLimit = textLimit self.testNameLimit = testNameLimit self.parallel = parallel self.mtr_subdir = mtr_subdir self.progressMetrics += ('tests',) def start(self): # Add mysql server logfiles. for mtr in range(0, self.parallel + 1): for mysqld in range(1, 4 + 1): if mtr == 0: logname = "mysqld.%d.err" % mysqld filename = "var/log/mysqld.%d.err" % mysqld else: logname = "mysqld.%d.err.%d" % (mysqld, mtr) filename = "var/%d/log/mysqld.%d.err" % (mtr, mysqld) self.addLogFile(logname, self.mtr_subdir + "/" + filename) self.myMtr = self.MyMtrLogObserver(textLimit=self.textLimit, testNameLimit=self.testNameLimit, testType=self.test_type) self.addLogObserver("stdio", self.myMtr) # Insert a row for this test run into the database and set up # build properties, then start the command proper. d = self.registerInDB() d.addCallback(self.afterRegisterInDB) d.addErrback(self.failed) def getText(self, command, results): return self.myMtr.makeText(True) def runInteractionWithRetry(self, actionFn, *args, **kw): """ Run a database transaction with dbpool.runInteraction, but retry the transaction in case of a temporary error (like connection lost). This is needed to be robust against things like database connection idle timeouts. The passed callable that implements the transaction must be retryable, ie. it must not have any destructive side effects in the case where an exception is thrown and/or rollback occurs that would prevent it from functioning correctly when called again.""" def runWithRetry(txn, *args, **kw): retryCount = 0 while(True): try: return actionFn(txn, *args, **kw) except txn.OperationalError: retryCount += 1 if retryCount >= 5: raise excType, excValue, excTraceback = sys.exc_info() log.msg("Database transaction failed (caught exception %s(%s)), retrying ..." 
% ( excType, excValue)) txn.close() txn.reconnect() txn.reopen() return self.dbpool.runInteraction(runWithRetry, *args, **kw) def runQueryWithRetry(self, *args, **kw): """ Run a database query, like with dbpool.runQuery, but retry the query in case of a temporary error (like connection lost). This is needed to be robust against things like database connection idle timeouts.""" def runQuery(txn, *args, **kw): txn.execute(*args, **kw) return txn.fetchall() return self.runInteractionWithRetry(runQuery, *args, **kw) def registerInDB(self): if self.dbpool: return self.runInteractionWithRetry(self.doRegisterInDB) return defer.succeed(0) # The real database work is done in a thread in a synchronous way. def doRegisterInDB(self, txn): # Auto create tables. # This is off by default, as it gives warnings in log file # about tables already existing (and I did not find the issue # important enough to find a better fix). if self.autoCreateTables: txn.execute(""" CREATE TABLE IF NOT EXISTS test_run( id INT PRIMARY KEY AUTO_INCREMENT, branch VARCHAR(100), revision VARCHAR(32) NOT NULL, platform VARCHAR(100) NOT NULL, dt TIMESTAMP NOT NULL, bbnum INT NOT NULL, typ VARCHAR(32) NOT NULL, info VARCHAR(255), KEY (branch, revision), KEY (dt), KEY (platform, bbnum) ) ENGINE=innodb """) txn.execute(""" CREATE TABLE IF NOT EXISTS test_failure( test_run_id INT NOT NULL, test_name VARCHAR(100) NOT NULL, test_variant VARCHAR(16) NOT NULL, info_text VARCHAR(255), failure_text TEXT, PRIMARY KEY (test_run_id, test_name, test_variant) ) ENGINE=innodb """) txn.execute(""" CREATE TABLE IF NOT EXISTS test_warnings( test_run_id INT NOT NULL, list_id INT NOT NULL, list_idx INT NOT NULL, test_name VARCHAR(100) NOT NULL, PRIMARY KEY (test_run_id, list_id, list_idx) ) ENGINE=innodb """) revision = self.getProperty("got_revision") if revision is None: revision = self.getProperty("revision") typ = "mtr" if self.test_type: typ = self.test_type txn.execute(""" INSERT INTO test_run(branch, revision, platform, dt, bbnum, typ, info) VALUES (%s, %s, %s, CURRENT_TIMESTAMP(), %s, %s, %s) """, (self.getProperty("branch"), revision, self.getProperty("buildername"), self.getProperty("buildnumber"), typ, self.test_info)) return txn.lastrowid def afterRegisterInDB(self, insert_id): self.setProperty("mtr_id", insert_id) self.setProperty("mtr_warn_id", 0) super().start() def reportError(self, err): log.msg("Error in async insert into database: %s" % err) class MyMtrLogObserver(MtrLogObserver): def collectTestFail(self, testname, variant, result, info, text): # Insert asynchronously into database. dbpool = self.step.dbpool if dbpool is None: return defer.succeed(None) run_id = self.step.getProperty("mtr_id") if variant is None: variant = "" d = self.step.runQueryWithRetry(""" INSERT INTO test_failure(test_run_id, test_name, test_variant, info_text, failure_text) VALUES (%s, %s, %s, %s, %s) """, (run_id, testname, variant, info, text)) d.addErrback(self.step.reportError) return d def collectWarningTests(self, testList): # Insert asynchronously into database. 
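        # (Added descriptive note, not in the original source: the code below
        # builds one "(%s, %s, %s, %s)" placeholder group per warned test and
        # flattens (run_id, warn_id, index, test name) into a single parameter
        # tuple, so the whole warning list is stored with a single INSERT.)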
dbpool = self.step.dbpool if dbpool is None: return defer.succeed(None) run_id = self.step.getProperty("mtr_id") warn_id = self.step.getProperty("mtr_warn_id") self.step.setProperty("mtr_warn_id", warn_id + 1) q = ("INSERT INTO test_warnings(test_run_id, list_id, list_idx, test_name) " + "VALUES " + ", ".join(map(lambda x: "(%s, %s, %s, %s)", testList))) v = [] idx = 0 for t in testList: v.extend([run_id, warn_id, idx, t]) idx = idx + 1 d = self.step.runQueryWithRetry(q, tuple(v)) d.addErrback(self.step.reportError) return d buildbot-2.6.0/master/buildbot/steps/package/000077500000000000000000000000001361162603000211515ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/package/__init__.py000066400000000000000000000014771361162603000232730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps specific to package formats. """ buildbot-2.6.0/master/buildbot/steps/package/deb/000077500000000000000000000000001361162603000217035ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/package/deb/__init__.py000066400000000000000000000000001361162603000240020ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/package/deb/lintian.py000066400000000000000000000061711361162603000237200ustar00rootroot00000000000000# This program is free software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder """ Steps and objects related to lintian """ from buildbot import config from buildbot.process import buildstep from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.package import util as pkgutil from buildbot.steps.shell import ShellCommand class MaxQObserver(buildstep.LogLineObserver): def __init__(self): super().__init__() self.failures = 0 def outLineReceived(self, line): if line.startswith('TEST FAILURE:'): self.failures += 1 class DebLintian(ShellCommand): name = "lintian" description = ["Lintian running"] descriptionDone = ["Lintian"] fileloc = None suppressTags = [] warnCount = 0 errCount = 0 flunkOnFailure = False warnOnFailure = True def __init__(self, fileloc=None, suppressTags=None, **kwargs): """ Create the DebLintian object. @type fileloc: str @param fileloc: Location of the .deb or .changes to test. @type suppressTags: list @param suppressTags: List of tags to suppress. @type kwargs: dict @param kwargs: all other keyword arguments. """ super().__init__(**kwargs) if fileloc: self.fileloc = fileloc if suppressTags: self.suppressTags = suppressTags if not self.fileloc: config.error("You must specify a fileloc") self.command = ["lintian", "-v", self.fileloc] if self.suppressTags: for tag in self.suppressTags: self.command += ['--suppress-tags', tag] self.obs = pkgutil.WEObserver() self.addLogObserver('stdio', self.obs) def createSummary(self, log): """ Create nice summary logs. @param log: log to create summary off of. """ warnings = self.obs.warnings errors = self.obs.errors if warnings: self.addCompleteLog('%d Warnings' % len(warnings), "\n".join(warnings)) self.warnCount = len(warnings) if errors: self.addCompleteLog('%d Errors' % len(errors), "\n".join(errors)) self.errCount = len(errors) def evaluateCommand(self, cmd): if (cmd.rc != 0 or self.errCount): return FAILURE if self.warnCount: return WARNINGS return SUCCESS buildbot-2.6.0/master/buildbot/steps/package/deb/pbuilder.py000066400000000000000000000172131361162603000240670ustar00rootroot00000000000000# This program is free software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder """ Steps and objects related to pbuilder """ import re import stat import time from twisted.python import log from buildbot import config from buildbot.process import logobserver from buildbot.process import remotecommand from buildbot.process.buildstep import FAILURE from buildbot.steps.shell import WarningCountingShellCommand class DebPbuilder(WarningCountingShellCommand): """Build a debian package with pbuilder inside of a chroot.""" name = "pbuilder" haltOnFailure = 1 flunkOnFailure = 1 description = ["building"] descriptionDone = ["built"] warningPattern = r".*(warning[: ]|\sW: ).*" architecture = None distribution = 'stable' basetgz = "/var/cache/pbuilder/%(distribution)s-%(architecture)s-buildbot.tgz" mirror = "http://cdn.debian.net/debian/" extrapackages = [] keyring = None components = None maxAge = 60 * 60 * 24 * 7 pbuilder = '/usr/sbin/pbuilder' baseOption = '--basetgz' def __init__(self, architecture=None, distribution=None, basetgz=None, mirror=None, extrapackages=None, keyring=None, components=None, **kwargs): """ Creates the DebPbuilder object. @type architecture: str @param architecture: the name of the architecture to build @type distribution: str @param distribution: the man of the distribution to use @type basetgz: str @param basetgz: the path or path template of the basetgz @type mirror: str @param mirror: the mirror for building basetgz @type extrapackages: list @param extrapackages: adds packages specified to buildroot @type keyring: str @param keyring: keyring file to use for verification @type components: str @param components: components to use for chroot creation @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) if architecture: self.architecture = architecture if distribution: self.distribution = distribution if mirror: self.mirror = mirror if extrapackages: self.extrapackages = extrapackages if keyring: self.keyring = keyring if components: self.components = components if self.architecture: kwargs['architecture'] = self.architecture else: kwargs['architecture'] = 'local' kwargs['distribution'] = self.distribution if basetgz: self.basetgz = basetgz % kwargs else: self.basetgz = self.basetgz % kwargs if not self.distribution: config.error("You must specify a distribution.") self.command = [ 'pdebuild', '--buildresult', '.', '--pbuilder', self.pbuilder] if self.architecture: self.command += ['--architecture', self.architecture] self.command += ['--', '--buildresult', '.', self.baseOption, self.basetgz] if self.extrapackages: self.command += ['--extrapackages', " ".join(self.extrapackages)] self.suppressions.append( (None, re.compile(r"\.pbuilderrc does not exist"), None, None)) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) # Check for Basetgz def start(self): cmd = remotecommand.RemoteCommand('stat', {'file': self.basetgz}) d = self.runCommand(cmd) d.addCallback(lambda res: self.checkBasetgz(cmd)) d.addErrback(self.failed) return d def checkBasetgz(self, cmd): if cmd.rc != 0: log.msg("basetgz not found, initializing it.") command = ['sudo', self.pbuilder, '--create', self.baseOption, self.basetgz, '--distribution', self.distribution, '--mirror', self.mirror] if self.architecture: command += ['--architecture', self.architecture] if self.extrapackages: command += ['--extrapackages', " ".join(self.extrapackages)] if self.keyring: command += ['--debootstrapopts', "--keyring=%s" % self.keyring] if 
self.components: command += ['--components', self.components] cmd = remotecommand.RemoteShellCommand(self.workdir, command) stdio_log = stdio_log = self.addLog("pbuilder") cmd.useLog(stdio_log, True, "stdio") d = self.runCommand(cmd) self.step_status.setText(["PBuilder create."]) d.addCallback(lambda res: self.startBuild(cmd)) return d s = cmd.updates["stat"][-1] # basetgz will be a file when running in pbuilder # and a directory in case of cowbuilder if stat.S_ISREG(s[stat.ST_MODE]) or stat.S_ISDIR(s[stat.ST_MODE]): log.msg("%s found." % self.basetgz) age = time.time() - s[stat.ST_MTIME] if age >= self.maxAge: log.msg("basetgz outdated, updating") command = ['sudo', self.pbuilder, '--update', self.baseOption, self.basetgz] cmd = remotecommand.RemoteShellCommand(self.workdir, command) stdio_log = stdio_log = self.addLog("pbuilder") cmd.useLog(stdio_log, True, "stdio") d = self.runCommand(cmd) d.addCallback(lambda res: self.startBuild(cmd)) return d return self.startBuild(cmd) else: log.msg("%s is not a file or a directory." % self.basetgz) self.finished(FAILURE) def startBuild(self, cmd): if cmd.rc != 0: log.msg("Failure when running %s." % cmd) self.finished(FAILURE) else: return super().start() def logConsumer(self): r = re.compile(r"dpkg-genchanges >\.\./(.+\.changes)") while True: stream, line = yield mo = r.search(line) if mo: self.setProperty("deb-changes", mo.group(1), "DebPbuilder") class DebCowbuilder(DebPbuilder): """Build a debian package with cowbuilder inside of a chroot.""" name = "cowbuilder" basetgz = "/var/cache/pbuilder/%(distribution)s-%(architecture)s-buildbot.cow/" pbuilder = '/usr/sbin/cowbuilder' baseOption = '--basepath' class UbuPbuilder(DebPbuilder): """Build a Ubuntu package with pbuilder inside of a chroot.""" distribution = None mirror = "http://archive.ubuntu.com/ubuntu/" components = "main universe" class UbuCowbuilder(DebCowbuilder): """Build a Ubuntu package with cowbuilder inside of a chroot.""" distribution = None mirror = "http://archive.ubuntu.com/ubuntu/" components = "main universe" buildbot-2.6.0/master/buildbot/steps/package/rpm/000077500000000000000000000000001361162603000217475ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/package/rpm/__init__.py000066400000000000000000000022461361162603000240640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps specific to the rpm format. 
""" from buildbot.steps.package.rpm.mock import MockBuildSRPM from buildbot.steps.package.rpm.mock import MockRebuild from buildbot.steps.package.rpm.rpmbuild import RpmBuild from buildbot.steps.package.rpm.rpmlint import RpmLint from buildbot.steps.package.rpm.rpmspec import RpmSpec __all__ = ['RpmBuild', 'RpmSpec', 'RpmLint', 'MockBuildSRPM', 'MockRebuild'] buildbot-2.6.0/master/buildbot/steps/package/rpm/mock.py000066400000000000000000000131461361162603000232570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder """ Steps and objects related to mock building. """ import re from buildbot import config from buildbot.process import logobserver from buildbot.process import remotecommand from buildbot.steps.shell import ShellCommand class MockStateObserver(logobserver.LogLineObserver): _line_re = re.compile(r'^.*State Changed: (.*)$') def outLineReceived(self, line): m = self._line_re.search(line.strip()) if m: state = m.group(1) if not state == 'end': self.step.descriptionSuffix = ["[%s]" % m.group(1)] else: self.step.descriptionSuffix = None self.step.step_status.setText(self.step.describe(False)) class Mock(ShellCommand): """Add the mock logfiles and clean them if they already exist. Add support for the root and resultdir parameter of mock.""" name = "mock" renderables = ["root", "resultdir"] haltOnFailure = 1 flunkOnFailure = 1 mock_logfiles = ['build.log', 'root.log', 'state.log'] root = None resultdir = None def __init__(self, root=None, resultdir=None, **kwargs): """ Creates the Mock object. @type root: str @param root: the name of the mock buildroot @type resultdir: str @param resultdir: the path of the result dir @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) if root: self.root = root if resultdir: self.resultdir = resultdir if not self.root: config.error("You must specify a mock root") self.command = ['mock', '--root', self.root] if self.resultdir: self.command += ['--resultdir', self.resultdir] def start(self): """ Try to remove the old mock logs first. """ if self.resultdir: for lname in self.mock_logfiles: self.logfiles[lname] = self.build.path_module.join(self.resultdir, lname) else: for lname in self.mock_logfiles: self.logfiles[lname] = lname self.addLogObserver('state.log', MockStateObserver()) cmd = remotecommand.RemoteCommand('rmdir', {'dir': [self.build.path_module.join('build', self.logfiles[l]) for l in self.mock_logfiles]}) d = self.runCommand(cmd) # must resolve super() outside of the callback context. super_ = super() @d.addCallback def removeDone(cmd): super_.start() d.addErrback(self.failed) class MockBuildSRPM(Mock): """Build a srpm within a mock. Requires a spec file and a sources dir.""" name = "mockbuildsrpm" description = ["mock buildsrpm"] descriptionDone = ["mock buildsrpm"] spec = None sources = '.' 
def __init__(self, spec=None, sources=None, **kwargs): """ Creates the MockBuildSRPM object. @type spec: str @param spec: the path of the specfiles. @type sources: str @param sources: the path of the sources dir. @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) if spec: self.spec = spec if sources: self.sources = sources if not self.spec: config.error("You must specify a spec file") if not self.sources: config.error("You must specify a sources dir") self.command += ['--buildsrpm', '--spec', self.spec, '--sources', self.sources] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def logConsumer(self): r = re.compile(r"Wrote: .*/([^/]*.src.rpm)") while True: stream, line = yield m = r.search(line) if m: self.setProperty("srpm", m.group(1), 'MockBuildSRPM') class MockRebuild(Mock): """Rebuild a srpm within a mock. Requires a srpm file.""" name = "mock" description = ["mock rebuilding srpm"] descriptionDone = ["mock rebuild srpm"] srpm = None def __init__(self, srpm=None, **kwargs): """ Creates the MockRebuildRPM object. @type srpm: str @param srpm: the path of the srpm file. @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) if srpm: self.srpm = srpm if not self.srpm: config.error("You must specify a srpm") self.command += ['--rebuild', self.srpm] buildbot-2.6.0/master/buildbot/steps/package/rpm/rpmbuild.py000066400000000000000000000137251361162603000241470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Dan Radez # Portions Copyright Steve 'Ashcrow' Milner import os from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.steps.shell import ShellCommand class RpmBuild(ShellCommand): """ RpmBuild build step. """ renderables = ['dist'] name = "rpmbuilder" haltOnFailure = 1 flunkOnFailure = 1 description = ["RPMBUILD"] descriptionDone = ["RPMBUILD"] def __init__(self, specfile=None, topdir='`pwd`', builddir='`pwd`', rpmdir='`pwd`', sourcedir='`pwd`', specdir='`pwd`', srcrpmdir='`pwd`', dist='.el6', define=None, autoRelease=False, vcsRevision=False, **kwargs): """ Create the RpmBuild object. @type specfile: str @param specfile: location of the specfile to build @type topdir: str @param topdir: define the _topdir rpm parameter @type builddir: str @param builddir: define the _builddir rpm parameter @type rpmdir: str @param rpmdir: define the _rpmdir rpm parameter @type sourcedir: str @param sourcedir: define the _sourcedir rpm parameter @type specdir: str @param specdir: define the _specdir rpm parameter @type srcrpmdir: str @param srcrpmdir: define the _srcrpmdir rpm parameter @type dist: str @param dist: define the dist string. 
@type define: dict @param define: additional parameters to define @type autoRelease: boolean @param autoRelease: Use auto incrementing release numbers. @type vcsRevision: boolean @param vcsRevision: Use vcs version number as revision number. """ super().__init__(**kwargs) self.dist = dist self.base_rpmbuild = ( 'rpmbuild --define "_topdir %s" --define "_builddir %s"' ' --define "_rpmdir %s" --define "_sourcedir %s"' ' --define "_specdir %s" --define "_srcrpmdir %s"' % (topdir, builddir, rpmdir, sourcedir, specdir, srcrpmdir)) if define is None: define = {} for k, v in define.items(): self.base_rpmbuild += " --define \"{} {}\"".format(k, v) self.specfile = specfile self.autoRelease = autoRelease self.vcsRevision = vcsRevision if not self.specfile: config.error("You must specify a specfile") self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def start(self): rpm_extras_dict = {} rpm_extras_dict['dist'] = self.dist if self.autoRelease: relfile = '%s.release' % ( os.path.basename(self.specfile).split('.')[0]) try: with open(relfile, 'r') as rfile: rel = int(rfile.readline().strip()) except (IOError, TypeError, ValueError): rel = 0 rpm_extras_dict['_release'] = rel with open(relfile, 'w') as rfile: rfile.write(str(rel + 1)) if self.vcsRevision: revision = self.getProperty('got_revision') # only do this in the case where there's a single codebase if revision and not isinstance(revision, dict): rpm_extras_dict['_revision'] = revision self.rpmbuild = self.base_rpmbuild # The unit tests expect a certain order, so we sort the dict to keep # format the same every time for k, v in sorted(rpm_extras_dict.items()): self.rpmbuild = '{0} --define "{1} {2}"'.format( self.rpmbuild, k, v) self.rpmbuild = '{0} -ba {1}'.format(self.rpmbuild, self.specfile) self.command = self.rpmbuild # create the actual RemoteShellCommand instance now kwargs = self.remote_kwargs kwargs['command'] = self.command kwargs['workdir'] = self.workdir cmd = buildstep.RemoteShellCommand(**kwargs) self.setupEnvironment(cmd) self.startCommand(cmd) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def logConsumer(self): rpm_prefixes = ['Provides:', 'Requires(', 'Requires:', 'Checking for unpackaged', 'Wrote:', 'Executing(%', '+ ', 'Processing files:'] rpm_err_pfx = [' ', 'RPM build errors:', 'error: '] self.rpmcmdlog = [] self.rpmerrors = [] while True: stream, line = yield for pfx in rpm_prefixes: if line.startswith(pfx): self.rpmcmdlog.append(line) break for err in rpm_err_pfx: if line.startswith(err): self.rpmerrors.append(line) break def createSummary(self, log): self.addCompleteLog('RPM Command Log', "\n".join(self.rpmcmdlog)) if self.rpmerrors: self.addCompleteLog('RPM Errors', "\n".join(self.rpmerrors)) buildbot-2.6.0/master/buildbot/steps/package/rpm/rpmlint.py000066400000000000000000000046041361162603000240120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Steve 'Ashcrow' Milner """ Steps and objects related to rpmlint. """ from buildbot.steps.package import util as pkgutil from buildbot.steps.shell import Test class RpmLint(Test): """ Rpmlint build step. """ name = "rpmlint" description = ["Checking for RPM/SPEC issues"] descriptionDone = ["Finished checking RPM/SPEC issues"] fileloc = '.' config = None def __init__(self, fileloc=None, config=None, **kwargs): """ Create the RpmLint object. @type fileloc: str @param fileloc: Location glob of the specs or rpms. @type config: str @param config: path to the rpmlint user config. @type kwargs: dict @param kwargs: all other keyword arguments. """ super().__init__(**kwargs) if fileloc: self.fileloc = fileloc if config: self.config = config self.addFactoryArguments(fileloc=fileloc, config=config) self.command = ["rpmlint", "-i"] if self.config: self.command += ['-f', self.config] self.command.append(self.fileloc) self.obs = pkgutil.WEObserver() self.addLogObserver('stdio', self.obs) def createSummary(self, log): """ Create nice summary logs. @param log: log to create summary off of. """ warnings = self.obs.warnings errors = self.obs.errors if warnings: self.addCompleteLog('%d Warnings' % len(warnings), "\n".join(warnings)) if errors: self.addCompleteLog('%d Errors' % len(errors), "\n".join(errors)) buildbot-2.6.0/master/buildbot/steps/package/rpm/rpmspec.py000066400000000000000000000050021361162603000237700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Dan Radez # Portions Copyright Steve 'Ashcrow' Milner """ library to populate parameters from an rpmspec file into a memory structure """ import re from buildbot.steps.shell import ShellCommand class RpmSpec(ShellCommand): """ read parameters out of an rpm spec file """ # initialize spec info vars and get them from the spec file n_regex = re.compile(r'^Name:[ ]*([^\s]*)') v_regex = re.compile(r'^Version:[ ]*([0-9\.]*)') def __init__(self, specfile=None, **kwargs): """ Creates the RpmSpec object. @type specfile: str @param specfile: the name of the specfile to get the package name and version from @type kwargs: dict @param kwargs: All further keyword arguments. """ super().__init__(**kwargs) self.specfile = specfile self._pkg_name = None self._pkg_version = None self._loaded = False def load(self): """ call this function after the file exists to populate properties """ # If we are given a string, open it up else assume it's something we # can call read on.
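# A sketch of how load() is meant to be driven once the spec file exists on
# disk (the path below is illustrative):
#
#     spec = RpmSpec(specfile='packaging/mypkg.spec')
#     spec.load()
#     if spec.loaded:
#         print(spec.pkg_name, spec.pkg_version)
#
# Passing an already-open file-like object instead of a path also works,
# because the code below only iterates over it and closes it.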
if isinstance(self.specfile, str): f = open(self.specfile, 'r') else: f = self.specfile for line in f: if self.v_regex.match(line): self._pkg_version = self.v_regex.match(line).group(1) if self.n_regex.match(line): self._pkg_name = self.n_regex.match(line).group(1) f.close() self._loaded = True # Read-only properties loaded = property(lambda self: self._loaded) pkg_name = property(lambda self: self._pkg_name) pkg_version = property(lambda self: self._pkg_version) buildbot-2.6.0/master/buildbot/steps/package/util.py000066400000000000000000000021561361162603000225040ustar00rootroot00000000000000# This program is free software; you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Marius Rieder from buildbot.process import logobserver class WEObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.warnings = [] self.errors = [] def outLineReceived(self, line): if line.startswith('W: '): self.warnings.append(line) elif line.startswith('E: '): self.errors.append(line) buildbot-2.6.0/master/buildbot/steps/python.py000066400000000000000000000306401361162603000214540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
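# The WEObserver defined just above in buildbot/steps/package/util.py is a
# plain LogLineObserver: a step attaches it to its stdio log and reads the
# collected lists when summarising.  A minimal sketch (hypothetical step
# subclass, shown here only as an example of the pattern):
#
#     class MyLintStep(ShellCommand):
#         def __init__(self, **kwargs):
#             super().__init__(**kwargs)
#             self.obs = WEObserver()
#             self.addLogObserver('stdio', self.obs)
#
#         def createSummary(self, log):
#             if self.obs.warnings:
#                 self.addCompleteLog('warnings', '\n'.join(self.obs.warnings))
#             if self.obs.errors:
#                 self.addCompleteLog('errors', '\n'.join(self.obs.errors))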
# # Copyright Buildbot Team Members import re from buildbot import config from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.shell import ShellCommand class BuildEPYDoc(ShellCommand): name = "epydoc" command = ["make", "epydocs"] description = ["building", "epydocs"] descriptionDone = ["epydoc"] def __init__(self, **kwargs): super().__init__(**kwargs) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def logConsumer(self): self.import_errors = 0 self.warnings = 0 self.errors = 0 while True: stream, line = yield if line.startswith("Error importing "): self.import_errors += 1 if line.find("Warning: ") != -1: self.warnings += 1 if line.find("Error: ") != -1: self.errors += 1 def createSummary(self, log): self.descriptionDone = self.descriptionDone[:] if self.import_errors: self.descriptionDone.append("ierr=%d" % self.import_errors) if self.warnings: self.descriptionDone.append("warn=%d" % self.warnings) if self.errors: self.descriptionDone.append("err=%d" % self.errors) def evaluateCommand(self, cmd): if cmd.didFail(): return FAILURE if self.warnings or self.errors: return WARNINGS return SUCCESS class PyFlakes(ShellCommand): name = "pyflakes" command = ["make", "pyflakes"] description = ["running", "pyflakes"] descriptionDone = ["pyflakes"] flunkOnFailure = False # any pyflakes lines like this cause FAILURE _flunkingIssues = ("undefined",) _MESSAGES = ("unused", "undefined", "redefs", "import*", "misc") def __init__(self, *args, **kwargs): # PyFlakes return 1 for both warnings and errors. We # categorize this initially as WARNINGS so that # evaluateCommand below can inspect the results more closely. kwargs['decodeRC'] = {0: SUCCESS, 1: WARNINGS} super().__init__(*args, **kwargs) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) counts = self.counts = {} summaries = self.summaries = {} for m in self._MESSAGES: counts[m] = 0 summaries[m] = [] # we need a separate variable for syntax errors self._hasSyntaxError = False def logConsumer(self): counts = self.counts summaries = self.summaries first = True while True: stream, line = yield if stream == 'h': continue # the first few lines might contain echoed commands from a 'make # pyflakes' step, so don't count these as warnings. Stop ignoring # the initial lines as soon as we see one with a colon. 
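# For example, a "make pyflakes" build usually echoes the invocation itself
# first (something like "pyflakes buildbot/"), which contains no colon and is
# skipped, while a real finding such as
#     buildbot/foo.py:12: 'os' imported but unused
# contains a colon and ends the skipping.  (The file and line shown here are
# illustrative, not output from this tree.)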
if first: if ':' in line: # there's the colon, this is the first real line first = False # fall through and parse the line else: # skip this line, keep skipping non-colon lines continue if line.find("imported but unused") != -1: m = "unused" elif line.find("*' used; unable to detect undefined names") != -1: m = "import*" elif line.find("undefined name") != -1: m = "undefined" elif line.find("redefinition of unused") != -1: m = "redefs" elif line.find("invalid syntax") != -1: self._hasSyntaxError = True # we can do this, because if a syntax error occurs # the output will only contain the info about it, nothing else m = "misc" else: m = "misc" summaries[m].append(line) counts[m] += 1 def createSummary(self, log): counts, summaries = self.counts, self.summaries self.descriptionDone = self.descriptionDone[:] # we log 'misc' as syntax-error if self._hasSyntaxError: self.addCompleteLog("syntax-error", "\n".join(summaries['misc'])) else: for m in self._MESSAGES: if counts[m]: self.descriptionDone.append("%s=%d" % (m, counts[m])) self.addCompleteLog(m, "\n".join(summaries[m])) self.setProperty("pyflakes-%s" % m, counts[m], "pyflakes") self.setProperty("pyflakes-total", sum(counts.values()), "pyflakes") def evaluateCommand(self, cmd): if cmd.didFail() or self._hasSyntaxError: return FAILURE for m in self._flunkingIssues: if self.getProperty("pyflakes-%s" % m): return FAILURE if self.getProperty("pyflakes-total"): return WARNINGS return SUCCESS class PyLint(ShellCommand): '''A command that knows about pylint output. It is a good idea to add --output-format=parseable to your command, since it includes the filename in the message. ''' name = "pylint" description = ["running", "pylint"] descriptionDone = ["pylint"] # pylint's return codes (see pylint(1) for details) # 1 - 16 will be bit-ORed RC_OK = 0 RC_FATAL = 1 RC_ERROR = 2 RC_WARNING = 4 RC_REFACTOR = 8 RC_CONVENTION = 16 RC_USAGE = 32 # Using the default text output, the message format is : # MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE # with --output-format=parseable it is: (the outer brackets are literal) # FILE_NAME:LINE_NUM: [MESSAGE_TYPE[, OBJECT]] MESSAGE # message type consists of the type char and 4 digits # The message types: _MESSAGES = { 'C': "convention", # for programming standard violation 'R': "refactor", # for bad code smell 'W': "warning", # for python specific problems 'E': "error", # for much probably bugs in the code 'F': "fatal", # error prevented pylint from further processing. 
'I': "info", } _flunkingIssues = ("F", "E") # msg categories that cause FAILURE _re_groupname = 'errtype' _msgtypes_re_str = '(?P<%s>[%s])' % ( _re_groupname, ''.join(list(_MESSAGES))) _default_line_re = re.compile( r'^%s(\d{4})?: *\d+(, *\d+)?:.+' % _msgtypes_re_str) _parseable_line_re = re.compile( r'[^:]+:\d+: \[%s(\d{4})?(\([a-z-]+\))?[,\]] .+' % _msgtypes_re_str) def __init__(self, **kwargs): super().__init__(**kwargs) self.counts = {} self.summaries = {} self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def logConsumer(self): for m in self._MESSAGES: self.counts[m] = 0 self.summaries[m] = [] line_re = None # decide after first match while True: stream, line = yield if stream == 'h': continue if not line_re: # need to test both and then decide on one if self._parseable_line_re.match(line): line_re = self._parseable_line_re elif self._default_line_re.match(line): line_re = self._default_line_re else: # no match yet continue mo = line_re.match(line) if mo: msgtype = mo.group(self._re_groupname) assert msgtype in self._MESSAGES self.summaries[msgtype].append(line) self.counts[msgtype] += 1 def createSummary(self, log): counts, summaries = self.counts, self.summaries self.descriptionDone = self.descriptionDone[:] for msg, fullmsg in sorted(self._MESSAGES.items()): if counts[msg]: self.descriptionDone.append("%s=%d" % (fullmsg, counts[msg])) self.addCompleteLog(fullmsg, "\n".join(summaries[msg])) self.setProperty("pylint-%s" % fullmsg, counts[msg], 'Pylint') self.setProperty("pylint-total", sum(counts.values()), 'Pylint') def evaluateCommand(self, cmd): if cmd.rc & (self.RC_FATAL | self.RC_ERROR | self.RC_USAGE): return FAILURE for msg in self._flunkingIssues: if self.getProperty("pylint-%s" % self._MESSAGES[msg]): return FAILURE if self.getProperty("pylint-total"): return WARNINGS return SUCCESS class Sphinx(ShellCommand): ''' A Step to build sphinx documentation ''' name = "sphinx" description = ["running", "sphinx"] descriptionDone = ["sphinx"] haltOnFailure = True def __init__(self, sphinx_sourcedir='.', sphinx_builddir=None, sphinx_builder=None, sphinx='sphinx-build', tags=None, defines=None, strict_warnings=False, mode='incremental', **kwargs): if tags is None: tags = [] if defines is None: defines = {} if sphinx_builddir is None: # Who the heck is not interested in the built doc ? 
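# A typical configuration of this step (a sketch only; the directory names
# and builder are illustrative):
#
#     from buildbot.plugins import steps
#
#     steps.Sphinx(sphinx_builddir="_build/html",
#                  sphinx_sourcedir="docs",
#                  sphinx_builder="html",
#                  strict_warnings=True)  # adds -W, warnings become errors
#
# sphinx_builddir is the only required argument, which is what the check
# below enforces.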
config.error("Sphinx argument sphinx_builddir is required") if mode not in ('incremental', 'full'): config.error("Sphinx argument mode has to be 'incremental' or" + "'full' is required") self.success = False super().__init__(**kwargs) # build the command command = [sphinx] if sphinx_builder is not None: command.extend(['-b', sphinx_builder]) for tag in tags: command.extend(['-t', tag]) for key in sorted(defines): if defines[key] is None: command.extend(['-D', key]) elif isinstance(defines[key], bool): command.extend(['-D', '%s=%d' % (key, defines[key] and 1 or 0)]) else: command.extend(['-D', '%s=%s' % (key, defines[key])]) if mode == 'full': command.extend(['-E']) # Don't use a saved environment if strict_warnings: command.extend(['-W']) # Convert warnings to errors command.extend([sphinx_sourcedir, sphinx_builddir]) self.setCommand(command) self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) _msgs = ('WARNING', 'ERROR', 'SEVERE') def logConsumer(self): self.warnings = [] next_is_warning = False while True: stream, line = yield if line.startswith('build succeeded') or \ line.startswith('no targets are out of date.'): self.success = True elif line.startswith('Warning, treated as error:'): next_is_warning = True else: if next_is_warning: self.warnings.append(line) next_is_warning = False else: for msg in self._msgs: if msg in line: self.warnings.append(line) def createSummary(self, log): if self.warnings: self.addCompleteLog('warnings', "\n".join(self.warnings)) self.step_status.setStatistic('warnings', len(self.warnings)) def evaluateCommand(self, cmd): if self.success: if not self.warnings: return SUCCESS return WARNINGS return FAILURE def describe(self, done=False): if not done: return ["building"] return [self.name, '%d warnings' % len(self.warnings)] buildbot-2.6.0/master/buildbot/steps/python_twisted.py000066400000000000000000000522141361162603000232200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ BuildSteps that are specific to the Twisted source tree """ import re from twisted.python import log from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.shell import ShellCommand class HLint(ShellCommand): """I run a 'lint' checker over a set of .xhtml files. Any deviations from recommended style is flagged and put in the output log. 
This step looks at .changes in the parent Build to extract a list of Lore XHTML files to check.""" name = "hlint" description = ["running", "hlint"] descriptionDone = ["hlint"] warnOnWarnings = True warnOnFailure = True # TODO: track time, but not output warnings = 0 def __init__(self, python=None, **kwargs): super().__init__(**kwargs) self.python = python self.warningLines = [] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) def start(self): # create the command htmlFiles = {} for f in self.build.allFiles(): if f.endswith(".xhtml") and not f.startswith("sandbox/"): htmlFiles[f] = 1 # remove duplicates hlintTargets = sorted(htmlFiles.keys()) if not hlintTargets: return SKIPPED self.hlintFiles = hlintTargets c = [] if self.python: c.append(self.python) c += ["bin/lore", "-p", "--output", "lint"] + self.hlintFiles self.setCommand(c) # add an extra log file to show the .html files we're checking self.addCompleteLog("files", "\n".join(self.hlintFiles) + "\n") super().start() def logConsumer(self): while True: stream, line = yield if ':' in line: self.warnings += 1 self.warningLines.append(line) def commandComplete(self, cmd): self.addCompleteLog('warnings', '\n'.join(self.warningLines)) def evaluateCommand(self, cmd): # warnings are in stdout, rc is always 0, unless the tools break if cmd.didFail(): return FAILURE if self.warnings: return WARNINGS return SUCCESS def getText2(self, cmd, results): if cmd.didFail(): return ["hlint"] return ["%d hlin%s" % (self.warnings, self.warnings == 1 and 't' or 'ts')] class TrialTestCaseCounter(logobserver.LogLineObserver): _line_re = re.compile(r'^(?:Doctest: )?([\w\.]+) \.\.\. \[([^\]]+)\]$') def __init__(self): super().__init__() self.numTests = 0 self.finished = False self.counts = {'total': None, 'failures': 0, 'errors': 0, 'skips': 0, 'expectedFailures': 0, 'unexpectedSuccesses': 0, } def outLineReceived(self, line): # different versions of Twisted emit different per-test lines with # the bwverbose reporter. # 2.0.0: testSlave (buildbot.test.test_runner.Create) ... [OK] # 2.1.0: buildbot.test.test_runner.Create.testSlave ... [OK] # 2.4.0: buildbot.test.test_runner.Create.testSlave ... [OK] # Let's just handle the most recent version, since it's the easiest. # Note that doctests create lines line this: # Doctest: viff.field.GF ... [OK] if line.startswith("=" * 40): self.finished = True if not self.finished: m = self._line_re.search(line.strip()) if m: testname, result = m.groups() self.numTests += 1 self.step.setProgress('tests', self.numTests) out = re.search(r'Ran (\d+) tests', line) if out: self.counts['total'] = int(out.group(1)) if (line.startswith("OK") or line.startswith("FAILED ") or line.startswith("PASSED")): # the extra space on FAILED_ is to distinguish the overall # status from an individual test which failed. 
The lack of a # space on the OK is because it may be printed without any # additional text (if there are no skips,etc) out = re.search(r'failures=(\d+)', line) if out: self.counts['failures'] = int(out.group(1)) out = re.search(r'errors=(\d+)', line) if out: self.counts['errors'] = int(out.group(1)) out = re.search(r'skips=(\d+)', line) if out: self.counts['skips'] = int(out.group(1)) out = re.search(r'expectedFailures=(\d+)', line) if out: self.counts['expectedFailures'] = int(out.group(1)) out = re.search(r'unexpectedSuccesses=(\d+)', line) if out: self.counts['unexpectedSuccesses'] = int(out.group(1)) # successes= is a Twisted-2.0 addition, and is not currently used out = re.search(r'successes=(\d+)', line) if out: self.counts['successes'] = int(out.group(1)) UNSPECIFIED = () # since None is a valid choice class Trial(ShellCommand): """ There are some class attributes which may be usefully overridden by subclasses. 'trialMode' and 'trialArgs' can influence the trial command line. """ name = "trial" progressMetrics = ('output', 'tests', 'test.log') # note: the slash only works on unix workers, of course, but we have # no way to know what the worker uses as a separator. # TODO: figure out something clever. logfiles = {"test.log": "_trial_temp/test.log"} # we use test.log to track Progress at the end of __init__() renderables = ['tests', 'jobs'] flunkOnFailure = True python = None trial = "trial" trialMode = ["--reporter=bwverbose"] # requires Twisted-2.1.0 or newer # for Twisted-2.0.0 or 1.3.0, use ["-o"] instead trialArgs = [] jobs = None testpath = UNSPECIFIED # required (but can be None) testChanges = False # TODO: needs better name recurse = False reactor = None randomly = False tests = None # required def __init__(self, reactor=UNSPECIFIED, python=None, trial=None, testpath=UNSPECIFIED, tests=None, testChanges=None, recurse=None, randomly=None, trialMode=None, trialArgs=None, jobs=None, **kwargs): """ @type testpath: string @param testpath: use in PYTHONPATH when running the tests. If None, do not set PYTHONPATH. Setting this to '.' will cause the source files to be used in-place. @type python: string (without spaces) or list @param python: which python executable to use. Will form the start of the argv array that will launch trial. If you use this, you should set 'trial' to an explicit path (like /usr/bin/trial or ./bin/trial). Defaults to None, which leaves it out entirely (running 'trial args' instead of 'python ./bin/trial args'). Likely values are 'python', ['python2.2'], ['python', '-Wall'], etc. @type trial: string @param trial: which 'trial' executable to run. Defaults to 'trial', which will cause $PATH to be searched and probably find /usr/bin/trial . If you set 'python', this should be set to an explicit path (because 'python2.3 trial' will not work). @type trialMode: list of strings @param trialMode: a list of arguments to pass to trial, specifically to set the reporting mode. This defaults to ['-to'] which means 'verbose colorless output' to the trial that comes with Twisted-2.0.x and at least -2.1.0 . Newer versions of Twisted may come with a trial that prefers ['--reporter=bwverbose']. @type trialArgs: list of strings @param trialArgs: a list of arguments to pass to trial, available to turn on any extra flags you like. Defaults to []. @type jobs: integer @param jobs: integer to be used as trial -j/--jobs option (for running tests on several workers). Only supported since Twisted-12.3.0. 
@type tests: list of strings @param tests: a list of test modules to run, like ['twisted.test.test_defer', 'twisted.test.test_process']. If this is a string, it will be converted into a one-item list. @type testChanges: boolean @param testChanges: if True, ignore the 'tests' parameter and instead ask the Build for all the files that make up the Changes going into this build. Pass these filenames to trial and ask it to look for test-case-name tags, running just the tests necessary to cover the changes. @type recurse: boolean @param recurse: If True, pass the --recurse option to trial, allowing test cases to be found in deeper subdirectories of the modules listed in 'tests'. This does not appear to be necessary when using testChanges. @type reactor: string @param reactor: which reactor to use, like 'gtk' or 'java'. If not provided, the Twisted's usual platform-dependent default is used. @type randomly: boolean @param randomly: if True, add the --random=0 argument, which instructs trial to run the unit tests in a random order each time. This occasionally catches problems that might be masked when one module always runs before another (like failing to make registerAdapter calls before lookups are done). @type kwargs: dict @param kwargs: parameters. The following parameters are inherited from L{ShellCommand} and may be useful to set: workdir, haltOnFailure, flunkOnWarnings, flunkOnFailure, warnOnWarnings, warnOnFailure, want_stdout, want_stderr, timeout. """ super().__init__(**kwargs) if python: self.python = python if self.python is not None: if isinstance(self.python, str): self.python = [self.python] for s in self.python: if " " in s: # this is not strictly an error, but I suspect more # people will accidentally try to use python="python2.3 # -Wall" than will use embedded spaces in a python flag log.msg("python= component '%s' has spaces") log.msg("To add -Wall, use python=['python', '-Wall']") why = "python= value has spaces, probably an error" raise ValueError(why) if trial: self.trial = trial if " " in self.trial: raise ValueError("trial= value has spaces") if trialMode is not None: self.trialMode = trialMode if trialArgs is not None: self.trialArgs = trialArgs if jobs is not None: self.jobs = jobs if testpath is not UNSPECIFIED: self.testpath = testpath if self.testpath is UNSPECIFIED: raise ValueError("You must specify testpath= (it can be None)") assert isinstance(self.testpath, str) or self.testpath is None if reactor is not UNSPECIFIED: self.reactor = reactor if tests is not None: self.tests = tests if isinstance(self.tests, str): self.tests = [self.tests] if testChanges is not None: self.testChanges = testChanges # self.recurse = True # not sure this is necessary if not self.testChanges and self.tests is None: raise ValueError("Must either set testChanges= or provide tests=") if recurse is not None: self.recurse = recurse if randomly is not None: self.randomly = randomly # build up most of the command, then stash it until start() command = [] if self.python: command.extend(self.python) command.append(self.trial) command.extend(self.trialMode) if self.recurse: command.append("--recurse") if self.reactor: command.append("--reactor=%s" % reactor) if self.randomly: command.append("--random=0") command.extend(self.trialArgs) self.command = command if self.reactor: self.description = ["testing", "(%s)" % self.reactor] self.descriptionDone = ["tests"] # commandComplete adds (reactorname) to self.text else: self.description = ["testing"] self.descriptionDone = ["tests"] # this counter will 
feed Progress along the 'test cases' metric self.observer = TrialTestCaseCounter() self.addLogObserver('stdio', self.observer) # this observer consumes multiple lines in a go, so it can't be easily # handled in TrialTestCaseCounter. self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) self.problems = [] self.warnings = {} # text used before commandComplete runs self.text = 'running' def setupEnvironment(self, cmd): super().setupEnvironment(cmd) if self.testpath is not None: e = cmd.args['env'] if e is None: cmd.args['env'] = {'PYTHONPATH': self.testpath} else: # this bit produces a list, which can be used # by buildbot_worker.runprocess.RunProcess ppath = e.get('PYTHONPATH', self.testpath) if isinstance(ppath, str): ppath = [ppath] if self.testpath not in ppath: ppath.insert(0, self.testpath) e['PYTHONPATH'] = ppath def start(self): # choose progressMetrics and logfiles based on whether trial is being # run with multiple workers or not. output_observer = logobserver.OutputProgressObserver('test.log') if self.jobs is not None: self.jobs = int(self.jobs) self.command.append("--jobs=%d" % self.jobs) # using -j/--jobs flag produces more than one test log. self.logfiles = {} for i in range(self.jobs): self.logfiles['test.%d.log' % i] = '_trial_temp/%d/test.log' % i self.logfiles['err.%d.log' % i] = '_trial_temp/%d/err.log' % i self.logfiles['out.%d.log' % i] = '_trial_temp/%d/out.log' % i self.addLogObserver('test.%d.log' % i, output_observer) else: # this one just measures bytes of output in _trial_temp/test.log self.addLogObserver('test.log', output_observer) # now that self.build.allFiles() is nailed down, finish building the # command if self.testChanges: for f in self.build.allFiles(): if f.endswith(".py"): self.command.append("--testmodule=%s" % f) else: self.command.extend(self.tests) log.msg("Trial.start: command is", self.command) super().start() def commandComplete(self, cmd): # figure out all status, then let the various hook functions return # different pieces of it counts = self.observer.counts total = counts['total'] failures, errors = counts['failures'], counts['errors'] parsed = (total is not None) text = [] text2 = "" if not cmd.didFail(): if parsed: results = SUCCESS if total: text += ["%d %s" % (total, total == 1 and "test" or "tests"), "passed"] else: text += ["no tests", "run"] else: results = FAILURE text += ["testlog", "unparseable"] text2 = "tests" else: # something failed results = FAILURE if parsed: text.append("tests") if failures: text.append("%d %s" % (failures, failures == 1 and "failure" or "failures")) if errors: text.append("%d %s" % (errors, errors == 1 and "error" or "errors")) count = failures + errors text2 = "%d tes%s" % (count, (count == 1 and 't' or 'ts')) else: text += ["tests", "failed"] text2 = "tests" if counts['skips']: text.append("%d %s" % (counts['skips'], counts['skips'] == 1 and "skip" or "skips")) if counts['expectedFailures']: text.append("%d %s" % (counts['expectedFailures'], counts['expectedFailures'] == 1 and "todo" or "todos")) if 0: # TODO pylint: disable=using-constant-test results = WARNINGS if not text2: text2 = "todo" if 0: # pylint: disable=using-constant-test # ignore unexpectedSuccesses for now, but it should really mark # the build WARNING if counts['unexpectedSuccesses']: text.append("%d surprises" % counts['unexpectedSuccesses']) results = WARNINGS if not text2: text2 = "tests" if self.reactor: text.append(self.rtext('(%s)')) if text2: text2 = "%s %s" % (text2, self.rtext('(%s)')) self.results = 
results self.text = text self.text2 = [text2] def rtext(self, fmt='%s'): if self.reactor: rtext = fmt % self.reactor return rtext.replace("reactor", "") return "" def logConsumer(self): while True: stream, line = yield if line.find(" exceptions.DeprecationWarning: ") != -1: # no source warning = line # TODO: consider stripping basedir prefix here self.warnings[warning] = self.warnings.get(warning, 0) + 1 elif (line.find(" DeprecationWarning: ") != -1 or line.find(" UserWarning: ") != -1): # next line is the source warning = line + "\n" + (yield)[1] + "\n" self.warnings[warning] = self.warnings.get(warning, 0) + 1 elif line.find("Warning: ") != -1: warning = line self.warnings[warning] = self.warnings.get(warning, 0) + 1 if line.find("=" * 60) == 0 or line.find("-" * 60) == 0: # read to EOF while True: self.problems.append(line) stream, line = yield def createSummary(self, loog): problems = '\n'.join(self.problems) warnings = self.warnings if problems: self.addCompleteLog("problems", problems) if warnings: lines = sorted(warnings.keys()) self.addCompleteLog("warnings", "".join(lines)) def evaluateCommand(self, cmd): return self.results def describe(self, done=False): return self.text class RemovePYCs(ShellCommand): name = "remove-.pyc" command = ['find', '.', '-name', "'*.pyc'", '-exec', 'rm', '{}', ';'] description = ["removing", ".pyc", "files"] descriptionDone = ["remove", ".pycs"] buildbot-2.6.0/master/buildbot/steps/shell.py000066400000000000000000000662001361162603000212430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect import re from twisted.python import failure from twisted.python import log from twisted.python.deprecate import deprecatedModuleAttribute from twisted.python.versions import Version from buildbot import config from buildbot.process import buildstep from buildbot.process import logobserver from buildbot.process import remotecommand # for existing configurations that import WithProperties from here. We like # to move this class around just to keep our readers guessing. from buildbot.process.properties import WithProperties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.process.results import worst_status from buildbot.steps.worker import CompositeStepMixin from buildbot.util import command_to_string from buildbot.util import flatten from buildbot.util import join_list _hush_pyflakes = [WithProperties] del _hush_pyflakes class ShellCommand(buildstep.LoggingBuildStep): """I run a single shell command on the worker. I return FAILURE if the exit code of that command is non-zero, SUCCESS otherwise. 
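# The Trial step defined in the preceding file (python_twisted.py) is usually
# given an explicit test list and a PYTHONPATH entry.  A sketch (package and
# test names are illustrative):
#
#     from buildbot.plugins import steps
#
#     steps.Trial(testpath=".",               # prepended to PYTHONPATH
#                 tests=["mypkg.test"],       # or testChanges=True
#                 trialMode=["--reporter=bwverbose"],
#                 jobs=2)                     # requires Twisted >= 12.3.0
#
# When jobs is set, start() above switches to per-worker logfiles
# (_trial_temp/<n>/test.log) instead of the single _trial_temp/test.log.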
To change this behavior, override my .evaluateCommand method, or customize decodeRC argument By default, a failure of this step will mark the whole build as FAILURE. To override this, give me an argument of flunkOnFailure=False . I create a single Log named 'log' which contains the output of the command. To create additional summary Logs, override my .createSummary method. The shell command I run (a list of argv strings) can be provided in several ways: - a class-level .command attribute - a command= parameter to my constructor (overrides .command) - set explicitly with my .setCommand() method (overrides both) @ivar command: a list of renderable objects (typically strings or WithProperties instances). This will be used by start() to create a RemoteShellCommand instance. @ivar logfiles: a dict mapping log NAMEs to workdir-relative FILENAMEs of their corresponding logfiles. The contents of the file named FILENAME will be put into a LogFile named NAME, ina something approximating real-time. (note that logfiles= is actually handled by our parent class LoggingBuildStep) @ivar lazylogfiles: Defaults to False. If True, logfiles will be tracked `lazily', meaning they will only be added when and if they are written to. Empty or nonexistent logfiles will be omitted. (Also handled by class LoggingBuildStep.) """ name = "shell" renderables = [ 'command', 'flunkOnFailure', 'haltOnFailure', 'remote_kwargs', 'workerEnvironment' ] command = None # set this to a command, or set in kwargs # logfiles={} # you can also set 'logfiles' to a dictionary, and it # will be merged with any logfiles= argument passed in # to __init__ # override this on a specific ShellCommand if you want to let it fail # without dooming the entire build to a status of FAILURE flunkOnFailure = True def __init__(self, workdir=None, command=None, usePTY=None, **kwargs): # most of our arguments get passed through to the RemoteShellCommand # that we create, but first strip out the ones that we pass to # BuildStep (like haltOnFailure and friends), and a couple that we # consume ourselves. if command: self.setCommand(command) if self.__class__ is ShellCommand and not command: # ShellCommand class is directly instantiated. # Explicitly check that command is set to prevent runtime error # later. 
config.error("ShellCommand's `command' argument is not specified") # pull out the ones that LoggingBuildStep wants, then upcall buildstep_kwargs = {} # workdir is here first positional argument, but it belongs to # BuildStep parent kwargs['workdir'] = workdir for k in list(kwargs): if k in self.__class__.parms: buildstep_kwargs[k] = kwargs[k] del kwargs[k] super().__init__(**buildstep_kwargs) # check validity of arguments being passed to RemoteShellCommand invalid_args = [] signature = inspect.signature( remotecommand.RemoteShellCommand.__init__) valid_rsc_args = signature.parameters.keys() for arg in kwargs: if arg not in valid_rsc_args: invalid_args.append(arg) # Raise Configuration error in case invalid arguments are present if invalid_args: config.error("Invalid argument(s) passed to RemoteShellCommand: " + ', '.join(invalid_args)) # everything left over goes to the RemoteShellCommand kwargs['usePTY'] = usePTY self.remote_kwargs = kwargs self.remote_kwargs['workdir'] = workdir def setBuild(self, build): super().setBuild(build) # Set this here, so it gets rendered when we start the step self.workerEnvironment = self.build.workerEnvironment def setCommand(self, command): self.command = command def _describe(self, done=False): return None def describe(self, done=False): if self.stopped and not self.rendered: return "stopped early" assert(self.rendered) desc = self._describe(done) if not desc: return None if self.descriptionSuffix: desc = desc + ' ' + join_list(self.descriptionSuffix) return desc def getCurrentSummary(self): cmdsummary = self._getLegacySummary(False) if cmdsummary: return {'step': cmdsummary} return super(ShellCommand, self).getCurrentSummary() def getResultSummary(self): cmdsummary = self._getLegacySummary(True) if cmdsummary: if self.results != SUCCESS: cmdsummary += ' (%s)' % Results[self.results] return {'step': cmdsummary} return super(ShellCommand, self).getResultSummary() def _getLegacySummary(self, done): # defer to the describe method, if set description = self.describe(done) if description: return join_list(description) # defer to descriptions, if they're set if (not done and self.description) or (done and self.descriptionDone): return None try: # if self.cmd is set, then use the RemoteCommand's info if self.cmd: command = self.cmd.remote_command # otherwise, if we were configured with a command, use that elif self.command: command = self.command else: return None rv = command_to_string(command) # add the descriptionSuffix, if one was given if self.descriptionSuffix: rv = rv + ' ' + join_list(self.descriptionSuffix) return rv except Exception: log.err(failure.Failure(), "Error describing step") return None def setupEnvironment(self, cmd): # merge in anything from workerEnvironment (which comes from the builder # config) Environment variables passed in by a BuildStep override those # passed in at the Builder level, so if we have any from the builder, # apply those and then update with the args from the buildstep # (cmd.args) workerEnv = self.workerEnvironment if workerEnv: if cmd.args['env'] is None: cmd.args['env'] = {} fullWorkerEnv = workerEnv.copy() fullWorkerEnv.update(cmd.args['env']) cmd.args['env'] = fullWorkerEnv # note that each RemoteShellCommand gets its own copy of the # dictionary, so we shouldn't be affecting anyone but ourselves. 
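# For example (values are illustrative): with a builder-level environment of
#     {'PATH': '/opt/tc/bin:/usr/bin', 'CC': 'gcc'}
# and a step configured with env={'CC': 'clang'}, the command runs with PATH
# from the builder and CC='clang' from the step, because the step's
# cmd.args['env'] is applied on top of the copied workerEnvironment above.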
def buildCommandKwargs(self, warnings): kwargs = super().buildCommandKwargs() kwargs.update(self.remote_kwargs) kwargs['workdir'] = self.workdir kwargs['command'] = flatten(self.command, (list, tuple)) # check for the usePTY flag if 'usePTY' in kwargs and kwargs['usePTY'] is not None: if self.workerVersionIsOlderThan("shell", "2.7"): warnings.append( "NOTE: worker does not allow master to override usePTY\n") del kwargs['usePTY'] # check for the interruptSignal flag if "interruptSignal" in kwargs and self.workerVersionIsOlderThan("shell", "2.15"): warnings.append( "NOTE: worker does not allow master to specify interruptSignal\n") del kwargs['interruptSignal'] return kwargs def start(self): # this block is specific to ShellCommands. subclasses that don't need # to set up an argv array, an environment, or extra logfiles= (like # the Source subclasses) can just skip straight to startCommand() warnings = [] # create the actual RemoteShellCommand instance now kwargs = self.buildCommandKwargs(warnings) cmd = remotecommand.RemoteShellCommand(**kwargs) self.setupEnvironment(cmd) self.startCommand(cmd, warnings) class TreeSize(ShellCommand): name = "treesize" command = ["du", "-s", "-k", "."] description = "measuring tree size" kib = None def __init__(self, **kwargs): super().__init__(**kwargs) self.observer = logobserver.BufferLogObserver(wantStdout=True, wantStderr=True) self.addLogObserver('stdio', self.observer) def commandComplete(self, cmd): out = self.observer.getStdout() m = re.search(r'^(\d+)', out) if m: self.kib = int(m.group(1)) self.setProperty("tree-size-KiB", self.kib, "treesize") def evaluateCommand(self, cmd): if cmd.didFail(): return FAILURE if self.kib is None: return WARNINGS # not sure how 'du' could fail, but whatever return SUCCESS def _describe(self, done=False): if self.kib is not None: return ["treesize", "%d KiB" % self.kib] return ["treesize", "unknown"] class SetPropertyFromCommand(ShellCommand): name = "setproperty" renderables = ['property'] def __init__(self, property=None, extract_fn=None, strip=True, includeStdout=True, includeStderr=False, **kwargs): self.property = property self.extract_fn = extract_fn self.strip = strip self.includeStdout = includeStdout self.includeStderr = includeStderr if not ((property is not None) ^ (extract_fn is not None)): config.error( "Exactly one of property and extract_fn must be set") super().__init__(**kwargs) if self.extract_fn: self.includeStderr = True self.observer = logobserver.BufferLogObserver( wantStdout=self.includeStdout, wantStderr=self.includeStderr) self.addLogObserver('stdio', self.observer) self.property_changes = {} def commandComplete(self, cmd): if self.property: if cmd.didFail(): return result = self.observer.getStdout() if self.strip: result = result.strip() propname = self.property self.setProperty(propname, result, "SetPropertyFromCommand Step") self.property_changes[propname] = result else: new_props = self.extract_fn(cmd.rc, self.observer.getStdout(), self.observer.getStderr()) for k, v in new_props.items(): self.setProperty(k, v, "SetPropertyFromCommand Step") self.property_changes = new_props def createSummary(self, log): if self.property_changes: props_set = ["%s: %r" % (k, v) for k, v in sorted(self.property_changes.items())] self.addCompleteLog('property changes', "\n".join(props_set)) def describe(self, done=False): if len(self.property_changes) > 1: return ["%d properties set" % len(self.property_changes)] elif len(self.property_changes) == 1: return ["property '%s' set" % 
list(self.property_changes)[0]] # else: # let ShellCommand describe return super().describe(done) SetProperty = SetPropertyFromCommand deprecatedModuleAttribute(Version("Buildbot", 0, 8, 8), "It has been renamed to SetPropertyFromCommand", "buildbot.steps.shell", "SetProperty") class Configure(ShellCommand): name = "configure" haltOnFailure = 1 flunkOnFailure = 1 description = ["configuring"] descriptionDone = ["configure"] command = ["./configure"] class WarningCountingShellCommand(ShellCommand, CompositeStepMixin): renderables = [ 'suppressionFile', 'suppressionList', 'warningPattern', 'directoryEnterPattern', 'directoryLeavePattern', 'maxWarnCount', ] warnCount = 0 warningPattern = '(?i).*warning[: ].*' # The defaults work for GNU Make. directoryEnterPattern = ("make.*: Entering directory " "[\u2019\"`'](.*)[\u2019'`\"]") directoryLeavePattern = "make.*: Leaving directory" suppressionFile = None commentEmptyLineRe = re.compile(r"^\s*(#.*)?$") suppressionLineRe = re.compile( r"^\s*(.+?)\s*:\s*(.+?)\s*(?:[:]\s*([0-9]+)(?:-([0-9]+))?\s*)?$") def __init__(self, warningPattern=None, warningExtractor=None, maxWarnCount=None, directoryEnterPattern=None, directoryLeavePattern=None, suppressionFile=None, suppressionList=None, **kwargs): # See if we've been given a regular expression to use to match # warnings. If not, use a default that assumes any line with "warning" # present is a warning. This may lead to false positives in some cases. if warningPattern: self.warningPattern = warningPattern if directoryEnterPattern: self.directoryEnterPattern = directoryEnterPattern if directoryLeavePattern: self.directoryLeavePattern = directoryLeavePattern if suppressionFile: self.suppressionFile = suppressionFile # self.suppressions is already taken, so use something else self.suppressionList = suppressionList if warningExtractor: self.warningExtractor = warningExtractor else: self.warningExtractor = WarningCountingShellCommand.warnExtractWholeLine self.maxWarnCount = maxWarnCount # And upcall to let the base class do its work super().__init__(**kwargs) if self.__class__ is WarningCountingShellCommand and \ not kwargs.get('command'): # WarningCountingShellCommand class is directly instantiated. # Explicitly check that command is set to prevent runtime error # later. config.error("WarningCountingShellCommand's `command' argument " "is not specified") self.suppressions = [] self.directoryStack = [] self.warnCount = 0 self.loggedWarnings = [] self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.warningLogConsumer)) def addSuppression(self, suppressionList): """ This method can be used to add patters of warnings that should not be counted. It takes a single argument, a list of patterns. Each pattern is a 4-tuple (FILE-RE, WARN-RE, START, END). FILE-RE is a regular expression (string or compiled regexp), or None. If None, the pattern matches all files, else only files matching the regexp. If directoryEnterPattern is specified in the class constructor, matching is against the full path name, eg. src/main.c. WARN-RE is similarly a regular expression matched against the text of the warning, or None to match all warnings. START and END form an inclusive line number range to match against. 
If START is None, there is no lower bound, similarly if END is none there is no upper bound.""" for fileRe, warnRe, start, end in suppressionList: if fileRe is not None and isinstance(fileRe, str): fileRe = re.compile(fileRe) if warnRe is not None and isinstance(warnRe, str): warnRe = re.compile(warnRe) self.suppressions.append((fileRe, warnRe, start, end)) def warnExtractWholeLine(self, line, match): """ Extract warning text as the whole line. No file names or line numbers.""" return (None, None, line) def warnExtractFromRegexpGroups(self, line, match): """ Extract file name, line number, and warning text as groups (1,2,3) of warningPattern match.""" file = match.group(1) lineNo = match.group(2) if lineNo is not None: lineNo = int(lineNo) text = match.group(3) return (file, lineNo, text) def warningLogConsumer(self): # Now compile a regular expression from whichever warning pattern we're # using wre = self.warningPattern if isinstance(wre, str): wre = re.compile(wre) directoryEnterRe = self.directoryEnterPattern if (directoryEnterRe is not None and isinstance(directoryEnterRe, str)): directoryEnterRe = re.compile(directoryEnterRe) directoryLeaveRe = self.directoryLeavePattern if (directoryLeaveRe is not None and isinstance(directoryLeaveRe, str)): directoryLeaveRe = re.compile(directoryLeaveRe) # Check if each line in the output from this command matched our # warnings regular expressions. If did, bump the warnings count and # add the line to the collection of lines with warnings self.loggedWarnings = [] while True: stream, line = yield if directoryEnterRe: match = directoryEnterRe.search(line) if match: self.directoryStack.append(match.group(1)) continue if (directoryLeaveRe and self.directoryStack and directoryLeaveRe.search(line)): self.directoryStack.pop() continue match = wre.match(line) if match: self.maybeAddWarning(self.loggedWarnings, line, match) def maybeAddWarning(self, warnings, line, match): if self.suppressions: (file, lineNo, text) = self.warningExtractor(self, line, match) lineNo = lineNo and int(lineNo) if file is not None and file != "" and self.directoryStack: currentDirectory = '/'.join(self.directoryStack) if currentDirectory is not None and currentDirectory != "": file = "%s/%s" % (currentDirectory, file) # Skip adding the warning if any suppression matches. for fileRe, warnRe, start, end in self.suppressions: if not (file is None or fileRe is None or fileRe.match(file)): continue if not (warnRe is None or warnRe.search(text)): continue if ((start is not None and end is not None) and not (lineNo is not None and start <= lineNo <= end)): continue return warnings.append(line) self.warnCount += 1 def start(self): if self.suppressionList is not None: self.addSuppression(self.suppressionList) if self.suppressionFile is None: return super().start() d = self.getFileContentFromWorker( self.suppressionFile, abandonOnFailure=True) d.addCallback(self.uploadDone) d.addErrback(self.failed) def uploadDone(self, data): lines = data.split("\n") list = [] for line in lines: if self.commentEmptyLineRe.match(line): continue match = self.suppressionLineRe.match(line) if (match): file, test, start, end = match.groups() if (end is not None): end = int(end) if (start is not None): start = int(start) if end is None: end = start list.append((file, test, start, end)) self.addSuppression(list) return super().start() def createSummary(self, log): """ Match log lines against warningPattern. 
Warnings are collected into another log for this step, and the build-wide 'warnings-count' is updated.""" # If there were any warnings, make the log if lines with warnings # available if self.warnCount: self.addCompleteLog("warnings (%d)" % self.warnCount, "\n".join(self.loggedWarnings) + "\n") warnings_stat = self.getStatistic('warnings', 0) self.setStatistic('warnings', warnings_stat + self.warnCount) old_count = self.getProperty("warnings-count", 0) self.setProperty( "warnings-count", old_count + self.warnCount, "WarningCountingShellCommand") def evaluateCommand(self, cmd): result = cmd.results() if (self.maxWarnCount is not None and self.warnCount > self.maxWarnCount): result = worst_status(result, FAILURE) elif self.warnCount: result = worst_status(result, WARNINGS) return result class Compile(WarningCountingShellCommand): name = "compile" haltOnFailure = 1 flunkOnFailure = 1 description = ["compiling"] descriptionDone = ["compile"] command = ["make", "all"] class Test(WarningCountingShellCommand): name = "test" warnOnFailure = 1 description = ["testing"] descriptionDone = ["test"] command = ["make", "test"] def setTestResults(self, total=0, failed=0, passed=0, warnings=0): """ Called by subclasses to set the relevant statistics; this actually adds to any statistics already present """ total += self.getStatistic('tests-total', 0) self.setStatistic('tests-total', total) failed += self.getStatistic('tests-failed', 0) self.setStatistic('tests-failed', failed) warnings += self.getStatistic('tests-warnings', 0) self.setStatistic('tests-warnings', warnings) passed += self.getStatistic('tests-passed', 0) self.setStatistic('tests-passed', passed) def describe(self, done=False): description = super().describe(done) if done: if not description: description = [] description = description[:] # make a private copy if self.hasStatistic('tests-total'): total = self.getStatistic("tests-total", 0) failed = self.getStatistic("tests-failed", 0) passed = self.getStatistic("tests-passed", 0) warnings = self.getStatistic("tests-warnings", 0) if not total: total = failed + passed + warnings if total: description.append('%d tests' % total) if passed: description.append('%d passed' % passed) if warnings: description.append('%d warnings' % warnings) if failed: description.append('%d failed' % failed) return description class PerlModuleTestObserver(logobserver.LogLineObserver): def __init__(self, warningPattern): super().__init__() if warningPattern: self.warningPattern = re.compile(warningPattern) else: self.warningPattern = None self.rc = SUCCESS self.total = 0 self.failed = 0 self.warnings = 0 self.newStyle = False self.complete = False failedRe = re.compile(r"Tests: \d+ Failed: (\d+)\)") testsRe = re.compile(r"Files=\d+, Tests=(\d+)") oldFailureCountsRe = re.compile(r"(\d+)/(\d+) subtests failed") oldSuccessCountsRe = re.compile(r"Files=\d+, Tests=(\d+),") def outLineReceived(self, line): if self.warningPattern.match(line): self.warnings += 1 if self.newStyle: if line.startswith('Result: FAIL'): self.rc = FAILURE mo = self.failedRe.search(line) if mo: self.failed += int(mo.group(1)) if self.failed: self.rc = FAILURE mo = self.testsRe.search(line) if mo: self.total = int(mo.group(1)) else: if line.startswith('Test Summary Report'): self.newStyle = True mo = self.oldFailureCountsRe.search(line) if mo: self.failed = int(mo.group(1)) self.total = int(mo.group(2)) self.rc = FAILURE mo = self.oldSuccessCountsRe.search(line) if mo: self.total = int(mo.group(1)) class PerlModuleTest(Test): command = ["prove", 
"--lib", "lib", "-r", "t"] total = 0 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.observer = PerlModuleTestObserver( warningPattern=self.warningPattern) self.addLogObserver('stdio', self.observer) def evaluateCommand(self, cmd): if self.observer.total: passed = self.observer.total - self.observer.failed self.setTestResults( total=self.observer.total, failed=self.observer.failed, passed=passed, warnings=self.observer.warnings) rc = self.observer.rc if rc == SUCCESS and self.observer.warnings: rc = WARNINGS return rc buildbot-2.6.0/master/buildbot/steps/shellsequence.py000066400000000000000000000112351361162603000227720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.process import buildstep from buildbot.process import results class ShellArg(results.ResultComputingConfigMixin): publicAttributes = ( results.ResultComputingConfigMixin.resultConfig + ["command", "logfile"]) def __init__(self, command=None, logfile=None, **kwargs): name = self.__class__.__name__ if command is None: config.error("the 'command' parameter of %s " "must not be None" % (name,)) self.command = command self.logfile = logfile for k, v in kwargs.items(): if k not in self.resultConfig: config.error("the parameter '%s' is not " "handled by ShellArg" % (k,)) setattr(self, k, v) # we don't validate anything yet as we can have renderables. 
def validateAttributes(self): # only make the check if we have a list if not isinstance(self.command, (str, list)): config.error("%s is an invalid command, " "it must be a string or a list" % (self.command,)) if isinstance(self.command, list): if not all([isinstance(x, str) for x in self.command]): config.error("%s must only have strings in it" % (self.command,)) runConfParams = [(p_attr, getattr(self, p_attr)) for p_attr in self.resultConfig] not_bool = [(p_attr, p_val) for (p_attr, p_val) in runConfParams if not isinstance(p_val, bool)] if not_bool: config.error("%r must be booleans" % (not_bool,)) @defer.inlineCallbacks def getRenderingFor(self, build): rv = copy.copy(self) for p_attr in self.publicAttributes: res = yield build.render(getattr(self, p_attr)) setattr(rv, p_attr, res) return rv class ShellSequence(buildstep.ShellMixin, buildstep.BuildStep): last_command = None renderables = ['commands'] def __init__(self, commands=None, **kwargs): self.commands = commands kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) def shouldRunTheCommand(self, cmd): return bool(cmd) def getFinalState(self): return self.describe(True) @defer.inlineCallbacks def runShellSequence(self, commands): terminate = False if commands is None: log.msg("After rendering, ShellSequence `commands` is None") return results.EXCEPTION overall_result = results.SUCCESS for arg in commands: if not isinstance(arg, ShellArg): log.msg("After rendering, ShellSequence `commands` list " "contains something that is not a ShellArg") return results.EXCEPTION try: arg.validateAttributes() except config.ConfigErrors as e: log.msg("After rendering, ShellSequence `commands` is " "invalid: %s" % (e,)) return results.EXCEPTION # handle the command from the arg command = arg.command if not self.shouldRunTheCommand(command): continue # keep the command around so we can describe it self.last_command = command cmd = yield self.makeRemoteShellCommand(command=command, stdioLogName=arg.logfile) yield self.runCommand(cmd) overall_result, terminate = results.computeResultAndTermination( arg, cmd.results(), overall_result) if terminate: break return overall_result def run(self): return self.runShellSequence(self.commands) buildbot-2.6.0/master/buildbot/steps/source/000077500000000000000000000000001361162603000210565ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/steps/source/__init__.py000066400000000000000000000014131361162603000231660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.base import Source _hush_pyflakes = [Source] buildbot-2.6.0/master/buildbot/steps/source/base.py000066400000000000000000000305611361162603000223470ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process.buildstep import LoggingBuildStep from buildbot.status.builder import FAILURE from buildbot.status.builder import SKIPPED from buildbot.steps.worker import CompositeStepMixin from buildbot.util import bytes2unicode class Source(LoggingBuildStep, CompositeStepMixin): """This is a base class to generate a source tree in the worker. Each version control system has a specialized subclass, and is expected to override __init__ and implement computeSourceRevision() and startVC(). The class as a whole builds up the self.args dictionary, then starts a RemoteCommand with those arguments. """ renderables = ['description', 'descriptionDone', 'descriptionSuffix', 'workdir', 'env'] description = None # set this to a list of short strings to override descriptionDone = None # alternate description when the step is complete descriptionSuffix = None # extra information to append to suffix # if the checkout fails, there's no point in doing anything else haltOnFailure = True flunkOnFailure = True notReally = False branch = None # the default branch, should be set in __init__ def __init__(self, workdir=None, mode='update', alwaysUseLatest=False, timeout=20 * 60, retry=None, env=None, logEnviron=True, description=None, descriptionDone=None, descriptionSuffix=None, codebase='', **kwargs): """ @type workdir: string @param workdir: local directory (relative to the Builder's root) where the tree should be placed @type alwaysUseLatest: boolean @param alwaysUseLatest: whether to always update to the most recent available sources for this build. Normally the Source step asks its Build for a list of all Changes that are supposed to go into the build, then computes a 'source stamp' (revision number or timestamp) that will cause exactly that set of changes to be present in the checked out tree. This is turned into, e.g., 'cvs update -D timestamp', or 'svn update -r revnum'. If alwaysUseLatest=True, bypass this computation and always update to the latest available sources for each build. The source stamp helps avoid a race condition in which someone commits a change after the master has decided to start a build but before the worker finishes checking out the sources. At best this results in a build which contains more changes than the buildmaster thinks it has (possibly resulting in the wrong person taking the blame for any problems that result), at worst is can result in an incoherent set of sources (splitting a non-atomic commit) which may not build at all. @type logEnviron: boolean @param logEnviron: If this option is true (the default), then the step's logfile will describe the environment variables on the worker. 
In situations where the environment is not relevant and is long, it may be easier to set logEnviron=False. @type codebase: string @param codebase: Specifies which changes in a build are processed by the step. The default codebase value is ''. The codebase must correspond to a codebase assigned by the codebaseGenerator. If no codebaseGenerator is defined in the master then codebase doesn't need to be set, the default value will then match all changes. """ descriptions_for_mode = { "clobber": "checkout", "export": "exporting"} descriptionDones_for_mode = { "clobber": "checkout", "export": "export"} if not description: description = [descriptions_for_mode.get(mode, "updating")] if not descriptionDone: descriptionDone = [descriptionDones_for_mode.get(mode, "update")] if not descriptionSuffix and codebase: descriptionSuffix = [codebase] super().__init__(description=description, descriptionDone=descriptionDone, descriptionSuffix=descriptionSuffix, **kwargs) # This will get added to args later, after properties are rendered self.workdir = workdir self.sourcestamp = None self.codebase = codebase if self.codebase: self.name = properties.Interpolate( "%(kw:name)s-%(kw:codebase)s", name=self.name, codebase=self.codebase) self.alwaysUseLatest = alwaysUseLatest self.logEnviron = logEnviron self.env = env self.timeout = timeout self.retry = retry def _hasAttrGroupMember(self, attrGroup, attr): """ The hasattr equivalent for attribute groups: returns whether the given member is in the attribute group. """ method_name = '%s_%s' % (attrGroup, attr) return hasattr(self, method_name) def _getAttrGroupMember(self, attrGroup, attr): """ The getattr equivalent for attribute groups: gets and returns the attribute group member. """ method_name = '%s_%s' % (attrGroup, attr) return getattr(self, method_name) def _listAttrGroupMembers(self, attrGroup): """ Returns a list of all members in the attribute group. """ from inspect import getmembers, ismethod methods = getmembers(self, ismethod) group_prefix = attrGroup + '_' group_len = len(group_prefix) group_members = [method[0][group_len:] for method in methods if method[0].startswith(group_prefix)] return group_members def updateSourceProperty(self, name, value, source=''): """ Update a property, indexing the property by codebase if codebase is not ''. Source steps should generally use this instead of setProperty. """ # pick a decent source name if source == '': source = self.__class__.__name__ if self.codebase != '': assert not isinstance(self.getProperty(name, None), str), \ "Sourcestep %s has a codebase, other sourcesteps don't" \ % self.name property_dict = self.getProperty(name, {}) property_dict[self.codebase] = value super().setProperty(name, property_dict, source) else: assert not isinstance(self.getProperty(name, None), dict), \ "Sourcestep %s does not have a codebase, other sourcesteps do" \ % self.name super().setProperty(name, value, source) def describe(self, done=False): desc = self.descriptionDone if done else self.description if self.descriptionSuffix: desc = desc[:] desc.extend(self.descriptionSuffix) return desc def computeSourceRevision(self, changes): """Each subclass must implement this method to do something more precise than -rHEAD every time. For version control systems that use repository-wide change numbers (SVN, P4), this can simply take the maximum such number from all the changes involved in this build. 
For systems that do not (CVS), it needs to create a timestamp based upon the latest Change, the Build's treeStableTimer, and an optional self.checkoutDelay value.""" return None def applyPatch(self, patch): patch_command = ['patch', '-p%s' % patch[0], '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff'] cmd = remotecommand.RemoteShellCommand(self.workdir, patch_command, env=self.env, logEnviron=self.logEnviron) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if cmd.didFail(): raise buildstep.BuildStepFailed() return cmd.rc return d def patch(self, _, patch): diff = patch[1] root = None if len(patch) >= 3: root = patch[2] if (root and self.build.path_module.abspath(self.build.path_module.join(self.workdir, root) ).startswith(self.build.path_module.abspath(self.workdir))): self.workdir = self.build.path_module.join(self.workdir, root) d = self.downloadFileContentToWorker('.buildbot-diff', diff) d.addCallback( lambda _: self.downloadFileContentToWorker('.buildbot-patched', 'patched\n')) d.addCallback(lambda _: self.applyPatch(patch)) cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.build.path_module.join(self.workdir, ".buildbot-diff"), 'logEnviron': self.logEnviron}) cmd.useLog(self.stdio_log, False) d.addCallback(lambda _: self.runCommand(cmd)) @d.addCallback def evaluateCommand(_): if cmd.didFail(): raise buildstep.BuildStepFailed() return cmd.rc return d def sourcedirIsPatched(self): d = self.pathExists( self.build.path_module.join(self.workdir, '.buildbot-patched')) return d def start(self): if self.notReally: log.msg("faking %s checkout/update" % self.name) self.step_status.setText(["fake", self.name, "successful"]) self.addCompleteLog("log", "Faked %s checkout/update 'successful'\n" % self.name) return SKIPPED if not self.alwaysUseLatest: # what source stamp would this step like to use? s = self.build.getSourceStamp(self.codebase) self.sourcestamp = s if self.sourcestamp: # if branch is None, then use the Step's "default" branch branch = s.branch or self.branch # if revision is None, use the latest sources (-rHEAD) revision = s.revision if not revision: revision = self.computeSourceRevision(s.changes) # the revision property is currently None, so set it to something # more interesting if revision is not None: self.updateSourceProperty('revision', str(revision)) # if patch is None, then do not patch the tree after checkout # 'patch' is None or a tuple of (patchlevel, diff, root) # root is optional. patch = s.patch if patch: self.addCompleteLog("patch", bytes2unicode(patch[1])) else: log.msg( "No sourcestamp found in build for codebase '%s'" % self.codebase) self.step_status.setText( ["Codebase", '%s' % self.codebase, "not", "in", "build"]) self.addCompleteLog("log", "No sourcestamp found in build for codebase '%s'" % self.codebase) return FAILURE else: revision = None branch = self.branch patch = None self.startVC(branch, revision, patch) buildbot-2.6.0/master/buildbot/steps/source/bzr.py000066400000000000000000000225751361162603000222400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.steps.source.base import Source class Bzr(Source): name = 'bzr' renderables = ['repourl', 'baseURL'] def __init__(self, repourl=None, baseURL=None, mode='incremental', method=None, defaultBranch=None, **kwargs): self.repourl = repourl self.baseURL = baseURL self.branch = defaultBranch self.mode = mode self.method = method super().__init__(**kwargs) if repourl and baseURL: raise ValueError("you must provide exactly one of repourl and" " baseURL") if repourl is None and baseURL is None: raise ValueError("you must provide at least one of repourl and" " baseURL") if baseURL is not None and defaultBranch is None: raise ValueError("you must provide defaultBranch with baseURL") if not self._hasAttrGroupMember('mode', self.mode): raise ValueError("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'full': assert self.method in ['clean', 'fresh', 'clobber', 'copy', None] def startVC(self, branch, revision, patch): if branch: self.branch = branch self.revision = revision self.method = self._getMethod() self.stdio_log = self.addLogForRemoteCommands("stdio") if self.repourl is None: self.repourl = os.path.join(self.baseURL, self.branch) d = self.checkBzr() @d.addCallback def checkInstall(bzrInstalled): if not bzrInstalled: raise WorkerTooOldError("bzr is not installed on worker") return 0 d.addCallback(lambda _: self.sourcedirIsPatched()) @d.addCallback def checkPatched(patched): if patched: return self._dovccmd(['clean-tree', '--ignored', '--force']) return 0 d.addCallback(self._getAttrGroupMember('mode', self.mode)) if patch: d.addCallback(self.patch, patch) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) return d @defer.inlineCallbacks def mode_incremental(self, _): updatable = yield self._sourcedirIsUpdatable() if updatable: command = ['update'] if self.revision: command.extend(['-r', self.revision]) yield self._dovccmd(command) else: yield self._doFull() @defer.inlineCallbacks def mode_full(self, _): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': self.workdir = 'source' yield self.copy() return updatable = self._sourcedirIsUpdatable() if not updatable: log.msg("No bzr repo present, making full checkout") yield self._doFull() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() else: raise ValueError("Unknown method, check your configuration") def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def checkRemoval(res): if cmd.rc != 0: raise RuntimeError("Failed to delete directory") return cmd.rc return d def clobber(self): d = self._clobber() d.addCallback(lambda _: self._doFull()) return d def copy(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': 'build', 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) d.addCallback(self.mode_incremental) 
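        # (descriptive note) mode_full() sets self.workdir to 'source' before
        # calling copy(), so the incremental update above refreshes the 'source'
        # tree; the callback below then copies it into the 'build' directory.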
@d.addCallback def copy(_): cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) return d return d def clean(self): d = self._dovccmd(['clean-tree', '--ignored', '--force']) command = ['update'] if self.revision: command.extend(['-r', self.revision]) d.addCallback(lambda _: self._dovccmd(command)) return d def fresh(self): d = self._dovccmd(['clean-tree', '--force']) command = ['update'] if self.revision: command.extend(['-r', self.revision]) d.addCallback(lambda _: self._dovccmd(command)) return d def _doFull(self): command = ['checkout', self.repourl, '.'] if self.revision: command.extend(['-r', self.revision]) if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._dovccmd(command, abandonOnFailure=abandonOnFailure) def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self._doFull()) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d def finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) log.msg("Closing log, sending result of the command %s " % (self.cmd)) return results d.addCallback(self.finished) return d def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '.bzr')) def computeSourceRevision(self, changes): if not changes: return None lastChange = max([int(c.revision) for c in changes]) return lastChange def _dovccmd(self, command, abandonOnFailure=True, collectStdout=False): cmd = remotecommand.RemoteShellCommand(self.workdir, ['bzr'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc return d def checkBzr(self): d = self._dovccmd(['--version']) @d.addCallback def check(res): return res == 0 return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' def parseGotRevision(self, _): d = self._dovccmd(["version-info", "--custom", "--template='{revno}"], collectStdout=True) @d.addCallback def setrev(stdout): revision = stdout.strip("'") try: int(revision) except ValueError: log.msg("Invalid revision number") raise buildstep.BuildStepFailed() log.msg("Got Git revision %s" % (revision, )) self.updateSourceProperty('got_revision', revision) return 0 return d buildbot-2.6.0/master/buildbot/steps/source/cvs.py000066400000000000000000000316111361162603000222250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import time from email.utils import formatdate from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.remotetransfer import StringFileWriter from buildbot.steps.source.base import Source class CVS(Source): name = "cvs" renderables = ["cvsroot"] def __init__(self, cvsroot=None, cvsmodule='', mode='incremental', method=None, branch=None, global_options=None, extra_options=None, login=None, **kwargs): self.cvsroot = cvsroot self.cvsmodule = cvsmodule self.branch = branch if global_options is None: global_options = [] self.global_options = global_options if extra_options is None: extra_options = [] self.extra_options = extra_options self.login = login self.mode = mode self.method = method self.srcdir = 'source' if not self._hasAttrGroupMember('mode', self.mode): raise ValueError("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) super().__init__(**kwargs) def startVC(self, branch, revision, patch): self.branch = branch self.revision = revision self.stdio_log = self.addLogForRemoteCommands("stdio") self.method = self._getMethod() d = self.checkCvs() @d.addCallback def checkInstall(cvsInstalled): if not cvsInstalled: raise WorkerTooOldError("CVS is not installed on worker") return 0 d.addCallback(self.checkLogin) d.addCallback(lambda _: self.sourcedirIsPatched()) @d.addCallback def checkPatched(patched): if patched: return self.purge(False) return 0 d.addCallback(self._getAttrGroupMember('mode', self.mode)) if patch: d.addCallback(self.patch, patch) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) return d @defer.inlineCallbacks def mode_incremental(self, _): updatable = yield self._sourcedirIsUpdatable() if updatable: rv = yield self.doUpdate() else: rv = yield self.clobber() return rv @defer.inlineCallbacks def mode_full(self, _): if self.method == 'clobber': rv = yield self.clobber() return rv elif self.method == 'copy': rv = yield self.copy() return rv updatable = yield self._sourcedirIsUpdatable() if not updatable: log.msg("CVS repo not present, making full checkout") rv = yield self.doCheckout(self.workdir) elif self.method == 'clean': rv = yield self.clean() elif self.method == 'fresh': rv = yield self.fresh() else: raise ValueError("Unknown method, check your configuration") return rv def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def checkRemoval(res): if cmd.rc: raise RuntimeError("Failed to delete directory") return cmd.rc return d def clobber(self): d = self._clobber() d.addCallback(lambda _: self.doCheckout(self.workdir)) return d def fresh(self, ): d = self.purge(True) d.addCallback(lambda _: self.doUpdate()) return d def clean(self, ): d = self.purge(False) 
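        # clean() reverts local modifications with 'cvsdiscard' and then runs a
        # plain 'cvs update'; fresh() above does the same but also passes
        # '--ignore' to cvsdiscard (see purge() below).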
d.addCallback(lambda _: self.doUpdate()) return d def copy(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) old_workdir = self.workdir self.workdir = self.srcdir d.addCallback(self.mode_incremental) @d.addCallback def copy(_): cmd = remotecommand.RemoteCommand('cpdir', { 'fromdir': self.srcdir, 'todir': old_workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) return d @d.addCallback def resetWorkdir(_): self.workdir = old_workdir return 0 return d def purge(self, ignore_ignores): command = ['cvsdiscard'] if ignore_ignores: command += ['--ignore'] cmd = remotecommand.RemoteShellCommand(self.workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluate(cmd): if cmd.didFail(): raise buildstep.BuildStepFailed() return cmd.rc return d def doCheckout(self, dir): command = ['-d', self.cvsroot, '-z3', 'checkout', '-d', dir] command = self.global_options + command + self.extra_options if self.branch: command += ['-r', self.branch] if self.revision: command += ['-D', self.revision] command += [self.cvsmodule] if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._dovccmd(command, '', abandonOnFailure=abandonOnFailure) def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self.doCheckout(self.workdir)) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d def doUpdate(self): command = ['-z3', 'update', '-dP'] branch = self.branch # special case. 
'cvs update -r HEAD -D today' gives no files; see #2351 if branch == 'HEAD' and self.revision: branch = None if branch: command += ['-r', self.branch] if self.revision: command += ['-D', self.revision] d = self._dovccmd(command) return d def finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) return results d.addCallback(self.finished) return d def checkLogin(self, _): if self.login: d = self._dovccmd(['-d', self.cvsroot, 'login'], initialStdin=self.login + "\n") else: d = defer.succeed(0) return d def _dovccmd(self, command, workdir=None, abandonOnFailure=True, initialStdin=None): if workdir is None: workdir = self.workdir if not command: raise ValueError("No command specified") cmd = remotecommand.RemoteShellCommand(workdir, ['cvs'] + command, env=self.env, timeout=self.timeout, logEnviron=self.logEnviron, initialStdin=initialStdin) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if cmd.rc != 0 and abandonOnFailure: log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() return cmd.rc return d def _cvsEntriesContainStickyDates(self, entries): for line in entries.splitlines(): if line == 'D': # the last line contains just a single 'D' pass elif line.split('/')[-1].startswith('D'): # fields are separated by slashes, the last field contains the tag or date # sticky dates start with 'D' return True return False # no sticky dates @defer.inlineCallbacks def _sourcedirIsUpdatable(self): myFileWriter = StringFileWriter() args = { 'workdir': self.build.path_module.join(self.workdir, 'CVS'), 'writer': myFileWriter, 'maxsize': None, 'blocksize': 32 * 1024, } def uploadFileArgs(source): full_args = dict(args) if self.workerVersionIsOlderThan('uploadFile', '3.0'): full_args['slavesrc'] = source else: full_args['workersrc'] = source return full_args cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Root'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False # on Windows, the cvsroot may not contain the password, so compare to # both cvsroot_without_pw = re.sub("(:pserver:[^:]*):[^@]*(@.*)", r"\1\2", self.cvsroot) if myFileWriter.buffer.strip() not in (self.cvsroot, cvsroot_without_pw): return False myFileWriter.buffer = "" cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Repository'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False if myFileWriter.buffer.strip() != self.cvsmodule: return False # if there are sticky dates (from an earlier build with revision), # we can't update (unless we remove those tags with cvs update -A) myFileWriter.buffer = "" cmd = buildstep.RemoteCommand('uploadFile', uploadFileArgs('Entries'), ignore_updates=True) yield self.runCommand(cmd) if cmd.rc is not None and cmd.rc != 0: return False if self._cvsEntriesContainStickyDates(myFileWriter.buffer): return False return True def parseGotRevision(self, res): revision = time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime()) self.updateSourceProperty('got_revision', revision) return res def checkCvs(self): d = self._dovccmd(['--version']) @d.addCallback def check(res): return res == 0 return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' def computeSourceRevision(self, changes): if not 
changes: return None lastChange = max([c.when for c in changes]) lastSubmit = max([br.submittedAt for br in self.build.requests]) when = (lastChange + lastSubmit) / 2 return formatdate(when) buildbot-2.6.0/master/buildbot/steps/source/darcs.py000066400000000000000000000207141361162603000225300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Source step code for darcs """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Darcs(Source): """ Class for Darcs with all smarts """ name = 'darcs' renderables = ['repourl'] possible_methods = ('clobber', 'copy') def __init__(self, repourl=None, mode='incremental', method=None, **kwargs): self.repourl = repourl self.method = method self.mode = mode super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'incremental' and self.method: errors.append("Incremental mode does not require method") if self.mode == 'full': if self.method is None: self.method = 'copy' elif self.method not in self.possible_methods: errors.append("Invalid method for mode == %s" % (self.mode)) if repourl is None: errors.append("you must provide repourl") if errors: raise ConfigErrors(errors) def startVC(self, branch, revision, patch): self.revision = revision self.stdio_log = self.addLogForRemoteCommands("stdio") d = self.checkDarcs() @d.addCallback def checkInstall(darcsInstalled): if not darcsInstalled: raise WorkerTooOldError("Darcs is not installed on worker") return 0 d.addCallback(lambda _: self.sourcedirIsPatched()) @d.addCallback def checkPatched(patched): if patched: return self.copy() return 0 d.addCallback(self._getAttrGroupMember('mode', self.mode)) if patch: d.addCallback(self.patch, patch) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) return d def checkDarcs(self): cmd = remotecommand.RemoteShellCommand(self.workdir, ['darcs', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluate(_): return cmd.rc == 0 return d @defer.inlineCallbacks def mode_full(self, _): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return @defer.inlineCallbacks def mode_incremental(self, _): updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self._checkout() else: command = ['darcs', 'pull', '--all', '--verbose'] yield 
self._dovccmd(command) def copy(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) self.workdir = 'source' d.addCallback(self.mode_incremental) @d.addCallback def copy(_): cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) return d @d.addCallback def resetWorkdir(_): self.workdir = 'build' return 0 return d def clobber(self): d = self.runRmdir(self.workdir) d.addCallback(lambda _: self._checkout()) return d def _clone(self, abandonOnFailure=False): command = ['darcs', 'get', '--verbose', '--lazy', '--repo-name', self.workdir] d = defer.succeed(0) if self.revision: d.addCallback( lambda _: self.downloadFileContentToWorker('.darcs-context', self.revision)) command.append('--context') command.append('.darcs-context') command.append(self.repourl) d.addCallback(lambda _: self._dovccmd(command, abandonOnFailure=abandonOnFailure, wkdir='.')) return d def _checkout(self): if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._clone(abandonOnFailure) def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self.runRmdir(self.workdir)) df.addCallback(lambda _: self._checkout()) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d def finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) log.msg("Closing log, sending result of the command %s " % (self.cmd)) return results d.addCallback(self.finished) return d @defer.inlineCallbacks def parseGotRevision(self, _): revision = yield self._dovccmd(['darcs', 'changes', '--max-count=1'], collectStdout=True) self.updateSourceProperty('got_revision', revision) return 0 def _dovccmd(self, command, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True, wkdir=None): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} workdir = wkdir or self.workdir cmd = remotecommand.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc return d def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '_darcs')) buildbot-2.6.0/master/buildbot/steps/source/gerrit.py000066400000000000000000000036751361162603000227370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.git import Git class Gerrit(Git): def startVC(self, branch, revision, patch): gerrit_branch = None changed_project = self.build.getProperty('event.change.project') if (not self.sourcestamp or (self.sourcestamp.project != changed_project)): # If we don't have a sourcestamp, or the project is wrong, this # isn't the repo that's changed. Drop through and check out the # head of the given branch pass elif self.build.hasProperty("event.patchSet.ref"): gerrit_branch = self.build.getProperty("event.patchSet.ref") self.updateSourceProperty("gerrit_branch", gerrit_branch) else: try: change = self.build.getProperty("gerrit_change", '').split('/') if len(change) == 2: gerrit_branch = "refs/changes/%2.2d/%d/%d" \ % (int(change[0]) % 100, int(change[0]), int(change[1])) self.updateSourceProperty("gerrit_branch", gerrit_branch) except Exception: pass branch = gerrit_branch or branch super(Gerrit, self).startVC(branch, revision, patch) buildbot-2.6.0/master/buildbot/steps/source/git.py000066400000000000000000000641101361162603000222150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot import config as bbconfig from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.steps.source.base import Source from buildbot.steps.worker import CompositeStepMixin from buildbot.util.git import RC_SUCCESS from buildbot.util.git import GitStepMixin GIT_HASH_LENGTH = 40 def isTrueOrIsExactlyZero(v): # nonzero values are true... if v: return True # ... 
and True for the number zero, but we have to # explicitly guard against v==False, since # isinstance(False, int) is surprisingly True if isinstance(v, int) and v is not False: return True # all other false-ish values are false return False git_describe_flags = [ # on or off ('all', lambda v: ['--all'] if v else None), ('always', lambda v: ['--always'] if v else None), ('contains', lambda v: ['--contains'] if v else None), ('debug', lambda v: ['--debug'] if v else None), ('long', lambda v: ['--long'] if v else None), ('exact-match', lambda v: ['--exact-match'] if v else None), ('tags', lambda v: ['--tags'] if v else None), # string parameter ('match', lambda v: ['--match', v] if v else None), # numeric parameter ('abbrev', lambda v: ['--abbrev=%s' % v] if isTrueOrIsExactlyZero(v) else None), ('candidates', lambda v: ['--candidates=%s' % v] if isTrueOrIsExactlyZero(v) else None), # optional string parameter ('dirty', lambda v: ['--dirty'] if (v is True or v == '') else None), ('dirty', lambda v: ['--dirty=%s' % v] if (v and v is not True) else None), ] class Git(Source, GitStepMixin): name = 'git' renderables = ["repourl", "reference", "branch", "codebase", "mode", "method", "origin"] def __init__(self, repourl=None, branch='HEAD', mode='incremental', method=None, reference=None, submodules=False, shallow=False, progress=True, retryFetch=False, clobberOnFailure=False, getDescription=False, config=None, origin=None, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, **kwargs): if not getDescription and not isinstance(getDescription, dict): getDescription = False self.branch = branch self.method = method self.repourl = repourl self.reference = reference self.retryFetch = retryFetch self.submodules = submodules self.shallow = shallow self.clobberOnFailure = clobberOnFailure self.mode = mode self.prog = progress self.getDescription = getDescription self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.config = config self.srcdir = 'source' self.origin = origin super().__init__(**kwargs) self.setupGitStep() if isinstance(self.mode, str): if not self._hasAttrGroupMember('mode', self.mode): bbconfig.error("Git: mode must be %s" % (' or '.join(self._listAttrGroupMembers('mode')))) if isinstance(self.method, str): if (self.mode == 'full' and self.method not in ['clean', 'fresh', 'clobber', 'copy', None]): bbconfig.error("Git: invalid method for mode 'full'.") if self.shallow and (self.mode != 'full' or self.method != 'clobber'): bbconfig.error( "Git: shallow only possible with mode 'full' and method 'clobber'.") if not isinstance(self.getDescription, (bool, dict)): bbconfig.error("Git: getDescription must be a boolean or a dict.") @defer.inlineCallbacks def startVC(self, branch, revision, patch): self.branch = branch or 'HEAD' self.revision = revision self.method = self._getMethod() self.stdio_log = self.addLogForRemoteCommands("stdio") try: gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerTooOldError("git is not installed on worker") patched = yield self.sourcedirIsPatched() if patched: yield self._dovccmd(['clean', '-f', '-f', '-d', '-x']) yield self._downloadSshPrivateKeyIfNeeded() yield self._getAttrGroupMember('mode', self.mode)() if patch: yield self.patch(None, patch=patch) yield self.parseGotRevision() res = yield self.parseCommitDescription() yield self._removeSshPrivateKeyIfNeeded() yield self.finish(res) except Exception as e: yield self._removeSshPrivateKeyIfNeeded() yield self.failed(e) 
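    # Illustrative configuration sketch (an assumption about a typical
    # master.cfg, not part of this module; `factory` is presumed to be an
    # existing BuildFactory): a full clobbering checkout with a shallow clone,
    # which the __init__ validation only permits together with mode='full' and
    # method='clobber'.
    #
    #     from buildbot.plugins import steps
    #
    #     factory.addStep(steps.Git(
    #         repourl='https://example.org/project.git',
    #         branch='master',
    #         mode='full',
    #         method='clobber',
    #         shallow=True,
    #         submodules=True,
    #         getDescription={'tags': True},
    #     ))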
@defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return action = yield self._sourcedirIsUpdatable() if action == "clobber": yield self.clobber() return elif action == "clone": log.msg("No git repo present, making full clone") yield self._fullCloneOrFallback() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def mode_incremental(self): action = yield self._sourcedirIsUpdatable() # if not updatable, do a full checkout if action == "clobber": yield self.clobber() return elif action == "clone": log.msg("No git repo present, making full clone") yield self._fullCloneOrFallback() return yield self._fetchOrFallback() yield self._syncSubmodule(None) yield self._updateSubmodule(None) @defer.inlineCallbacks def clean(self): clean_command = ['clean', '-f', '-f', '-d'] rc = yield self._dovccmd(clean_command) if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._fetchOrFallback() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._syncSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._updateSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed rc = yield self._cleanSubmodule() if rc != RC_SUCCESS: raise buildstep.BuildStepFailed if self.submodules: rc = yield self._dovccmd(clean_command) if rc != RC_SUCCESS: raise buildstep.BuildStepFailed return RC_SUCCESS @defer.inlineCallbacks def clobber(self): yield self._doClobber() res = yield self._fullClone(shallowClone=self.shallow) if res != RC_SUCCESS: raise buildstep.BuildStepFailed @defer.inlineCallbacks def fresh(self): clean_command = ['clean', '-f', '-f', '-d', '-x'] res = yield self._dovccmd(clean_command, abandonOnFailure=False) if res == RC_SUCCESS: yield self._fetchOrFallback() else: yield self._doClobber() yield self._fullCloneOrFallback() yield self._syncSubmodule() yield self._updateSubmodule() yield self._cleanSubmodule() if self.submodules: yield self._dovccmd(clean_command) @defer.inlineCallbacks def copy(self): yield self.runRmdir(self.workdir, abandonOnFailure=False, timeout=self.timeout) old_workdir = self.workdir self.workdir = self.srcdir try: yield self.mode_incremental() cmd = buildstep.RemoteCommand('cpdir', {'fromdir': self.srcdir, 'todir': old_workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() return RC_SUCCESS finally: self.workdir = old_workdir @defer.inlineCallbacks def finish(self, res): self.setStatus(self.cmd, res) log.msg("Closing log, sending result of the command %s " % (self.cmd)) yield self.finished(res) @defer.inlineCallbacks def parseGotRevision(self, _=None): stdout = yield self._dovccmd(['rev-parse', 'HEAD'], collectStdout=True) revision = stdout.strip() if len(revision) != GIT_HASH_LENGTH: raise buildstep.BuildStepFailed() log.msg("Got Git revision %s" % (revision, )) self.updateSourceProperty('got_revision', revision) return RC_SUCCESS @defer.inlineCallbacks def parseCommitDescription(self, _=None): # dict() should not return here if isinstance(self.getDescription, bool) and not self.getDescription: return RC_SUCCESS cmd = ['describe'] if isinstance(self.getDescription, dict): for opt, arg in git_describe_flags: opt = self.getDescription.get(opt, None) arg = arg(opt) if arg: 
cmd.extend(arg) # 'git describe' takes a commitish as an argument for all options # *except* --dirty if not any(arg.startswith('--dirty') for arg in cmd): cmd.append('HEAD') try: stdout = yield self._dovccmd(cmd, collectStdout=True) desc = stdout.strip() self.updateSourceProperty('commit-description', desc) except Exception: pass return RC_SUCCESS def _getSshDataWorkDir(self): if self.method == 'copy' and self.mode == 'full': return self.srcdir return self.workdir @defer.inlineCallbacks def _fetch(self, _): fetch_required = True # If the revision already exists in the repo, we don't need to fetch. if self.revision: rc = yield self._dovccmd(['cat-file', '-e', self.revision], abandonOnFailure=False) if rc == RC_SUCCESS: fetch_required = False if fetch_required: command = ['fetch', '-t', self.repourl, self.branch] # If the 'progress' option is set, tell git fetch to output # progress information to the log. This can solve issues with # long fetches killed due to lack of output, but only works # with Git 1.7.2 or later. if self.prog: if self.supportsProgress: command.append('--progress') else: print("Git versions < 1.7.2 don't support progress") yield self._dovccmd(command) if self.revision: rev = self.revision else: rev = 'FETCH_HEAD' command = ['reset', '--hard', rev, '--'] abandonOnFailure = not self.retryFetch and not self.clobberOnFailure res = yield self._dovccmd(command, abandonOnFailure) # Rename the branch if needed. if res == RC_SUCCESS and self.branch != 'HEAD': # Ignore errors yield self._dovccmd(['checkout', '-B', self.branch], abandonOnFailure=False) return res @defer.inlineCallbacks def _fetchOrFallback(self, _=None): """ Handles fallbacks for failure of fetch, wrapper for self._fetch """ res = yield self._fetch(None) if res == RC_SUCCESS: return res elif self.retryFetch: yield self._fetch(None) elif self.clobberOnFailure: yield self.clobber() else: raise buildstep.BuildStepFailed() @defer.inlineCallbacks def _clone(self, shallowClone): """Retry if clone failed""" command = ['clone'] switchToBranch = False if self.supportsBranch and self.branch != 'HEAD': if self.branch.startswith('refs/'): # we can't choose this branch from 'git clone' directly; we # must do so after the clone switchToBranch = True command += ['--no-checkout'] else: command += ['--branch', self.branch] if shallowClone: command += ['--depth', str(int(shallowClone))] if self.reference: command += ['--reference', self.reference] if self.origin: command += ['--origin', self.origin] command += [self.repourl, '.'] if self.prog: if self.supportsProgress: command.append('--progress') else: print("Git versions < 1.7.2 don't support progress") if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True # If it's a shallow clone abort build step res = yield self._dovccmd(command, abandonOnFailure=(abandonOnFailure and shallowClone)) if switchToBranch: res = yield self._fetch(None) done = self.stopped or res == RC_SUCCESS # or shallow clone?? 
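        # self.retry is a (delay, repeats) tuple: when the clone failed and the
        # step was not stopped, the work directory is clobbered and _clone() is
        # re-attempted after `delay` seconds, with the remaining `repeats`
        # count decremented on each attempt.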
if self.retry and not done: delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._doClobber()) df.addCallback(lambda _: self._clone(shallowClone)) reactor.callLater(delay, df.callback, None) res = yield df return res @defer.inlineCallbacks def _fullClone(self, shallowClone=False): """Perform full clone and checkout to the revision if specified In the case of shallow clones if any of the step fail abort whole build step. """ res = yield self._clone(shallowClone) if res != RC_SUCCESS: return res # If revision specified checkout that revision if self.revision: res = yield self._dovccmd(['reset', '--hard', self.revision, '--'], shallowClone) # init and update submodules, recursively. If there's not recursion # it will not do it. if self.submodules: res = yield self._dovccmd(['submodule', 'update', '--init', '--recursive'], shallowClone) return res @defer.inlineCallbacks def _fullCloneOrFallback(self): """Wrapper for _fullClone(). In the case of failure, if clobberOnFailure is set to True remove the build directory and try a full clone again. """ res = yield self._fullClone() if res != RC_SUCCESS: if not self.clobberOnFailure: raise buildstep.BuildStepFailed() res = yield self.clobber() return res @defer.inlineCallbacks def _doClobber(self): """Remove the work directory""" rc = yield self.runRmdir(self.workdir, timeout=self.timeout) if rc != RC_SUCCESS: raise RuntimeError("Failed to delete directory") return rc def computeSourceRevision(self, changes): if not changes: return None return changes[-1].revision @defer.inlineCallbacks def _syncSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: rc = yield self._dovccmd(['submodule', 'sync']) return rc @defer.inlineCallbacks def _updateSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: vccmd = ['submodule', 'update', '--init', '--recursive'] if self.supportsSubmoduleForce: vccmd.extend(['--force']) if self.supportsSubmoduleCheckout: vccmd.extend(['--checkout']) rc = yield self._dovccmd(vccmd) return rc @defer.inlineCallbacks def _cleanSubmodule(self, _=None): rc = RC_SUCCESS if self.submodules: subcommand = 'git clean -f -f -d' if self.mode == 'full' and self.method == 'fresh': subcommand += ' -x' command = ['submodule', 'foreach', '--recursive', subcommand] rc = yield self._dovccmd(command) return rc def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' @defer.inlineCallbacks def applyPatch(self, patch): yield self._dovccmd(['update-index', '--refresh']) res = yield self._dovccmd(['apply', '--index', '-p', str(patch[0])], initialStdin=patch[1]) return res @defer.inlineCallbacks def _sourcedirIsUpdatable(self): if self.workerVersionIsOlderThan('listdir', '2.16'): git_path = self.build.path_module.join(self.workdir, '.git') exists = yield self.pathExists(git_path) if exists: return "update" return "clone" cmd = buildstep.RemoteCommand('listdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if 'files' not in cmd.updates: # no files - directory doesn't exist return "clone" files = cmd.updates['files'][0] if '.git' in files: return "update" elif files: return "clobber" else: return "clone" class GitPush(buildstep.BuildStep, 
GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gitpush' renderables = ['repourl', 'branch'] def __init__(self, workdir=None, repourl=None, branch=None, force=False, env=None, timeout=20 * 60, logEnviron=True, sshPrivateKey=None, sshHostKey=None, sshKnownHosts=None, config=None, **kwargs): self.workdir = workdir self.repourl = repourl self.branch = branch self.force = force self.env = env self.timeout = timeout self.logEnviron = logEnviron self.sshPrivateKey = sshPrivateKey self.sshHostKey = sshHostKey self.sshKnownHosts = sshKnownHosts self.config = config super().__init__(**kwargs) self.setupGitStep() if not self.branch: bbconfig.error('GitPush: must provide branch') def _getSshDataWorkDir(self): return self.workdir @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") try: gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerTooOldError("git is not installed on worker") yield self._downloadSshPrivateKeyIfNeeded() ret = yield self._doPush() yield self._removeSshPrivateKeyIfNeeded() return ret except Exception as e: yield self._removeSshPrivateKeyIfNeeded() raise e @defer.inlineCallbacks def _doPush(self): cmd = ['push', self.repourl, self.branch] if self.force: cmd.append('--force') ret = yield self._dovccmd(cmd) return ret class GitTag(buildstep.BuildStep, GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gittag' renderables = ['repourl', 'tagName', 'messages'] def __init__(self, workdir=None, tagName=None, annotated=False, messages=None, force=False, env=None, timeout=20 * 60, logEnviron=True, config=None, **kwargs): self.workdir = workdir self.tagName = tagName self.annotated = annotated self.messages = messages self.force = force self.env = env self.timeout = timeout self.logEnviron = logEnviron self.config = config # These attributes are required for GitStepMixin but not useful to tag self.repourl = " " self.sshHostKey = None self.sshPrivateKey = None self.sshKnownHosts = None super().__init__(**kwargs) self.setupGitStep() if not self.tagName: bbconfig.error('GitTag: must provide tagName') if self.annotated and not self.messages: bbconfig.error('GitTag: must provide messages in case of annotated tag') if not self.annotated and self.messages: bbconfig.error('GitTag: messages are required only in case of annotated tag') if self.messages and not isinstance(self.messages, list): bbconfig.error('GitTag: messages should be a list') @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerTooOldError("git is not installed on worker") ret = yield self._doTag() return ret @defer.inlineCallbacks def _doTag(self): cmd = ['tag'] if self.annotated: cmd.append('-a') cmd.append(self.tagName) for msg in self.messages: cmd.extend(['-m', msg]) else: cmd.append(self.tagName) if self.force: cmd.append('--force') ret = yield self._dovccmd(cmd) return ret class GitCommit(buildstep.BuildStep, GitStepMixin, CompositeStepMixin): description = None descriptionDone = None descriptionSuffix = None name = 'gitcommit' renderables = ['paths', 'messages'] def __init__(self, workdir=None, paths=None, messages=None, env=None, timeout=20 * 60, logEnviron=True, emptyCommits='disallow', config=None, **kwargs): self.workdir = workdir self.messages = messages self.paths = paths self.env = env self.timeout = timeout self.logEnviron = 
logEnviron self.config = config self.emptyCommits = emptyCommits # The repourl, sshPrivateKey and sshHostKey attributes are required by # GitStepMixin, but aren't needed by git add and commit operations self.repourl = " " self.sshPrivateKey = None self.sshHostKey = None self.sshKnownHosts = None super().__init__(**kwargs) self.setupGitStep() if not self.messages: bbconfig.error('GitCommit: must provide messages') if not isinstance(self.messages, list): bbconfig.error('GitCommit: messages must be a list') if not self.paths: bbconfig.error('GitCommit: must provide paths') if not isinstance(self.paths, list): bbconfig.error('GitCommit: paths must be a list') if self.emptyCommits not in ('disallow', 'create-empty-commit', 'ignore'): bbconfig.error('GitCommit: emptyCommits must be one of "disallow", ' '"create-empty-commit" and "ignore"') @defer.inlineCallbacks def run(self): self.stdio_log = yield self.addLog("stdio") gitInstalled = yield self.checkFeatureSupport() if not gitInstalled: raise WorkerTooOldError("git is not installed on worker") yield self._checkDetachedHead() yield self._doAdd() yield self._doCommit() return RC_SUCCESS @defer.inlineCallbacks def _checkDetachedHead(self): cmd = ['symbolic-ref', 'HEAD'] rc = yield self._dovccmd(cmd, abandonOnFailure=False) if rc != RC_SUCCESS: self.stdio_log.addStderr("You are in detached HEAD") raise buildstep.BuildStepFailed @defer.inlineCallbacks def _checkHasSomethingToCommit(self): cmd = ['status', '--porcelain=v1'] stdout = yield self._dovccmd(cmd, collectStdout=True) for line in stdout.splitlines(False): if line[0] in 'MADRCU': return True return False @defer.inlineCallbacks def _doCommit(self): if self.emptyCommits == 'ignore': has_commit = yield self._checkHasSomethingToCommit() if not has_commit: return 0 cmd = ['commit'] for message in self.messages: cmd.extend(['-m', message]) if self.emptyCommits == 'create-empty-commit': cmd.extend(['--allow-empty']) ret = yield self._dovccmd(cmd) return ret @defer.inlineCallbacks def _doAdd(self): cmd = ['add'] cmd.extend(self.paths) ret = yield self._dovccmd(cmd) return ret buildbot-2.6.0/master/buildbot/steps/source/github.py000066400000000000000000000017571361162603000227240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.steps.source.git import Git class GitHub(Git): def startVC(self, branch, revision, patch): # ignore the revision if the branch ends with /merge if branch.endswith("/merge"): revision = None super(GitHub, self).startVC(branch, revision, patch) buildbot-2.6.0/master/buildbot/steps/source/gitlab.py000066400000000000000000000041451361162603000226760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.python import log from buildbot.steps.source.git import Git class GitLab(Git): """ Source step that knows how to handle merge requests from the GitLab change source """ def startVC(self, branch, revision, patch): # If this is a merge request: if self.build.hasProperty("target_branch"): target_repourl = self.build.getProperty("target_git_ssh_url", None) if self.repourl != target_repourl: log.msg("GitLab.startVC: note: GitLab step for merge requests" " should probably have repourl='%s' instead of '%s'?" % (target_repourl, self.repourl)) # This step is (probably) configured to fetch the target # branch of a merge (because it is impractical for users to # configure one builder for each of the infinite number of # possible source branches for merge requests). # Point instead to the source being proposed for merge. branch = self.build.getProperty("source_branch", None) # FIXME: layering violation, should not be modifying self here? self.repourl = self.build.getProperty("source_git_ssh_url", None) # The revision is unlikely to exist in the repo already, # so tell Git to not check. revision = None super(GitLab, self).startVC(branch, revision, patch) buildbot-2.6.0/master/buildbot/steps/source/mercurial.py000066400000000000000000000335221361162603000234200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Source step code for mercurial """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Mercurial(Source): """ Class for Mercurial with all the smarts """ name = "hg" renderables = ["repourl"] possible_methods = (None, 'clean', 'fresh', 'clobber') possible_branchTypes = ('inrepo', 'dirname') def __init__(self, repourl=None, mode='incremental', method=None, defaultBranch=None, branchType='dirname', clobberOnBranchChange=True, **kwargs): """ @type repourl: string @param repourl: the URL which points at the Mercurial repository. if 'dirname' branches are enabled, this is the base URL to which a branch name will be appended. It should probably end in a slash. 
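# Illustrative sketch (assumption): configuring the GitLab step above so that builds
# triggered by GitLab merge-request webhooks check out the proposed source branch.
# The repository URL is a placeholder.
from buildbot.steps.source.gitlab import GitLab

gitlab_checkout = GitLab(repourl='git@gitlab.example.com:group/project.git',
                         mode='full', method='clobber')
# When the build carries a 'target_branch' property (set by the GitLab change source),
# startVC() above switches to source_branch/source_git_ssh_url and clears the revision,
# since the proposed commit is unlikely to exist in the target repository yet.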
@param defaultBranch: if branches are enabled, this is the branch to use if the Build does not specify one explicitly. For 'dirname' branches, It will simply be appended to C{repourl} and the result handed to the 'hg update' command. For 'inrepo' branches, this specifies the named revision to which the tree will update after a clone. @param branchType: either 'dirname' or 'inrepo' depending on whether the branch name should be appended to the C{repourl} or the branch is a mercurial named branch and can be found within the C{repourl} @param clobberOnBranchChange: boolean, defaults to True. If set and using inrepos branches, clobber the tree at each branch change. Otherwise, just update to the branch. """ self.repourl = repourl self.defaultBranch = self.branch = defaultBranch self.branchType = branchType self.method = method self.clobberOnBranchChange = clobberOnBranchChange self.mode = mode super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) if self.method not in self.possible_methods: errors.append("method %s is not one of %s" % (self.method, self.possible_methods)) if self.branchType not in self.possible_branchTypes: errors.append("branchType %s is not one of %s" % (self.branchType, self.possible_branchTypes)) if repourl is None: errors.append("you must provide a repourl") if errors: raise ConfigErrors(errors) def startVC(self, branch, revision, patch): self.revision = revision self.method = self._getMethod() self.stdio_log = self.addLogForRemoteCommands("stdio") d = self.checkHg() @d.addCallback def checkInstall(hgInstalled): if not hgInstalled: raise WorkerTooOldError( "Mercurial is not installed on worker") return 0 d.addCallback(lambda _: self.sourcedirIsPatched()) if self.branchType == 'dirname': self.repourl = self.repourl + (branch or '') self.branch = self.defaultBranch self.update_branch = branch elif self.branchType == 'inrepo': self.update_branch = (branch or 'default') d.addCallback(self._getAttrGroupMember('mode', self.mode)) if patch: d.addCallback(self.patch, patch) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) @defer.inlineCallbacks def mode_full(self, _): if self.method == 'clobber': yield self.clobber() return updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self._clone() yield self._update(None) elif self.method == 'clean': yield self.clean(None) elif self.method == 'fresh': yield self.fresh(None) else: raise ValueError("Unknown method, check your configuration") def mode_incremental(self, _): if self.method is not None: raise ValueError(self.method) d = self._sourcedirIsUpdatable() @defer.inlineCallbacks def _cmd(updatable): if updatable: yield self._dovccmd(self.getHgPullCommand()) return yield self._clone() return d.addCallback(_cmd) d.addCallback(self._checkBranchChange) return d def clean(self, _): command = ['--config', 'extensions.purge=', 'purge'] d = self._dovccmd(command) d.addCallback(self._pullUpdate) return d def _clobber(self): cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron}) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) return d def clobber(self): d = self._clobber() d.addCallback(lambda _: self._clone()) d.addCallback(self._update) return d def fresh(self, _): command = ['--config', 'extensions.purge=', 'purge', '--all'] d = self._dovccmd(command) d.addCallback(self._pullUpdate) return d def 
finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) return results d.addCallback(self.finished) return d def parseGotRevision(self, _): d = self._dovccmd( ['parents', '--template', '{node}\\n'], collectStdout=True) @d.addCallback def _setrev(stdout): revision = stdout.strip() if len(revision) != 40: raise ValueError("Incorrect revision id") log.msg("Got Mercurial revision %s" % (revision, )) self.updateSourceProperty('got_revision', revision) return 0 return d @defer.inlineCallbacks def _checkBranchChange(self, _): current_branch = yield self._getCurrentBranch() msg = "Working dir is on in-repo branch '%s' and build needs '%s'." % \ (current_branch, self.update_branch) if current_branch != self.update_branch and self.clobberOnBranchChange: msg += ' Clobbering.' log.msg(msg) yield self.clobber() return msg += ' Updating.' log.msg(msg) yield self._removeAddedFilesAndUpdate(None) def getHgPullCommand(self): command = ['pull', self.repourl] if self.revision: command.extend(['--rev', self.revision]) elif self.branchType == 'inrepo': command.extend(['--rev', self.update_branch]) return command def _pullUpdate(self, res): command = self.getHgPullCommand() d = self._dovccmd(command) d.addCallback(self._checkBranchChange) return d def _dovccmd(self, command, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} cmd = remotecommand.RemoteShellCommand(self.workdir, ['hg', '--verbose'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc return d def computeSourceRevision(self, changes): if not changes: return None # without knowing the revision ancestry graph, we can't sort the # changes at all. So for now, assume they were given to us in sorted # order, and just pay attention to the last one. See ticket #103 for # more details. 
if len(changes) > 1: log.msg("Mercurial.computeSourceRevision: warning: " "there are %d changes here, assuming the last one is " "the most recent" % len(changes)) return changes[-1].revision def _getCurrentBranch(self): if self.branchType == 'dirname': return defer.succeed(self.branch) d = self._dovccmd(['identify', '--branch'], collectStdout=True) @d.addCallback def _getbranch(stdout): return stdout.strip() return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' def _sourcedirIsUpdatable(self): return self.pathExists(self.build.path_module.join(self.workdir, '.hg')) def _removeAddedFilesAndUpdate(self, _): command = ['locate', 'set:added()'] d = self._dovccmd( command, collectStdout=True, decodeRC={0: SUCCESS, 1: SUCCESS}) @d.addCallback def parseAndRemove(stdout): files = [] for filename in stdout.splitlines(): filename = self.workdir + '/' + filename files.append(filename) if not files: d = defer.succeed(0) else: if self.workerVersionIsOlderThan('rmdir', '2.14'): d = self.removeFiles(files) else: cmd = remotecommand.RemoteCommand('rmdir', {'dir': files, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) d.addCallback(lambda _: cmd.rc) return d d.addCallback(self._update) return d @defer.inlineCallbacks def removeFiles(self, files): for filename in files: cmd = remotecommand.RemoteCommand('rmdir', {'dir': filename, 'logEnviron': self.logEnviron, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.rc != 0: return cmd.rc return 0 def _update(self, _): command = ['update', '--clean'] if self.revision: command += ['--rev', self.revision] elif self.branchType == 'inrepo': command += ['--rev', self.update_branch] d = self._dovccmd(command) return d def _clone(self): if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._dovccmd(['clone', '--noupdate', self.repourl, '.'], abandonOnFailure=abandonOnFailure) def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._clobber()) df.addCallback(lambda _: self._clone()) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d def checkHg(self): d = self._dovccmd(['--version']) @d.addCallback def check(res): return res == 0 return d def applyPatch(self, patch): d = self._dovccmd(['import', '--no-commit', '-p', str(patch[0]), '-'], initialStdin=patch[1]) return d buildbot-2.6.0/master/buildbot/steps/source/mtn.py000066400000000000000000000310411361162603000222250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
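# Illustrative sketch (assumption): the Mercurial step above configured for in-repo
# branches. The repourl is a placeholder.
from buildbot.steps.source.mercurial import Mercurial

hg_checkout = Mercurial(repourl='https://hg.example.com/project',
                        mode='incremental',
                        branchType='inrepo',
                        defaultBranch='default',
                        clobberOnBranchChange=True)
# With branchType='dirname' the branch name is appended to repourl instead,
# so the URL should then end in a slash.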
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Source step code for Monotone """ from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source class Monotone(Source): """ Class for Monotone with all smarts """ name = 'monotone' renderables = ['repourl'] possible_methods = ('clobber', 'copy', 'fresh', 'clean') def __init__(self, repourl=None, branch=None, progress=False, mode='incremental', method=None, **kwargs): self.repourl = repourl self.method = method self.mode = mode self.branch = branch self.sourcedata = "%s?%s" % (self.repourl, self.branch) self.database = 'db.mtn' self.progress = progress super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) if self.mode == 'incremental' and self.method: errors.append("Incremental mode does not require method") if self.mode == 'full': if self.method is None: self.method = 'copy' elif self.method not in self.possible_methods: errors.append("Invalid method for mode == %s" % (self.mode)) if repourl is None: errors.append("you must provide repourl") if branch is None: errors.append("you must provide branch") if errors: raise ConfigErrors(errors) @defer.inlineCallbacks def startVC(self, branch, revision, patch): self.revision = revision self.stdio_log = self.addLogForRemoteCommands("stdio") try: monotoneInstalled = yield self.checkMonotone() if not monotoneInstalled: raise WorkerTooOldError("Monotone is not installed on worker") yield self._checkDb() yield self._retryPull() # If we're not throwing away the workdir, check if it's # somehow patched or modified and revert. 
if self.mode != 'full' or self.method not in ('clobber', 'copy'): patched = yield self.sourcedirIsPatched() if patched: yield self.clean() # Call a mode specific method fn = self._getAttrGroupMember('mode', self.mode) yield fn() if patch: yield self.patch(None, patch) yield self.parseGotRevision() self.finish() except Exception as e: self.failed(e) @defer.inlineCallbacks def mode_full(self): if self.method == 'clobber': yield self.clobber() return elif self.method == 'copy': yield self.copy() return updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self.clobber() elif self.method == 'clean': yield self.clean() yield self._update() elif self.method == 'fresh': yield self.clean(False) yield self._update() else: raise ValueError("Unknown method, check your configuration") @defer.inlineCallbacks def mode_incremental(self): updatable = yield self._sourcedirIsUpdatable() if not updatable: yield self.clobber() else: yield self._update() @defer.inlineCallbacks def clobber(self): yield self.runRmdir(self.workdir) yield self._checkout() @defer.inlineCallbacks def copy(self): cmd = remotecommand.RemoteCommand('rmdir', { 'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'source' yield self.mode_incremental() cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': self.logEnviron, 'timeout': self.timeout, }) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) self.workdir = 'build' return 0 @defer.inlineCallbacks def checkMonotone(self): cmd = remotecommand.RemoteShellCommand(self.workdir, ['mtn', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) return cmd.rc == 0 @defer.inlineCallbacks def clean(self, ignore_ignored=True): files = [] commands = [['mtn', 'ls', 'unknown']] if not ignore_ignored: commands.append(['mtn', 'ls', 'ignored']) for cmd in commands: stdout = yield self._dovccmd(cmd, workdir=self.workdir, collectStdout=True) if not stdout: continue for filename in stdout.strip().split('\n'): filename = self.workdir + '/' + str(filename) files.append(filename) if not files: rc = 0 else: if self.workerVersionIsOlderThan('rmdir', '2.14'): rc = yield self.removeFiles(files) else: rc = yield self.runRmdir(files, abandonOnFailure=False) if rc != 0: log.msg("Failed removing files") raise buildstep.BuildStepFailed() @defer.inlineCallbacks def removeFiles(self, files): for filename in files: res = yield self.runRmdir(filename, abandonOnFailure=False) if res: return res return 0 def _checkout(self, abandonOnFailure=False): command = ['mtn', 'checkout', self.workdir, '--db', self.database] if self.revision: command.extend(['--revision', self.revision]) command.extend(['--branch', self.branch]) return self._dovccmd(command, workdir='.', abandonOnFailure=abandonOnFailure) def _update(self, abandonOnFailure=False): command = ['mtn', 'update'] if self.revision: command.extend(['--revision', self.revision]) else: command.extend(['--revision', 'h:' + self.branch]) command.extend(['--branch', self.branch]) return self._dovccmd(command, workdir=self.workdir, abandonOnFailure=abandonOnFailure) def _pull(self, abandonOnFailure=False): command = ['mtn', 'pull', self.sourcedata, '--db', self.database] if self.progress: command.extend(['--ticker=dot']) else: command.extend(['--ticker=none']) d = self._dovccmd(command, workdir='.', 
abandonOnFailure=abandonOnFailure) return d @defer.inlineCallbacks def _retryPull(self): if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True res = yield self._pull(abandonOnFailure) if self.retry: delay, repeats = self.retry if self.stopped or res == 0 or repeats <= 0: return res else: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self._retryPull()) reactor.callLater(delay, df.callback, None) yield df @defer.inlineCallbacks def parseGotRevision(self): stdout = yield self._dovccmd(['mtn', 'automate', 'select', 'w:'], workdir=self.workdir, collectStdout=True) revision = stdout.strip() if len(revision) != 40: raise buildstep.BuildStepFailed() log.msg("Got Monotone revision %s" % (revision, )) self.updateSourceProperty('got_revision', revision) return 0 @defer.inlineCallbacks def _dovccmd(self, command, workdir, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True): if not command: raise ValueError("No command specified") if decodeRC is None: decodeRC = {0: SUCCESS} cmd = buildstep.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin, decodeRC=decodeRC) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout else: return cmd.rc @defer.inlineCallbacks def _checkDb(self): db_exists = yield self.pathExists(self.database) db_needs_init = False if db_exists: stdout = yield self._dovccmd( ['mtn', 'db', 'info', '--db', self.database], workdir='.', collectStdout=True) if stdout.find("migration needed") >= 0: log.msg("Older format database found, migrating it") yield self._dovccmd(['mtn', 'db', 'migrate', '--db', self.database], workdir='.') elif stdout.find("too new, cannot use") >= 0 or \ stdout.find("database has no tables") >= 0: # The database is of a newer format which the worker's # mtn version can not handle. Drop it and pull again # with that monotone version installed on the # worker. Do the same if it's an empty file. yield self.runRmdir(self.database) db_needs_init = True elif stdout.find("not a monotone database") >= 0: # There exists a database file, but it's not a valid # monotone database. Do not delete it, but fail with # an error. raise buildstep.BuildStepFailed() else: log.msg("Database exists and compatible") else: db_needs_init = True log.msg("Database does not exist") if db_needs_init: command = ['mtn', 'db', 'init', '--db', self.database] yield self._dovccmd(command, workdir='.') @defer.inlineCallbacks def _sourcedirIsUpdatable(self): workdir_path = self.build.path_module.join(self.workdir, '_MTN') workdir_exists = yield self.pathExists(workdir_path) if not workdir_exists: log.msg("Workdir does not exist, falling back to a fresh clone") return workdir_exists def finish(self): self.setStatus(self.cmd, 0) log.msg("Closing log, sending result of the command %s " % (self.cmd)) return self.finished(0) buildbot-2.6.0/master/buildbot/steps/source/p4.py000066400000000000000000000365021361162603000217610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
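# Illustrative sketch (assumption): the Monotone step above requires both repourl and
# branch; in full mode the default method is 'copy'. Values here are placeholders.
from buildbot.steps.source.mtn import Monotone

mtn_checkout = Monotone(repourl='mtn://code.example.com/project',
                        branch='com.example.project',
                        mode='full', method='copy')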
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Portions Copyright 2013 Bad Dog Consulting import re from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot import interfaces from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process.properties import Interpolate from buildbot.steps.source import Source # Notes: # see # http://perforce.com/perforce/doc.current/manuals/cmdref/o.gopts.html#1040647 # for getting p4 command to output marshalled python dictionaries as output # for commands. # Perhaps switch to using 'p4 -G' : From URL above: # -G Causes all output (and batch input for form commands with -i) to be # formatted as marshalled Python dictionary objects. This is most often used # when scripting. debug_logging = False class P4(Source): """Perform Perforce checkout/update operations.""" name = 'p4' renderables = ['mode', 'p4base', 'p4client', 'p4viewspec', 'p4branch'] possible_modes = ('incremental', 'full') def __init__(self, mode='incremental', method=None, p4base=None, p4branch=None, p4port=None, p4user=None, p4passwd=None, p4extra_views=(), p4line_end='local', p4viewspec=None, p4viewspec_suffix='...', p4client=Interpolate( 'buildbot_%(prop:workername)s_%(prop:buildername)s'), p4client_spec_options='allwrite rmdir', p4extra_args=None, p4bin='p4', use_tickets=False, **kwargs): self.method = method self.mode = mode self.p4branch = p4branch self.p4bin = p4bin self.p4base = p4base self.p4port = p4port self.p4user = p4user self.p4passwd = p4passwd self.p4extra_views = p4extra_views self.p4viewspec = p4viewspec self.p4viewspec_suffix = p4viewspec_suffix self.p4line_end = p4line_end self.p4client = p4client self.p4client_spec_options = p4client_spec_options self.p4extra_args = p4extra_args self.use_tickets = use_tickets super().__init__(**kwargs) if self.mode not in self.possible_modes and not interfaces.IRenderable.providedBy(self.mode): config.error("mode %s is not an IRenderable, or one of %s" % ( self.mode, self.possible_modes)) if not p4viewspec and p4base is None: config.error("You must provide p4base or p4viewspec") if p4viewspec and (p4base or p4branch or p4extra_views): config.error( "Either provide p4viewspec or p4base and p4branch (and optionally p4extra_views") if p4viewspec and isinstance(p4viewspec, str): config.error( "p4viewspec must not be a string, and should be a sequence of 2 element sequences") if not interfaces.IRenderable.providedBy(p4base) and p4base and p4base.endswith('/'): config.error( 'p4base should not end with a trailing / [p4base = %s]' % p4base) if not interfaces.IRenderable.providedBy(p4branch) and p4branch and p4branch.endswith('/'): config.error( 'p4branch should not end with a trailing / [p4branch = %s]' % p4branch) if (p4branch or p4extra_views) and not p4base: config.error( 'If you specify either p4branch or p4extra_views you must also specify p4base') if self.p4client_spec_options is None: self.p4client_spec_options = '' def startVC(self, branch, revision, patch): if debug_logging: log.msg('in startVC') 
self.revision = revision self.method = self._getMethod() self.stdio_log = self.addLogForRemoteCommands("stdio") d = self.checkP4() @d.addCallback def checkInstall(p4Installed): if not p4Installed: raise WorkerTooOldError("p4 is not installed on worker") return 0 # Try to obfuscate the password when used as an argument to commands. if self.p4passwd is not None: if not self.workerVersionIsOlderThan('shell', '2.16'): self.p4passwd_arg = ('obfuscated', self.p4passwd, 'XXXXXX') else: self.p4passwd_arg = self.p4passwd log.msg("Worker does not understand obfuscation; " "p4 password will be logged") if self.use_tickets and self.p4passwd: d.addCallback(self._acquireTicket) d.addCallback(self._getAttrGroupMember('mode', self.mode)) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) return d @defer.inlineCallbacks def mode_full(self, _): if debug_logging: log.msg("P4:full()..") # First we need to create the client yield self._createClientSpec() # Then p4 sync #none yield self._dovccmd(['sync', '#none']) # Then remove directory. yield self.runRmdir(self.workdir) # Then we need to sync the client if self.revision: if debug_logging: log.msg("P4: full() sync command based on :base:%s changeset:%d", self._getP4BaseForLog(), int(self.revision)) yield self._dovccmd(['sync', '%s...@%d' % ( self._getP4BaseForCommand(), int(self.revision))], collectStdout=True) else: if debug_logging: log.msg( "P4: full() sync command based on :base:%s no revision", self._getP4BaseForLog()) yield self._dovccmd(['sync'], collectStdout=True) if debug_logging: log.msg("P4: full() sync done.") @defer.inlineCallbacks def mode_incremental(self, _): if debug_logging: log.msg("P4:incremental()") # First we need to create the client yield self._createClientSpec() # and plan to do a checkout command = ['sync', ] if self.revision: command.extend( ['%s...@%d' % (self._getP4BaseForCommand(), int(self.revision))]) if debug_logging: log.msg( "P4:incremental() command:%s revision:%s", command, self.revision) yield self._dovccmd(command) def finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) return results d.addCallback(self.finished) return d def _getP4BaseForLog(self): return self.p4base or '' def _getP4BaseForCommand(self): return self.p4base or '' def _buildVCCommand(self, doCommand): assert doCommand, "No command specified" command = [self.p4bin, ] if self.p4port: command.extend(['-p', self.p4port]) if self.p4user: command.extend(['-u', self.p4user]) if not self.use_tickets and self.p4passwd: command.extend(['-P', self.p4passwd_arg]) if self.p4client: command.extend(['-c', self.p4client]) # Only add the extra arguments for the `sync` command. 
if doCommand[0] == 'sync' and self.p4extra_args: command.extend(self.p4extra_args) command.extend(doCommand) return command def _dovccmd(self, command, collectStdout=False, initialStdin=None): command = self._buildVCCommand(command) if debug_logging: log.msg("P4:_dovccmd():workdir->%s" % self.workdir) cmd = buildstep.RemoteShellCommand(self.workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, initialStdin=initialStdin,) cmd.useLog(self.stdio_log, False) if debug_logging: log.msg("Starting p4 command : p4 %s" % (" ".join(command),)) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if cmd.rc != 0: if debug_logging: log.msg( "P4:_dovccmd():Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' def _sourcedirIsUpdatable(self): # In general you should always be able to write to the directory # You just specified as the root of your client # So just return. # If we find a case where this is no longer true, then this # needs to be implemented return defer.succeed(True) @defer.inlineCallbacks def _createClientSpec(self): builddir = self.getProperty('builddir') if debug_logging: log.msg("P4:_createClientSpec() builddir:%s" % builddir) log.msg("P4:_createClientSpec() SELF.workdir:%s" % self.workdir) prop_dict = self.getProperties().asDict() prop_dict['p4client'] = self.p4client client_spec = '' client_spec += "Client: %s\n\n" % self.p4client client_spec += "Owner: %s\n\n" % self.p4user client_spec += "Description:\n\tCreated by %s\n\n" % self.p4user client_spec += "Root:\t%s\n\n" % self.build.path_module.normpath( self.build.path_module.join(builddir, self.workdir) ) client_spec += "Options:\t%s\n\n" % self.p4client_spec_options if self.p4line_end: client_spec += "LineEnd:\t%s\n\n" % self.p4line_end else: client_spec += "LineEnd:\tlocal\n\n" # Setup a view client_spec += "View:\n" def has_whitespace(*args): return any([re.search(r'\s', i) for i in args if i is not None]) if self.p4viewspec: # uses only p4viewspec array of tuples to build view # If the user specifies a viewspec via an array of tuples then # Ignore any specified p4base,p4branch, and/or p4extra_views suffix = self.p4viewspec_suffix or '' for k, v in self.p4viewspec: if debug_logging: log.msg('P4:_createClientSpec():key:%s value:%s' % (k, v)) qa = '"' if has_whitespace(k, suffix) else '' qb = '"' if has_whitespace(self.p4client, v, suffix) else '' client_spec += '\t%s%s%s%s %s//%s/%s%s%s\n' % (qa, k, suffix, qa, qb, self.p4client, v, suffix, qb) else: # Uses p4base, p4branch, p4extra_views qa = '"' if has_whitespace(self.p4base, self.p4branch) else '' client_spec += "\t%s%s" % (qa, self.p4base) if self.p4branch: client_spec += "/%s" % (self.p4branch) client_spec += "/...%s " % qa qb = '"' if has_whitespace(self.p4client) else '' client_spec += "%s//%s/...%s\n" % (qb, self.p4client, qb) if self.p4extra_views: for k, v in self.p4extra_views: qa = '"' if has_whitespace(k) else '' qb = '"' if has_whitespace(k, self.p4client, v) else '' client_spec += "\t%s%s/...%s %s//%s/%s/...%s\n" % (qa, k, qa, qb, self.p4client, v, qb) if debug_logging: log.msg(client_spec) stdout = yield self._dovccmd(['client', '-i'], collectStdout=True, initialStdin=client_spec) mo = re.search(r'Client 
(\S+) (.+)$', stdout, re.M) return mo and (mo.group(2) == 'saved.' or mo.group(2) == 'not changed.') @defer.inlineCallbacks def _acquireTicket(self, _): if debug_logging: log.msg("P4:acquireTicket()") # TODO: check first if the ticket is still valid? initialStdin = self.p4passwd + "\n" yield self._dovccmd(['login'], initialStdin=initialStdin) def parseGotRevision(self, _): command = self._buildVCCommand(['changes', '-m1', '#have']) cmd = buildstep.RemoteShellCommand(self.workdir, command, env=self.env, timeout=self.timeout, logEnviron=self.logEnviron, collectStdout=True) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def _setrev(_): stdout = cmd.stdout.strip() # Example output from p4 changes -m1 #have # Change 212798 on 2012/04/13 by user@user-unix-bldng2 'change to # pickup build' revision = stdout.split()[1] try: int(revision) except ValueError: msg = ("p4.parseGotRevision unable to parse output " "of 'p4 changes -m1 \"#have\"': '%s'" % stdout) log.msg(msg) raise buildstep.BuildStepFailed() if debug_logging: log.msg("Got p4 revision %s" % (revision,)) self.updateSourceProperty('got_revision', revision) return 0 return d def purge(self, ignore_ignores): """Delete everything that shown up on status.""" command = ['sync', '#none'] if ignore_ignores: command.append('--no-ignore') d = self._dovccmd(command, collectStdout=True) # add deferred to rm tree # then add defer to sync to revision return d def checkP4(self): cmd = buildstep.RemoteShellCommand(self.workdir, ['p4', '-V'], env=self.env, logEnviron=self.logEnviron) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluate(_): if cmd.rc != 0: return False return True return d def computeSourceRevision(self, changes): if not changes or None in [c.revision for c in changes]: return None lastChange = max([int(c.revision) for c in changes]) return lastChange buildbot-2.6.0/master/buildbot/steps/source/repo.py000066400000000000000000000462751361162603000224130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
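# Illustrative sketch (assumption): a minimal configuration for the P4 step above using
# p4base/p4branch. Server address, credentials and depot paths are placeholders.
from buildbot.steps.source.p4 import P4

p4_checkout = P4(p4port='perforce.example.com:1666',
                 p4user='buildbot',
                 p4passwd='secret',
                 p4base='//depot/project',   # no trailing slash, as checked above
                 p4branch='main',
                 mode='incremental')
# Alternatively, p4viewspec takes a sequence of 2-element sequences mapping depot paths
# to client paths; in that case p4base, p4branch and p4extra_views must be omitted.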
# # Copyright Buildbot Team Members import re import textwrap from twisted.internet import defer from twisted.internet import reactor from zope.interface import implementer from buildbot import util from buildbot.interfaces import IRenderable from buildbot.process import buildstep from buildbot.steps.source.base import Source @implementer(IRenderable) class RepoDownloadsFromProperties(util.ComparableMixin): parse_download_re = (re.compile(r"repo download ([^ ]+) ([0-9]+/[0-9]+)"), re.compile(r"([^ ]+) ([0-9]+/[0-9]+)"), re.compile(r"([^ ]+)/([0-9]+/[0-9]+)"), ) compare_attrs = ('names',) def __init__(self, names): self.names = names def getRenderingFor(self, props): downloads = [] for propName in self.names: s = props.getProperty(propName) if s is not None: downloads.extend(self.parseDownloadProperty(s)) return downloads def parseDownloadProperty(self, s): """ lets try to be nice in the format we want can support several instances of "repo download proj number/patch" (direct copy paste from gerrit web site) or several instances of "proj number/patch" (simpler version) This feature allows integrator to build with several pending interdependent changes. returns list of repo downloads sent to the worker """ if s is None: return [] ret = [] for cur_re in self.parse_download_re: res = cur_re.search(s) while res: ret.append("%s %s" % (res.group(1), res.group(2))) s = s[:res.start(0)] + s[res.end(0):] res = cur_re.search(s) return ret @implementer(IRenderable) class RepoDownloadsFromChangeSource(util.ComparableMixin): compare_attrs = ('codebase',) def __init__(self, codebase=None): self.codebase = codebase def getRenderingFor(self, props): downloads = [] if self.codebase is None: changes = props.getBuild().allChanges() else: changes = props.getBuild().getSourceStamp(self.codebase).changes for change in changes: if ("event.type" in change.properties and change.properties["event.type"] == "patchset-created"): downloads.append("%s %s/%s" % (change.properties["event.change.project"], change.properties[ "event.change.number"], change.properties["event.patchSet.number"])) return downloads class Repo(Source): """ Class for Repo with all the smarts """ name = 'repo' renderables = ["manifestURL", "manifestBranch", "manifestFile", "tarball", "jobs", "syncAllBranches", "updateTarballAge", "manifestOverrideUrl", "repoDownloads", "depth"] ref_not_found_re = re.compile(r"fatal: Couldn't find remote ref") cherry_pick_error_re = re.compile(r"|".join([r"Automatic cherry-pick failed", r"error: " r"fatal: " r"possibly due to conflict resolution."])) re_change = re.compile(r".* refs/changes/\d\d/(\d+)/(\d+) -> FETCH_HEAD$") re_head = re.compile(r"^HEAD is now at ([0-9a-f]+)...") # number of retries, if we detect mirror desynchronization mirror_sync_retry = 10 # wait 1min between retries (thus default total retry time is 10min) mirror_sync_sleep = 60 def __init__(self, manifestURL=None, manifestBranch="master", manifestFile="default.xml", tarball=None, jobs=None, syncAllBranches=False, updateTarballAge=7 * 24.0 * 3600.0, manifestOverrideUrl=None, repoDownloads=None, depth=0, syncQuietly=False, **kwargs): """ @type manifestURL: string @param manifestURL: The URL which points at the repo manifests repository. @type manifestBranch: string @param manifestBranch: The manifest branch to check out by default. @type manifestFile: string @param manifestFile: The manifest to use for sync. @type syncAllBranches: bool. @param syncAllBranches: true, then we must slowly synchronize all branches. 
@type updateTarballAge: float @param updateTarballAge: renderable to determine the update tarball policy, given properties Returns: max age of tarball in seconds, or None, if we want to skip tarball update @type manifestOverrideUrl: string @param manifestOverrideUrl: optional http URL for overriding the manifest usually coming from Property setup by a ForceScheduler @type repoDownloads: list of strings @param repoDownloads: optional repo download to perform after the repo sync @type depth: integer @param depth: optional depth parameter to repo init. If specified, create a shallow clone with given depth. @type syncQuietly: bool. @param syncQuietly: true, then suppress verbose output from repo sync. """ self.manifestURL = manifestURL self.manifestBranch = manifestBranch self.manifestFile = manifestFile self.tarball = tarball self.jobs = jobs self.syncAllBranches = syncAllBranches self.updateTarballAge = updateTarballAge self.manifestOverrideUrl = manifestOverrideUrl if repoDownloads is None: repoDownloads = [] self.repoDownloads = repoDownloads self.depth = depth self.syncQuietly = syncQuietly super().__init__(**kwargs) assert self.manifestURL is not None def computeSourceRevision(self, changes): if not changes: return None return changes[-1].revision def filterManifestPatches(self): """ Patches to manifest projects are a bit special. repo does not support a way to download them automatically, so we need to implement the boilerplate manually. This code separates the manifest patches from the other patches, and generates commands to import those manifest patches. """ manifest_unrelated_downloads = [] manifest_related_downloads = [] for download in self.repoDownloads: project, ch_ps = download.split(" ")[-2:] if (self.manifestURL.endswith("/" + project) or self.manifestURL.endswith("/" + project + ".git")): ch, ps = map(int, ch_ps.split("/")) branch = "refs/changes/%02d/%d/%d" % (ch % 100, ch, ps) manifest_related_downloads.append( ["git", "fetch", self.manifestURL, branch]) manifest_related_downloads.append( ["git", "cherry-pick", "FETCH_HEAD"]) else: manifest_unrelated_downloads.append(download) self.repoDownloads = manifest_unrelated_downloads self.manifestDownloads = manifest_related_downloads def _repoCmd(self, command, abandonOnFailure=True, **kwargs): return self._Cmd(["repo"] + command, abandonOnFailure=abandonOnFailure, **kwargs) def _Cmd(self, command, abandonOnFailure=True, workdir=None, **kwargs): if workdir is None: workdir = self.workdir cmd = buildstep.RemoteShellCommand(workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, **kwargs) self.lastCommand = cmd # does not make sense to logEnviron for each command (just for first) self.logEnviron = False cmd.useLog(self.stdio_log, False) self.stdio_log.addHeader( "Starting command: %s\n" % (" ".join(command), )) self.step_status.setText(["%s" % (" ".join(command[:2]))]) d = self.runCommand(cmd) @d.addCallback def evaluateCommand(_): if abandonOnFailure and cmd.didFail(): self.descriptionDone = "repo failed at: %s" % ( " ".join(command[:2])) self.stdio_log.addStderr( "Source step failed while running command %s\n" % cmd) raise buildstep.BuildStepFailed() return cmd.rc return d def repoDir(self): return self.build.path_module.join(self.workdir, ".repo") def sourcedirIsUpdateable(self): return self.pathExists(self.repoDir()) def startVC(self, branch, revision, patch): d = self.doStartVC() d.addErrback(self.failed) @defer.inlineCallbacks def doStartVC(self): self.stdio_log = 
self.addLogForRemoteCommands("stdio") self.filterManifestPatches() if self.repoDownloads: self.stdio_log.addHeader( "will download:\n" + "repo download " + "\nrepo download ".join(self.repoDownloads) + "\n") self.willRetryInCaseOfFailure = True d = self.doRepoSync() @d.addErrback def maybeRetry(why): # in case the tree was corrupted somehow because of previous build # we clobber one time, and retry everything if why.check(buildstep.BuildStepFailed) and self.willRetryInCaseOfFailure: self.stdio_log.addStderr("got issue at first try:\n" + str(why) + "\nRetry after clobber...") return self.doRepoSync(forceClobber=True) return why # propagate to self.failed yield d yield self.maybeUpdateTarball() # starting from here, clobbering will not help yield self.doRepoDownloads() self.setStatus(self.lastCommand, 0) yield self.finished(0) @defer.inlineCallbacks def doClobberStart(self): yield self.runRmdir(self.workdir) yield self.runMkdir(self.workdir) yield self.maybeExtractTarball() @defer.inlineCallbacks def doRepoSync(self, forceClobber=False): updatable = yield self.sourcedirIsUpdateable() if not updatable or forceClobber: # no need to re-clobber in case of failure self.willRetryInCaseOfFailure = False yield self.doClobberStart() yield self.doCleanup() yield self._repoCmd(['init', '-u', self.manifestURL, '-b', self.manifestBranch, '-m', self.manifestFile, '--depth', str(self.depth)]) if self.manifestOverrideUrl: self.stdio_log.addHeader( "overriding manifest with %s\n" % (self.manifestOverrideUrl)) local_file = yield self.pathExists(self.build.path_module.join(self.workdir, self.manifestOverrideUrl)) if local_file: yield self._Cmd(["cp", "-f", self.manifestOverrideUrl, "manifest_override.xml"]) else: yield self._Cmd(["wget", self.manifestOverrideUrl, "-O", "manifest_override.xml"]) yield self._Cmd(["ln", "-sf", "../manifest_override.xml", "manifest.xml"], workdir=self.build.path_module.join(self.workdir, ".repo")) for command in self.manifestDownloads: yield self._Cmd(command, workdir=self.build.path_module.join(self.workdir, ".repo", "manifests")) command = ['sync', '--force-sync'] if self.jobs: command.append('-j' + str(self.jobs)) if not self.syncAllBranches: command.append('-c') if self.syncQuietly: command.append('-q') self.step_status.setText(["repo sync"]) self.stdio_log.addHeader("synching manifest %s from branch %s from %s\n" % (self.manifestFile, self.manifestBranch, self.manifestURL)) yield self._repoCmd(command) command = ['manifest', '-r', '-o', 'manifest-original.xml'] yield self._repoCmd(command) # check whether msg matches one of the # compiled regexps in self.re_error_messages def _findErrorMessages(self, error_re): for logname in ['stderr', 'stdout']: if not hasattr(self.lastCommand, logname): continue msg = getattr(self.lastCommand, logname) if not (re.search(error_re, msg) is None): return True return False def _sleep(self, delay): d = defer.Deferred() reactor.callLater(delay, d.callback, 1) return d @defer.inlineCallbacks def doRepoDownloads(self): self.repo_downloaded = "" for download in self.repoDownloads: command = ['download'] + download.split(' ') self.stdio_log.addHeader("downloading changeset %s\n" % (download)) retry = self.mirror_sync_retry + 1 while retry > 0: yield self._repoCmd(command, abandonOnFailure=False, collectStdout=True, collectStderr=True) if not self._findErrorMessages(self.ref_not_found_re): break retry -= 1 self.stdio_log.addStderr( "failed downloading changeset %s\n" % (download)) self.stdio_log.addHeader("wait one minute for mirror sync\n") yield 
self._sleep(self.mirror_sync_sleep) if retry == 0: self.descriptionDone = "repo: change %s does not exist" % download raise buildstep.BuildStepFailed() if self.lastCommand.didFail() or self._findErrorMessages(self.cherry_pick_error_re): # cherry pick error! We create a diff with status current workdir # in stdout, which reveals the merge errors and exit command = ['forall', '-c', 'git', 'diff', 'HEAD'] yield self._repoCmd(command, abandonOnFailure=False) self.descriptionDone = "download failed: %s" % download raise buildstep.BuildStepFailed() if hasattr(self.lastCommand, 'stderr'): lines = self.lastCommand.stderr.split("\n") match1 = match2 = False for line in lines: if not match1: match1 = self.re_change.match(line) if not match2: match2 = self.re_head.match(line) if match1 and match2: self.repo_downloaded += "%s/%s %s " % (match1.group(1), match1.group(2), match2.group(1)) self.setProperty("repo_downloaded", self.repo_downloaded, "Source") def computeTarballOptions(self): # Keep in mind that the compression part of tarball generation # can be non negligible tar = ['tar'] if self.tarball.endswith("pigz"): tar.append('-I') tar.append('pigz') elif self.tarball.endswith("gz"): tar.append('-z') elif self.tarball.endswith("bz2") or self.tarball.endswith("bz"): tar.append('-j') elif self.tarball.endswith("lzma"): tar.append('--lzma') elif self.tarball.endswith("lzop"): tar.append('--lzop') return tar @defer.inlineCallbacks def maybeExtractTarball(self): if self.tarball: tar = self.computeTarballOptions() + ['-xvf', self.tarball] res = yield self._Cmd(tar, abandonOnFailure=False) if res: # error with tarball.. erase repo dir and tarball yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False) yield self.runRmdir(self.repoDir(), abandonOnFailure=False) @defer.inlineCallbacks def maybeUpdateTarball(self): if not self.tarball or self.updateTarballAge is None: return # tarball path is absolute, so we cannot use worker's stat command # stat -c%Y gives mtime in second since epoch res = yield self._Cmd(["stat", "-c%Y", self.tarball], collectStdout=True, abandonOnFailure=False) if not res: tarball_mtime = int(self.lastCommand.stdout) yield self._Cmd(["stat", "-c%Y", "."], collectStdout=True) now_mtime = int(self.lastCommand.stdout) age = now_mtime - tarball_mtime if res or age > self.updateTarballAge: tar = self.computeTarballOptions() + \ ['-cvf', self.tarball, ".repo"] res = yield self._Cmd(tar, abandonOnFailure=False) if res: # error with tarball.. erase tarball, but don't fail yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False) # a simple shell script to gather all cleanup tweaks... # doing them one by one just complicate the stuff # and mess up the stdio log def _getCleanupCommand(self): """also used by tests for expectations""" return textwrap.dedent("""\ set -v if [ -d .repo/manifests ] then # repo just refuse to run if manifest is messed up # so ensure we are in a known state cd .repo/manifests rm -f .git/index.lock git fetch origin git reset --hard remotes/origin/%(manifestBranch)s git config branch.default.merge %(manifestBranch)s cd .. ln -sf manifests/%(manifestFile)s manifest.xml cd .. 
fi repo forall -c rm -f .git/index.lock repo forall -c git clean -f -d -x 2>/dev/null repo forall -c git reset --hard HEAD 2>/dev/null rm -f %(workdir)s/.repo/project.list """) % dict(manifestBranch=self.manifestBranch, manifestFile=self.manifestFile, workdir=self.workdir) def doCleanup(self): command = self._getCleanupCommand() return self._Cmd(["bash", "-c", command], abandonOnFailure=False) buildbot-2.6.0/master/buildbot/steps/source/svn.py000066400000000000000000000434461361162603000222510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re import xml.dom.minidom import xml.parsers.expat from urllib.parse import quote as urlquote from urllib.parse import unquote as urlunquote from urllib.parse import urlparse from urllib.parse import urlunparse from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.config import ConfigErrors from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.steps.source.base import Source class SVN(Source): """I perform Subversion checkout/update operations.""" name = 'svn' renderables = ['repourl', 'password'] possible_methods = ('clean', 'fresh', 'clobber', 'copy', 'export', None) def __init__(self, repourl=None, mode='incremental', method=None, username=None, password=None, extra_args=None, keep_on_purge=None, depth=None, preferLastChangedRev=False, **kwargs): self.repourl = repourl self.username = username self.password = password self.extra_args = extra_args self.keep_on_purge = keep_on_purge or [] self.depth = depth self.method = method self.mode = mode self.preferLastChangedRev = preferLastChangedRev super().__init__(**kwargs) errors = [] if not self._hasAttrGroupMember('mode', self.mode): errors.append("mode %s is not one of %s" % (self.mode, self._listAttrGroupMembers('mode'))) if self.method not in self.possible_methods: errors.append("method %s is not one of %s" % (self.method, self.possible_methods)) if repourl is None: errors.append("you must provide repourl") if errors: raise ConfigErrors(errors) def startVC(self, branch, revision, patch): self.revision = revision self.method = self._getMethod() self.stdio_log = self.addLogForRemoteCommands("stdio") # if the version is new enough, and the password is set, then obfuscate # it if self.password is not None: if not self.workerVersionIsOlderThan('shell', '2.16'): self.password = ('obfuscated', self.password, 'XXXXXX') else: log.msg("Worker does not understand obfuscation; " "svn password will be logged") d = self.checkSvn() @d.addCallback def checkInstall(svnInstalled): if not svnInstalled: raise buildstep.BuildStepFailed( "SVN is not installed on worker") return 0 d.addCallback(lambda _: self.sourcedirIsPatched()) @d.addCallback def checkPatched(patched): if patched: return self.purge(False) return 0 
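# Illustrative sketch (assumption): the Repo step above combined with
# RepoDownloadsFromProperties, so a force-scheduler property can inject
# "project change/patchset" downloads. URLs and property names are placeholders.
from buildbot.steps.source.repo import Repo, RepoDownloadsFromProperties

repo_sync = Repo(manifestURL='https://gerrit.example.com/manifest.git',
                 manifestBranch='master',
                 manifestFile='default.xml',
                 jobs=4,
                 repoDownloads=RepoDownloadsFromProperties(['repo_download']))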
d.addCallback(self._getAttrGroupMember('mode', self.mode)) if patch: d.addCallback(self.patch, patch) d.addCallback(self.parseGotRevision) d.addCallback(self.finish) d.addErrback(self.failed) return d @defer.inlineCallbacks def mode_full(self, _): if self.method == 'clobber': yield self.clobber() return elif self.method in ['copy', 'export']: yield self.copy() return updatable = yield self._sourcedirIsUpdatable() if not updatable: # blow away the old (un-updatable) directory and checkout yield self.clobber() elif self.method == 'clean': yield self.clean() elif self.method == 'fresh': yield self.fresh() @defer.inlineCallbacks def mode_incremental(self, _): updatable = yield self._sourcedirIsUpdatable() if not updatable: # blow away the old (un-updatable) directory and checkout yield self.clobber() else: # otherwise, do an update command = ['update'] if self.revision: command.extend(['--revision', str(self.revision)]) yield self._dovccmd(command) def clobber(self): d = self.runRmdir(self.workdir, timeout=self.timeout) d.addCallback(lambda _: self._checkout()) return d def fresh(self): d = self.purge(True) cmd = ['update'] if self.revision: cmd.extend(['--revision', str(self.revision)]) d.addCallback(lambda _: self._dovccmd(cmd)) return d def clean(self): d = self.purge(False) cmd = ['update'] if self.revision: cmd.extend(['--revision', str(self.revision)]) d.addCallback(lambda _: self._dovccmd(cmd)) return d @defer.inlineCallbacks def copy(self): yield self.runRmdir(self.workdir, timeout=self.timeout) checkout_dir = 'source' if self.codebase: checkout_dir = self.build.path_module.join( checkout_dir, self.codebase) # temporarily set workdir = checkout_dir and do an incremental checkout try: old_workdir = self.workdir self.workdir = checkout_dir yield self.mode_incremental(None) finally: self.workdir = old_workdir self.workdir = old_workdir # if we're copying, copy; otherwise, export from source to build if self.method == 'copy': cmd = remotecommand.RemoteCommand('cpdir', {'fromdir': checkout_dir, 'todir': self.workdir, 'logEnviron': self.logEnviron}) else: export_cmd = ['svn', 'export'] if self.revision: export_cmd.extend(["--revision", str(self.revision)]) if self.username: export_cmd.extend(['--username', self.username]) if self.password is not None: export_cmd.extend(['--password', self.password]) if self.extra_args: export_cmd.extend(self.extra_args) export_cmd.extend([checkout_dir, self.workdir]) cmd = remotecommand.RemoteShellCommand('', export_cmd, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if cmd.didFail(): raise buildstep.BuildStepFailed() def finish(self, res): d = defer.succeed(res) @d.addCallback def _gotResults(results): self.setStatus(self.cmd, results) return results d.addCallback(self.finished) return d def _dovccmd(self, command, collectStdout=False, collectStderr=False, abandonOnFailure=True): assert command, "No command specified" command.extend(['--non-interactive', '--no-auth-cache']) if self.username: command.extend(['--username', self.username]) if self.password is not None: command.extend(['--password', self.password]) if self.depth: command.extend(['--depth', self.depth]) if self.extra_args: command.extend(self.extra_args) cmd = remotecommand.RemoteShellCommand(self.workdir, ['svn'] + command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=collectStdout, collectStderr=collectStderr) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback 
def evaluateCommand(_): if cmd.didFail() and abandonOnFailure: log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout and collectStderr: return (cmd.stdout, cmd.stderr) elif collectStdout: return cmd.stdout elif collectStderr: return cmd.stderr return cmd.rc return d def _getMethod(self): if self.method is not None and self.mode != 'incremental': return self.method elif self.mode == 'incremental': return None elif self.method is None and self.mode == 'full': return 'fresh' @defer.inlineCallbacks def _sourcedirIsUpdatable(self): # first, perform a stat to ensure that this is really an svn directory res = yield self.pathExists(self.build.path_module.join(self.workdir, '.svn')) if not res: return False # then run 'svn info --xml' to check that the URL matches our repourl stdout, stderr = yield self._dovccmd(['info', '--xml'], collectStdout=True, collectStderr=True, abandonOnFailure=False) # svn: E155037: Previous operation has not finished; run 'cleanup' if # it was interrupted if 'E155037:' in stderr: return False try: stdout_xml = xml.dom.minidom.parseString(stdout) extractedurl = stdout_xml.getElementsByTagName( 'url')[0].firstChild.nodeValue except xml.parsers.expat.ExpatError: msg = "Corrupted xml, aborting step" self.stdio_log.addHeader(msg) raise buildstep.BuildStepFailed() return extractedurl == self.svnUriCanonicalize(self.repourl) @defer.inlineCallbacks def parseGotRevision(self, _): # if this was a full/export, then we need to check svnversion in the # *source* directory, not the build directory svnversion_dir = self.workdir if self.mode == 'full' and self.method == 'export': svnversion_dir = 'source' cmd = remotecommand.RemoteShellCommand(svnversion_dir, ['svn', 'info', '--xml'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout, collectStdout=True) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) stdout = cmd.stdout try: stdout_xml = xml.dom.minidom.parseString(stdout) except xml.parsers.expat.ExpatError: msg = "Corrupted xml, aborting step" self.stdio_log.addHeader(msg) raise buildstep.BuildStepFailed() revision = None if self.preferLastChangedRev: try: revision = stdout_xml.getElementsByTagName( 'commit')[0].attributes['revision'].value except (KeyError, IndexError): msg = ("SVN.parseGotRevision unable to detect Last Changed Rev in" " output of svn info") log.msg(msg) # fall through and try to get 'Revision' instead if revision is None: try: revision = stdout_xml.getElementsByTagName( 'entry')[0].attributes['revision'].value except (KeyError, IndexError): msg = ("SVN.parseGotRevision unable to detect revision in" " output of svn info") log.msg(msg) raise buildstep.BuildStepFailed() msg = "Got SVN revision %s" % (revision, ) self.stdio_log.addHeader(msg) self.updateSourceProperty('got_revision', revision) return cmd.rc def purge(self, ignore_ignores): """Delete everything that shown up on status.""" command = ['status', '--xml'] if ignore_ignores: command.append('--no-ignore') d = self._dovccmd(command, collectStdout=True) @d.addCallback def parseAndRemove(stdout): files = [] for filename in self.getUnversionedFiles(stdout, self.keep_on_purge): filename = self.build.path_module.join(self.workdir, filename) files.append(filename) if not files: d = defer.succeed(0) else: if self.workerVersionIsOlderThan('rmdir', '2.14'): d = self.removeFiles(files) else: d = self.runRmdir(files, abandonOnFailure=False, timeout=self.timeout) return d @d.addCallback def evaluateCommand(rc): if rc != 0: log.msg("Failed 
removing files") raise buildstep.BuildStepFailed() return rc return d @staticmethod def getUnversionedFiles(xmlStr, keep_on_purge): try: result_xml = xml.dom.minidom.parseString(xmlStr) except xml.parsers.expat.ExpatError: log.err("Corrupted xml, aborting step") raise buildstep.BuildStepFailed() for entry in result_xml.getElementsByTagName('entry'): (wc_status,) = entry.getElementsByTagName('wc-status') if wc_status.getAttribute('item') == 'external': continue if wc_status.getAttribute('item') == 'missing': continue filename = entry.getAttribute('path') if filename in keep_on_purge or filename == '': continue yield filename @defer.inlineCallbacks def removeFiles(self, files): for filename in files: res = yield self.runRmdir(filename, abandonOnFailure=False, timeout=self.timeout) if res: return res return 0 def checkSvn(self): cmd = remotecommand.RemoteShellCommand(self.workdir, ['svn', '--version'], env=self.env, logEnviron=self.logEnviron, timeout=self.timeout) cmd.useLog(self.stdio_log, False) d = self.runCommand(cmd) @d.addCallback def evaluate(_): return cmd.rc == 0 return d def computeSourceRevision(self, changes): if not changes or None in [c.revision for c in changes]: return None lastChange = max([int(c.revision) for c in changes]) return lastChange @staticmethod def svnUriCanonicalize(uri): collapse = re.compile(r'([^/]+/\.\./?|/\./|//|/\.$|/\.\.$|^/\.\.)') server_authority = re.compile(r'^(?:([^@]+)@)?([^:]+)(?::(.+))?$') default_port = {'http': '80', 'https': '443', 'svn': '3690'} relative_schemes = ['http', 'https', 'svn'] def quote(uri): return urlquote(uri, "!$&'()*+,-./:=@_~", encoding="latin-1") if not uri or uri == '/': return uri (scheme, authority, path, parameters, query, fragment) = urlparse(uri) scheme = scheme.lower() if authority: mo = server_authority.match(authority) if not mo: return uri # give up userinfo, host, port = mo.groups() if host[-1] == '.': host = host[:-1] authority = host.lower() if userinfo: authority = "%s@%s" % (userinfo, authority) if port and port != default_port.get(scheme, None): authority = "%s:%s" % (authority, port) if scheme in relative_schemes: last_path = path while True: path = collapse.sub('/', path, 1) if last_path == path: break last_path = path path = quote(urlunquote(path)) canonical_uri = urlunparse( (scheme, authority, path, parameters, query, fragment)) if canonical_uri == '/': return canonical_uri elif canonical_uri[-1] == '/' and canonical_uri[-2] != '/': return canonical_uri[:-1] return canonical_uri def _checkout(self): checkout_cmd = ['checkout', self.repourl, '.'] if self.revision: checkout_cmd.extend(["--revision", str(self.revision)]) if self.retry: abandonOnFailure = (self.retry[1] <= 0) else: abandonOnFailure = True d = self._dovccmd(checkout_cmd, abandonOnFailure=abandonOnFailure) def _retry(res): if self.stopped or res == 0: return res delay, repeats = self.retry if repeats > 0: log.msg("Checkout failed, trying %d more times after %d seconds" % (repeats, delay)) self.retry = (delay, repeats - 1) df = defer.Deferred() df.addCallback(lambda _: self.runRmdir(self.workdir, timeout=self.timeout)) df.addCallback(lambda _: self._checkout()) reactor.callLater(delay, df.callback, None) return df return res if self.retry: d.addCallback(_retry) return d buildbot-2.6.0/master/buildbot/steps/subunit.py000066400000000000000000000127441361162603000216310ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from unittest import TestResult from twisted.python.compat import NativeStringIO from buildbot.process import logobserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.shell import ShellCommand class SubunitLogObserver(logobserver.LogLineObserver, TestResult): """Observe a log that may contain subunit output. This class extends TestResult to receive the callbacks from the subunit parser in the most direct fashion. """ def __init__(self): super().__init__() try: from subunit import TestProtocolServer, PROGRESS_CUR, PROGRESS_SET from subunit import PROGRESS_PUSH, PROGRESS_POP except ImportError: raise ImportError("subunit is not importable, but is required for " "SubunitLogObserver support.") self.PROGRESS_CUR = PROGRESS_CUR self.PROGRESS_SET = PROGRESS_SET self.PROGRESS_PUSH = PROGRESS_PUSH self.PROGRESS_POP = PROGRESS_POP self.warningio = NativeStringIO() self.protocol = TestProtocolServer(self, self.warningio) self.skips = [] self.seen_tags = set() # don't yet know what tags does in subunit def outLineReceived(self, line): """Process a received stdout line.""" # Impedance mismatch: subunit wants lines, observers get lines-no\n self.protocol.lineReceived(line + '\n') def errLineReceived(self, line): """same for stderr line.""" self.protocol.lineReceived(line + '\n') def stopTest(self, test): super().stopTest(test) self.step.setProgress('tests', self.testsRun) def addSkip(self, test, detail): if hasattr(TestResult, 'addSkip'): super().addSkip(test, detail) else: self.skips.append((test, detail)) def addError(self, test, err): super().addError(test, err) self.issue(test, err) def addFailure(self, test, err): super().addFailure(test, err) self.issue(test, err) def issue(self, test, err): """An issue - failing, erroring etc test.""" self.step.setProgress('tests failed', len(self.failures) + len(self.errors)) def tags(self, new_tags, gone_tags): """Accumulate the seen tags.""" self.seen_tags.update(new_tags) class SubunitShellCommand(ShellCommand): """A ShellCommand that sniffs subunit output. 
""" def __init__(self, failureOnNoTests=False, *args, **kwargs): super().__init__(*args, **kwargs) self.failureOnNoTests = failureOnNoTests self.ioObserver = SubunitLogObserver() self.addLogObserver('stdio', self.ioObserver) self.progressMetrics = self.progressMetrics + ('tests', 'tests failed') def commandComplete(self, cmd): # figure out all statistics about the run ob = self.ioObserver failures = len(ob.failures) errors = len(ob.errors) skips = len(ob.skips) total = ob.testsRun count = failures + errors text = [self.name] text2 = "" if not count: results = SUCCESS if total: text += ["%d %s" % (total, total == 1 and "test" or "tests"), "passed"] else: if self.failureOnNoTests: results = FAILURE text += ["no tests", "run"] else: results = FAILURE text.append("Total %d test(s)" % total) if failures: text.append("%d %s" % (failures, failures == 1 and "failure" or "failures")) if errors: text.append("%d %s" % (errors, errors == 1 and "error" or "errors")) text2 = "%d %s" % (count, (count == 1 and 'test' or 'tests')) if skips: text.append("%d %s" % (skips, skips == 1 and "skip" or "skips")) # TODO: expectedFailures/unexpectedSuccesses self.results = results self.text = text self.text2 = [text2] def evaluateCommand(self, cmd): if cmd.didFail(): return FAILURE return self.results def createSummary(self, loog): ob = self.ioObserver problems = "" for test, err in ob.errors + ob.failures: problems += "%s\n%s" % (test.id(), err) if problems: self.addCompleteLog("problems", problems) warnings = ob.warningio.getvalue() if warnings: self.addCompleteLog("warnings", warnings) def _describe(self, done): return self.text buildbot-2.6.0/master/buildbot/steps/transfer.py000066400000000000000000000503551361162603000217640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import stat from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.interfaces import WorkerTooOldError from buildbot.process import remotecommand from buildbot.process import remotetransfer from buildbot.process.buildstep import FAILURE from buildbot.process.buildstep import SKIPPED from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.steps.worker import CompositeStepMixin from buildbot.util import flatten from buildbot.util.eventual import eventually def makeStatusRemoteCommand(step, remote_command, args): self = remotecommand.RemoteCommand( remote_command, args, decodeRC={None: SUCCESS, 0: SUCCESS}) self.useLog(step.stdio_log) return self class _TransferBuildStep(BuildStep): """ Base class for FileUpload and FileDownload to factor out common functionality. 
""" renderables = ['workdir'] haltOnFailure = True flunkOnFailure = True def __init__(self, workdir=None, **buildstep_kwargs): super().__init__(**buildstep_kwargs) self.workdir = workdir def runTransferCommand(self, cmd, writer=None): # Run a transfer step, add a callback to extract the command status, # add an error handler that cancels the writer. self.cmd = cmd d = self.runCommand(cmd) @d.addCallback def checkResult(_): if writer and cmd.didFail(): writer.cancel() return FAILURE if cmd.didFail() else SUCCESS @d.addErrback def cancel(res): if writer: writer.cancel() return res return d def interrupt(self, reason): self.addCompleteLog('interrupt', str(reason)) if self.cmd: d = self.cmd.interrupt(reason) return d class FileUpload(_TransferBuildStep): name = 'upload' renderables = [ 'masterdest', 'url', 'urlText', 'workersrc', ] def __init__(self, workersrc=None, masterdest=None, workdir=None, maxsize=None, blocksize=256 * 1024, mode=None, keepstamp=False, url=None, urlText=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workersrc is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrc = workersrc self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode self.keepstamp = keepstamp self.url = url self.urlText = urlText def finished(self, results): log.msg("File '{}' upload finished with results {}".format( os.path.basename(self.workersrc), str(results))) self.step_status.setText(self.descriptionDone) super().finished(results) def start(self): self.checkWorkerHasCommand("uploadFile") self.stdio_log = self.addLog("stdio") source = self.workersrc masterdest = self.masterdest # we rely upon the fact that the buildmaster runs chdir'ed into its # basedir to make sure that relative paths in masterdest are expanded # properly. TODO: maybe pass the master's basedir all the way down # into the BuildStep so we can do this better. masterdest = os.path.expanduser(masterdest) log.msg("FileUpload started, from worker %r to master %r" % (source, masterdest)) if self.description is None: self.description = ['uploading %s' % (os.path.basename(source))] if self.descriptionDone is None: self.descriptionDone = self.description if self.url is not None: urlText = self.urlText if urlText is None: urlText = os.path.basename(masterdest) self.addURL(urlText, self.url) self.step_status.setText(self.description) # we use maxsize to limit the amount of data on both sides fileWriter = remotetransfer.FileWriter( masterdest, self.maxsize, self.mode) if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"): m = ("This worker (%s) does not support preserving timestamps. " "Please upgrade the worker." 
% self.build.workername) raise WorkerTooOldError(m) # default arguments args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'keepstamp': self.keepstamp, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadFile', args) d = self.runTransferCommand(cmd, fileWriter) d.addCallback(self.finished).addErrback(self.failed) class DirectoryUpload(_TransferBuildStep): name = 'upload' renderables = ['workersrc', 'masterdest', 'url'] def __init__(self, workersrc=None, masterdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, compress=None, url=None, urlText=None, **buildstep_kwargs ): # Emulate that first two arguments are positional. if workersrc is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrc = workersrc self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if compress not in (None, 'gz', 'bz2'): config.error( "'compress' must be one of None, 'gz', or 'bz2'") self.compress = compress self.url = url self.urlText = urlText def start(self): self.checkWorkerHasCommand("uploadDirectory") self.stdio_log = self.addLog("stdio") source = self.workersrc masterdest = self.masterdest # we rely upon the fact that the buildmaster runs chdir'ed into its # basedir to make sure that relative paths in masterdest are expanded # properly. TODO: maybe pass the master's basedir all the way down # into the BuildStep so we can do this better. masterdest = os.path.expanduser(masterdest) log.msg("DirectoryUpload started, from worker %r to master %r" % (source, masterdest)) self.descriptionDone = "uploading %s" % os.path.basename(source) if self.url is not None: urlText = self.urlText if urlText is None: urlText = os.path.basename(os.path.normpath(masterdest)) self.addURL(urlText, self.url) # we use maxsize to limit the amount of data on both sides dirWriter = remotetransfer.DirectoryWriter( masterdest, self.maxsize, self.compress, 0o600) # default arguments args = { 'workdir': self.workdir, 'writer': dirWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'compress': self.compress } if self.workerVersionIsOlderThan('uploadDirectory', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args) d = self.runTransferCommand(cmd, dirWriter) d.addCallback(self.finished).addErrback(self.failed) class MultipleFileUpload(_TransferBuildStep, CompositeStepMixin): name = 'upload' logEnviron = False renderables = ['workersrcs', 'masterdest', 'url'] def __init__(self, workersrcs=None, masterdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, glob=False, mode=None, compress=None, keepstamp=False, url=None, **buildstep_kwargs): # Emulate that first two arguments are positional. 
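# Illustrative usage sketch (added comment, not part of the original source):
# in a master.cfg this step is normally constructed through the plugin
# namespace, for example something along the lines of
#   steps.MultipleFileUpload(workersrcs=["_trial_temp/*.log"], glob=True,
#                            masterdest="~/build-logs/")
# where `steps` would come from `from buildbot.plugins import steps`; the
# source pattern and destination path above are purely hypothetical.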
if workersrcs is None or masterdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.workersrcs = workersrcs self.masterdest = masterdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode if compress not in (None, 'gz', 'bz2'): config.error( "'compress' must be one of None, 'gz', or 'bz2'") self.compress = compress self.glob = glob self.keepstamp = keepstamp self.url = url def uploadFile(self, source, masterdest): fileWriter = remotetransfer.FileWriter( masterdest, self.maxsize, self.mode) args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'keepstamp': self.keepstamp, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadFile', args) return self.runTransferCommand(cmd, fileWriter) def uploadDirectory(self, source, masterdest): dirWriter = remotetransfer.DirectoryWriter( masterdest, self.maxsize, self.compress, 0o600) args = { 'workdir': self.workdir, 'writer': dirWriter, 'maxsize': self.maxsize, 'blocksize': self.blocksize, 'compress': self.compress } if self.workerVersionIsOlderThan('uploadDirectory', '3.0'): args['slavesrc'] = source else: args['workersrc'] = source cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args) return self.runTransferCommand(cmd, dirWriter) @defer.inlineCallbacks def startUpload(self, source, destdir): masterdest = os.path.join(destdir, os.path.basename(source)) args = { 'file': source, 'workdir': self.workdir } cmd = makeStatusRemoteCommand(self, 'stat', args) yield self.runCommand(cmd) if cmd.rc != 0: self.addCompleteLog('stderr', 'File {} not available at worker'.format(args)) return FAILURE s = cmd.updates['stat'][-1] if stat.S_ISDIR(s[stat.ST_MODE]): result = yield self.uploadDirectory(source, masterdest) elif stat.S_ISREG(s[stat.ST_MODE]): result = yield self.uploadFile(source, masterdest) else: self.addCompleteLog('stderr', '{} is neither a regular file, nor a directory'.format(source)) return FAILURE yield self.uploadDone(result, source, masterdest) return result def uploadDone(self, result, source, masterdest): pass def allUploadsDone(self, result, sources, masterdest): if self.url is not None: self.addURL( os.path.basename(os.path.normpath(masterdest)), self.url) def start(self): self.checkWorkerHasCommand("uploadDirectory") self.checkWorkerHasCommand("uploadFile") self.checkWorkerHasCommand("stat") self.stdio_log = self.addLog("stdio") masterdest = os.path.expanduser(self.masterdest) sources = self.workersrcs if isinstance(self.workersrcs, list) else [self.workersrcs] if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"): m = ("This worker (%s) does not support preserving timestamps. " "Please upgrade the worker." 
% self.build.workername) raise WorkerTooOldError(m) if not sources: return self.finished(SKIPPED) @defer.inlineCallbacks def globSources(sources): results = yield defer.gatherResults([ self.runGlob( os.path.join(self.workdir, source), abandonOnFailure=False) for source in sources ]) results = [self.workerPathToMasterPath(p) for p in flatten(results)] return results @defer.inlineCallbacks def uploadSources(sources): if not sources: return SKIPPED else: for source in sources: result = yield self.startUpload(source, masterdest) if result == FAILURE: return FAILURE return SUCCESS def logUpload(sources): log.msg("MultipleFileUpload started, from worker %r to master %r" % (sources, masterdest)) nsrcs = len(sources) self.descriptionDone = 'uploading %d %s' % (nsrcs, 'file' if nsrcs == 1 else 'files') return sources if self.glob: s = globSources(sources) else: s = defer.succeed(sources) s.addCallback(logUpload) d = s.addCallback(uploadSources) @d.addCallback def allUploadsDone(result): d = defer.maybeDeferred( self.allUploadsDone, result, sources, masterdest) d.addCallback(lambda _: result) return d d.addCallback(self.finished).addErrback(self.failed) class FileDownload(_TransferBuildStep): name = 'download' renderables = ['mastersrc', 'workerdest'] def __init__(self, mastersrc, workerdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, mode=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.mastersrc = mastersrc self.workerdest = workerdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( 'mode must be an integer or None') self.mode = mode def start(self): self.checkWorkerHasCommand("downloadFile") self.stdio_log = self.addLog("stdio") # we are currently in the buildmaster's basedir, so any non-absolute # paths will be interpreted relative to that source = os.path.expanduser(self.mastersrc) workerdest = self.workerdest log.msg("FileDownload started, from master %r to worker %r" % (source, workerdest)) self.descriptionDone = "downloading to %s" % os.path.basename( workerdest) # setup structures for reading the file try: fp = open(source, 'rb') except IOError: # if file does not exist, bail out with an error self.addCompleteLog('stderr', 'File %r not available at master' % source) # TODO: once BuildStep.start() gets rewritten to use # maybeDeferred, just re-raise the exception here. eventually(BuildStep.finished, self, FAILURE) return fileReader = remotetransfer.FileReader(fp) # default arguments args = { 'maxsize': self.maxsize, 'reader': fileReader, 'blocksize': self.blocksize, 'workdir': self.workdir, 'mode': self.mode, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest cmd = makeStatusRemoteCommand(self, 'downloadFile', args) d = self.runTransferCommand(cmd) d.addCallback(self.finished).addErrback(self.failed) class StringDownload(_TransferBuildStep): name = 'string_download' renderables = ['workerdest', 's'] def __init__(self, s, workerdest=None, workdir=None, maxsize=None, blocksize=16 * 1024, mode=None, **buildstep_kwargs): # Emulate that first two arguments are positional. 
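# Hedged usage sketch (added comment, not original code): StringDownload is
# typically used to drop a small rendered text file onto the worker, e.g.
#   steps.StringDownload(util.Interpolate("revision: %(prop:got_revision)s\n"),
#                        workerdest="version.txt")
# with `steps`/`util` taken from buildbot.plugins; the property name and the
# destination file name here are only examples.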
if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") super().__init__(workdir=workdir, **buildstep_kwargs) self.s = s self.workerdest = workerdest self.maxsize = maxsize self.blocksize = blocksize if not isinstance(mode, (int, type(None))): config.error( "StringDownload step's mode must be an integer or None," " got '%s'" % mode) self.mode = mode def start(self): # we use 'downloadFile' remote command on the worker self.checkWorkerHasCommand("downloadFile") self.stdio_log = self.addLog("stdio") # we are currently in the buildmaster's basedir, so any non-absolute # paths will be interpreted relative to that workerdest = self.workerdest log.msg("StringDownload started, from master to worker %r" % workerdest) self.descriptionDone = "downloading to %s" % os.path.basename( workerdest) # setup structures for reading the file fileReader = remotetransfer.StringFileReader(self.s) # default arguments args = { 'maxsize': self.maxsize, 'reader': fileReader, 'blocksize': self.blocksize, 'workdir': self.workdir, 'mode': self.mode, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest cmd = makeStatusRemoteCommand(self, 'downloadFile', args) d = self.runTransferCommand(cmd) d.addCallback(self.finished).addErrback(self.failed) class JSONStringDownload(StringDownload): name = "json_download" def __init__(self, o, workerdest=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workerdest is None: raise TypeError("__init__() takes at least 3 arguments") if 's' in buildstep_kwargs: del buildstep_kwargs['s'] s = json.dumps(o) super().__init__(s=s, workerdest=workerdest, **buildstep_kwargs) class JSONPropertiesDownload(StringDownload): name = "json_properties_download" def __init__(self, workerdest=None, **buildstep_kwargs): # Emulate that first two arguments are positional. if workerdest is None: raise TypeError("__init__() takes at least 2 arguments") self.super_class = StringDownload if 's' in buildstep_kwargs: del buildstep_kwargs['s'] super().__init__(s=None, workerdest=workerdest, **buildstep_kwargs) def start(self): properties = self.build.getProperties() props = {} for key, value, source in properties.asList(): props[key] = value self.s = json.dumps(dict( properties=props, sourcestamps=[ss.asDict() for ss in self.build.getAllSourceStamps()], ), ) return self.super_class.start(self) buildbot-2.6.0/master/buildbot/steps/trigger.py000066400000000000000000000324231361162603000215770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.interfaces import ITriggerableScheduler from buildbot.process.buildstep import CANCELLED from buildbot.process.buildstep import EXCEPTION from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import BuildStep from buildbot.process.properties import Properties from buildbot.process.properties import Property from buildbot.process.results import statusToString from buildbot.process.results import worst_status class Trigger(BuildStep): name = "trigger" renderables = [ 'alwaysUseLatest', 'parent_relationship', 'schedulerNames', 'set_properties', 'sourceStamps', 'updateSourceStamp', 'waitForFinish' ] flunkOnFailure = True def __init__(self, schedulerNames=None, sourceStamp=None, sourceStamps=None, updateSourceStamp=None, alwaysUseLatest=False, waitForFinish=False, set_properties=None, copy_properties=None, parent_relationship="Triggered from", unimportantSchedulerNames=None, **kwargs): if schedulerNames is None: schedulerNames = [] if unimportantSchedulerNames is None: unimportantSchedulerNames = [] if not schedulerNames: config.error( "You must specify a scheduler to trigger") if (sourceStamp or sourceStamps) and (updateSourceStamp is not None): config.error( "You can't specify both sourceStamps and updateSourceStamp") if (sourceStamp or sourceStamps) and alwaysUseLatest: config.error( "You can't specify both sourceStamps and alwaysUseLatest") if alwaysUseLatest and (updateSourceStamp is not None): config.error( "You can't specify both alwaysUseLatest and updateSourceStamp" ) if not set(schedulerNames).issuperset(set(unimportantSchedulerNames)): config.error( "unimportantSchedulerNames must be a subset of schedulerNames" ) self.schedulerNames = schedulerNames self.unimportantSchedulerNames = unimportantSchedulerNames self.sourceStamps = sourceStamps or [] if sourceStamp: self.sourceStamps.append(sourceStamp) if updateSourceStamp is not None: self.updateSourceStamp = updateSourceStamp else: self.updateSourceStamp = not (alwaysUseLatest or self.sourceStamps) self.alwaysUseLatest = alwaysUseLatest self.waitForFinish = waitForFinish if set_properties is None: set_properties = {} if copy_properties is None: copy_properties = [] properties = {} properties.update(set_properties) for i in copy_properties: properties[i] = Property(i) self.set_properties = properties self.parent_relationship = parent_relationship self.running = False self.ended = False self.brids = [] self.triggeredNames = None self.waitForFinishDeferred = None super().__init__(**kwargs) def interrupt(self, reason): # We cancel the buildrequests, as the data api handles # both cases: # - build started: stop is sent, # - build not created yet: related buildrequests are set to CANCELLED. # Note that there is an identified race condition though (more details # are available at buildbot.data.buildrequests). 
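# (Added clarification.) The loop below issues one data-API control action of
# the form master.data.control("cancel", {...}, ("buildrequests", brid)) per
# build request this step created: requests whose builds already started are
# stopped, the remaining ones are marked CANCELLED directly.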
for brid in self.brids: self.master.data.control("cancel", {'reason': 'parent build was interrupted'}, ("buildrequests", brid)) if self.running and not self.ended: self.ended = True # if we are interrupted because of a connection lost, we interrupt synchronously if self.build.conn is None and self.waitForFinishDeferred is not None: self.waitForFinishDeferred.cancel() # Create the properties that are used for the trigger def createTriggerProperties(self, properties): # make a new properties object from a dict rendered by the old # properties object trigger_properties = Properties() trigger_properties.update(properties, "Trigger") return trigger_properties def getSchedulerByName(self, name): # we use the fact that scheduler_manager is a multiservice, with schedulers as childs # this allow to quickly find schedulers instance by name schedulers = self.master.scheduler_manager.namedServices if name not in schedulers: raise ValueError("unknown triggered scheduler: %r" % (name,)) sch = schedulers[name] if not ITriggerableScheduler.providedBy(sch): raise ValueError( "triggered scheduler is not ITriggerableScheduler: %r" % (name,)) return sch # This customization endpoint allows users to dynamically select which # scheduler and properties to trigger def getSchedulersAndProperties(self): return [{ 'sched_name': sched, 'props_to_set': self.set_properties, 'unimportant': sched in self.unimportantSchedulerNames} for sched in self.schedulerNames] def prepareSourcestampListForTrigger(self): if self.sourceStamps: ss_for_trigger = {} for ss in self.sourceStamps: codebase = ss.get('codebase', '') assert codebase not in ss_for_trigger, "codebase specified multiple times" ss_for_trigger[codebase] = ss trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())] return trigger_values if self.alwaysUseLatest: return [] # start with the sourcestamps from current build ss_for_trigger = {} objs_from_build = self.build.getAllSourceStamps() for ss in objs_from_build: ss_for_trigger[ss.codebase] = ss.asDict() # overrule revision in sourcestamps with got revision if self.updateSourceStamp: got = self.getAllGotRevisions() for codebase in ss_for_trigger: if codebase in got: ss_for_trigger[codebase]['revision'] = got[codebase] trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())] return trigger_values def getAllGotRevisions(self): all_got_revisions = self.getProperty('got_revision', {}) # For backwards compatibility all_got_revisions is a string if codebases # are not used. Convert to the default internal type (dict) if not isinstance(all_got_revisions, dict): all_got_revisions = {'': all_got_revisions} return all_got_revisions @defer.inlineCallbacks def worstStatus(self, overall_results, rclist, unimportant_brids): for was_cb, results in rclist: if isinstance(results, tuple): results, brids_dict = results if not was_cb: yield self.addLogWithFailure(results) results = EXCEPTION # brids_dict.values() represents the list of brids kicked by a certain scheduler. # We want to ignore the result of ANY brid that was kicked off # by an UNimportant scheduler. 
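# (Added clarification.) brids_dict maps builder ids to the buildrequest ids
# created by a single triggered scheduler; if every one of those ids also
# appears in unimportant_brids, that scheduler's outcome is skipped and can
# never worsen overall_results.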
if set(unimportant_brids).issuperset(set(brids_dict.values())): continue overall_results = worst_status(overall_results, results) return overall_results @defer.inlineCallbacks def addBuildUrls(self, rclist): brids = {} for was_cb, results in rclist: if isinstance(results, tuple): results, brids = results builderNames = {} if was_cb: # errors were already logged in worstStatus for builderid, br in brids.items(): builds = yield self.master.db.builds.getBuilds(buildrequestid=br) for build in builds: builderid = build['builderid'] # When virtual builders are used, the builderid used for triggering # is not the same as the one that the build actually got if builderid not in builderNames: builderDict = yield self.master.data.get(("builders", builderid)) builderNames[builderid] = builderDict["name"] num = build['number'] url = self.master.status.getURLForBuild(builderid, num) yield self.addURL("%s: %s #%d" % (statusToString(build["results"]), builderNames[builderid], num), url) @defer.inlineCallbacks def run(self): schedulers_and_props = yield self.getSchedulersAndProperties() schedulers_and_props_list = [] # To be back compatible we need to differ between old and new style # schedulers_and_props can either consist of 2 elements tuple or # dictionary for element in schedulers_and_props: if isinstance(element, dict): schedulers_and_props_list = schedulers_and_props break # Old-style back compatibility: Convert tuple to dict and make # it important d = { 'sched_name': element[0], 'props_to_set': element[1], 'unimportant': False } schedulers_and_props_list.append(d) # post process the schedulernames, and raw properties # we do this out of the loop, as this can result in errors schedulers_and_props = [( self.getSchedulerByName(entry_dict['sched_name']), self.createTriggerProperties(entry_dict['props_to_set']), entry_dict['unimportant']) for entry_dict in schedulers_and_props_list] ss_for_trigger = self.prepareSourcestampListForTrigger() dl = [] triggeredNames = [] results = SUCCESS self.running = True unimportant_brids = [] for sch, props_to_set, unimportant in schedulers_and_props: idsDeferred, resultsDeferred = sch.trigger( waited_for=self.waitForFinish, sourcestamps=ss_for_trigger, set_props=props_to_set, parent_buildid=self.build.buildid, parent_relationship=self.parent_relationship ) # we are not in a hurry of starting all in parallel and managing # the deferred lists, just let the db writes be serial. 
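# (Added note.) sch.trigger() above returned two deferreds: idsDeferred fires
# with (bsid, brids) as soon as the buildset and its buildrequests have been
# written to the database, while resultsDeferred (collected in `dl`) only
# fires when the triggered builds finish and is awaited further down when
# waitForFinish is set.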
brids = {} try: bsid, brids = yield idsDeferred except Exception as e: yield self.addLogWithException(e) results = EXCEPTION if unimportant: unimportant_brids.extend(brids.values()) self.brids.extend(brids.values()) for brid in brids.values(): # put the url to the brids, so that we can have the status from # the beginning url = self.master.status.getURLForBuildrequest(brid) yield self.addURL("%s #%d" % (sch.name, brid), url) dl.append(resultsDeferred) triggeredNames.append(sch.name) if self.ended: return CANCELLED self.triggeredNames = triggeredNames if self.waitForFinish: self.waitForFinishDeferred = defer.DeferredList(dl, consumeErrors=1) try: rclist = yield self.waitForFinishDeferred except defer.CancelledError: pass # we were interrupted, don't bother update status if self.ended: return CANCELLED yield self.addBuildUrls(rclist) results = yield self.worstStatus(results, rclist, unimportant_brids) else: # do something to handle errors for d in dl: d.addErrback(log.err, '(ignored) while invoking Triggerable schedulers:') return results def getResultSummary(self): if self.ended: return {'step': 'interrupted'} return {'step': self.getCurrentSummary()['step']} if self.triggeredNames else {} def getCurrentSummary(self): if not self.triggeredNames: return {'step': 'running'} return {'step': 'triggered %s' % (', '.join(self.triggeredNames))} buildbot-2.6.0/master/buildbot/steps/vstudio.py000066400000000000000000000413301361162603000216260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Visual studio steps import re from buildbot import config from buildbot.process.buildstep import LogLineObserver from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps.shell import ShellCommand def addEnvPath(env, name, value): """ concat a path for this name """ try: oldval = env[name] if not oldval.endswith(';'): oldval = oldval + ';' except KeyError: oldval = "" if not value.endswith(';'): value = value + ';' env[name] = oldval + value class MSLogLineObserver(LogLineObserver): stdoutDelimiter = "\r\n" stderrDelimiter = "\r\n" _re_delimiter = re.compile(r'^(\d+>)?-{5}.+-{5}$') _re_file = re.compile(r'^(\d+>)?[^ ]+\.(cpp|c)$') _re_warning = re.compile(r' ?: warning [A-Z]+[0-9]+:') _re_error = re.compile(r' ?error ([A-Z]+[0-9]+)?\s?: ') nbFiles = 0 nbProjects = 0 nbWarnings = 0 nbErrors = 0 logwarnings = None logerrors = None def __init__(self, logwarnings, logerrors, **kwargs): super().__init__(**kwargs) self.logwarnings = logwarnings self.logerrors = logerrors def outLineReceived(self, line): if self._re_delimiter.search(line): self.nbProjects += 1 self.logwarnings.addStdout("%s\n" % line) self.logerrors.addStdout("%s\n" % line) self.step.setProgress('projects', self.nbProjects) elif self._re_file.search(line): self.nbFiles += 1 self.step.setProgress('files', self.nbFiles) elif self._re_warning.search(line): self.nbWarnings += 1 self.logwarnings.addStdout("%s\n" % line) self.step.setProgress('warnings', self.nbWarnings) elif self._re_error.search("%s\n" % line): # error has no progress indication self.nbErrors += 1 self.logerrors.addStderr("%s\n" % line) class VisualStudio(ShellCommand): # an *abstract* base class, which will not itself work as a buildstep name = "compile" description = "compiling" descriptionDone = "compile" progressMetrics = (ShellCommand.progressMetrics + ('projects', 'files', 'warnings',)) logobserver = None installdir = None default_installdir = None # One of build, or rebuild mode = "rebuild" projectfile = None config = None useenv = False project = None PATH = [] INCLUDE = [] LIB = [] renderables = ['projectfile', 'config', 'project', 'mode'] def __init__(self, installdir=None, mode="rebuild", projectfile=None, config='release', useenv=False, project=None, INCLUDE=None, LIB=None, PATH=None, **kwargs): if INCLUDE is None: INCLUDE = [] if LIB is None: LIB = [] if PATH is None: PATH = [] self.installdir = installdir self.mode = mode self.projectfile = projectfile self.config = config self.useenv = useenv self.project = project if INCLUDE: self.INCLUDE = INCLUDE self.useenv = True if LIB: self.LIB = LIB self.useenv = True if PATH: self.PATH = PATH # always upcall ! 
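# Illustrative configuration sketch (added comment): in practice one of the
# concrete subclasses below is used in a master.cfg, for instance
#   steps.VS2013(projectfile="project.sln", config="Release",
#                project="MyProject", arch="x64")
# (the file, project and arch values here are hypothetical; `arch` is only
# honoured by VC8 and later).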
super().__init__(**kwargs) def setupLogfiles(self, cmd, logfiles): logwarnings = self.addLog("warnings") logerrors = self.addLog("errors") self.logobserver = MSLogLineObserver(logwarnings, logerrors) self.addLogObserver('stdio', self.logobserver) super().setupLogfiles(cmd, logfiles) def setupInstalldir(self): if not self.installdir: self.installdir = self.default_installdir def setupEnvironment(self, cmd): super().setupEnvironment(cmd) if cmd.args['env'] is None: cmd.args['env'] = {} # setup the custom one, those one goes first for path in self.PATH: addEnvPath(cmd.args['env'], "PATH", path) for path in self.INCLUDE: addEnvPath(cmd.args['env'], "INCLUDE", path) for path in self.LIB: addEnvPath(cmd.args['env'], "LIB", path) self.setupInstalldir() def describe(self, done=False): description = super().describe(done) if done: if not description: description = ['compile'] description.append( '%d projects' % self.getStatistic('projects', 0)) description.append('%d files' % self.getStatistic('files', 0)) warnings = self.getStatistic('warnings', 0) if warnings > 0: description.append('%d warnings' % warnings) errors = self.getStatistic('errors', 0) if errors > 0: description.append('%d errors' % errors) return description def createSummary(self, log): self.setStatistic('projects', self.logobserver.nbProjects) self.setStatistic('files', self.logobserver.nbFiles) self.setStatistic('warnings', self.logobserver.nbWarnings) self.setStatistic('errors', self.logobserver.nbErrors) def evaluateCommand(self, cmd): if cmd.didFail(): return FAILURE if self.logobserver.nbErrors > 0: return FAILURE if self.logobserver.nbWarnings > 0: return WARNINGS return SUCCESS def finished(self, result): self.getLog("warnings").finish() self.getLog("errors").finish() super().finished(result) class VC6(VisualStudio): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio' def setupEnvironment(self, cmd): super().setupEnvironment(cmd) # Root of Visual Developer Studio Common files. 
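# (Added note.) With the default installdir these resolve to paths such as
# "C:\Program Files\Microsoft Visual Studio\Common" and "...\VC98"; the
# additions below roughly mirror what a vcvars-style batch file would put on
# PATH, INCLUDE and LIB for Visual C++ 6.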
VSCommonDir = self.installdir + '\\Common' MSVCDir = self.installdir + '\\VC98' MSDevDir = VSCommonDir + '\\msdev98' addEnvPath(cmd.args['env'], "PATH", MSDevDir + '\\BIN') addEnvPath(cmd.args['env'], "PATH", MSVCDir + '\\BIN') addEnvPath(cmd.args['env'], "PATH", VSCommonDir + '\\TOOLS\\WINNT') addEnvPath(cmd.args['env'], "PATH", VSCommonDir + '\\TOOLS') addEnvPath(cmd.args['env'], "INCLUDE", MSVCDir + '\\INCLUDE') addEnvPath(cmd.args['env'], "INCLUDE", MSVCDir + '\\ATL\\INCLUDE') addEnvPath(cmd.args['env'], "INCLUDE", MSVCDir + '\\MFC\\INCLUDE') addEnvPath(cmd.args['env'], "LIB", MSVCDir + '\\LIB') addEnvPath(cmd.args['env'], "LIB", MSVCDir + '\\MFC\\LIB') def start(self): command = [ "msdev", self.projectfile, "/MAKE" ] if self.project is not None: command.append(self.project + " - " + self.config) else: command.append("ALL - " + self.config) if self.mode == "rebuild": command.append("/REBUILD") elif self.mode == "clean": command.append("/CLEAN") else: command.append("/BUILD") if self.useenv: command.append("/USEENV") self.setCommand(command) return super().start() class VC7(VisualStudio): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio .NET 2003' def setupEnvironment(self, cmd): super().setupEnvironment(cmd) VSInstallDir = self.installdir + '\\Common7\\IDE' VCInstallDir = self.installdir MSVCDir = self.installdir + '\\VC7' addEnvPath(cmd.args['env'], "PATH", VSInstallDir) addEnvPath(cmd.args['env'], "PATH", MSVCDir + '\\BIN') addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\Common7\\Tools') addEnvPath( cmd.args['env'], "PATH", VCInstallDir + '\\Common7\\Tools\\bin') addEnvPath(cmd.args['env'], "INCLUDE", MSVCDir + '\\INCLUDE') addEnvPath(cmd.args['env'], "INCLUDE", MSVCDir + '\\ATLMFC\\INCLUDE') addEnvPath( cmd.args['env'], "INCLUDE", MSVCDir + '\\PlatformSDK\\include') addEnvPath( cmd.args['env'], "INCLUDE", VCInstallDir + '\\SDK\\v1.1\\include') addEnvPath(cmd.args['env'], "LIB", MSVCDir + '\\LIB') addEnvPath(cmd.args['env'], "LIB", MSVCDir + '\\ATLMFC\\LIB') addEnvPath(cmd.args['env'], "LIB", MSVCDir + '\\PlatformSDK\\lib') addEnvPath(cmd.args['env'], "LIB", VCInstallDir + '\\SDK\\v1.1\\lib') def start(self): command = [ "devenv.com", self.projectfile ] if self.mode == "rebuild": command.append("/Rebuild") elif self.mode == "clean": command.append("/Clean") else: command.append("/Build") command.append(self.config) if self.useenv: command.append("/UseEnv") if self.project is not None: command.append("/Project") command.append(self.project) self.setCommand(command) return super().start() # alias VC7 as VS2003 VS2003 = VC7 class VC8(VC7): # Our ones arch = None default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 8' renderables = ['arch'] def __init__(self, arch="x86", **kwargs): self.arch = arch # always upcall ! super().__init__(**kwargs) def setupEnvironment(self, cmd): # Do not use super() here. We want to override VC7.setupEnvironment(). 
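# (Added clarification.) Calling VisualStudio.setupEnvironment() directly
# skips VC7's PATH/INCLUDE/LIB additions, because the VS2005 directory layout
# (Common7, VC, PlatformSDK, SDK\v2.0) replaces the VC7-era paths rather than
# extending them; the appropriate entries are re-added below.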
VisualStudio.setupEnvironment(self, cmd) VSInstallDir = self.installdir VCInstallDir = self.installdir + '\\VC' addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\IDE') if self.arch == "x64": addEnvPath( cmd.args['env'], "PATH", VCInstallDir + '\\BIN\\x86_amd64') addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\BIN') addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\Tools') addEnvPath( cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\Tools\\bin') addEnvPath( cmd.args['env'], "PATH", VCInstallDir + '\\PlatformSDK\\bin') addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\SDK\\v2.0\\bin') addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\VCPackages') addEnvPath(cmd.args['env'], "PATH", r'${PATH}') addEnvPath(cmd.args['env'], "INCLUDE", VCInstallDir + '\\INCLUDE') addEnvPath( cmd.args['env'], "INCLUDE", VCInstallDir + '\\ATLMFC\\include') addEnvPath( cmd.args['env'], "INCLUDE", VCInstallDir + '\\PlatformSDK\\include') archsuffix = '' if self.arch == "x64": archsuffix = '\\amd64' addEnvPath(cmd.args['env'], "LIB", VCInstallDir + '\\LIB' + archsuffix) addEnvPath( cmd.args['env'], "LIB", VCInstallDir + '\\ATLMFC\\LIB' + archsuffix) addEnvPath( cmd.args['env'], "LIB", VCInstallDir + '\\PlatformSDK\\lib' + archsuffix) addEnvPath( cmd.args['env'], "LIB", VSInstallDir + '\\SDK\\v2.0\\lib' + archsuffix) # alias VC8 as VS2005 VS2005 = VC8 class VCExpress9(VC8): def start(self): command = [ "vcexpress", self.projectfile ] if self.mode == "rebuild": command.append("/Rebuild") elif self.mode == "clean": command.append("/Clean") else: command.append("/Build") command.append(self.config) if self.useenv: command.append("/UseEnv") if self.project is not None: command.append("/Project") command.append(self.project) self.setCommand(command) # Do not use super() here. We want to override VC7.start(). return VisualStudio.start(self) # Add first support for VC9 (Same as VC8, with a different installdir) class VC9(VC8): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 9.0' VS2008 = VC9 # VC10 doesn't look like it needs extra stuff. class VC10(VC9): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 10.0' VS2010 = VC10 # VC11 doesn't look like it needs extra stuff. class VC11(VC10): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 11.0' VS2012 = VC11 # VC12 doesn't look like it needs extra stuff. class VC12(VC11): default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 12.0' VS2013 = VC12 # VC14 doesn't look like it needs extra stuff. class VC14(VC12): default_installdir = 'C:\\Program Files (x86)\\Microsoft Visual Studio 14.0' VS2015 = VC14 class VC141(VC14): default_installdir = r"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community" VS2017 = VC141 class MsBuild4(VisualStudio): platform = None vcenv_bat = r"${VS110COMNTOOLS}..\..\VC\vcvarsall.bat" renderables = ['platform'] def __init__(self, platform, **kwargs): self.platform = platform super().__init__(**kwargs) def setupEnvironment(self, cmd): super().setupEnvironment(cmd) cmd.args['env']['VCENV_BAT'] = self.vcenv_bat def describe(self, done=False): rv = [] if done: rv.append("built") else: rv.append("building") if self.project is not None: rv.append("%s for" % (self.project)) else: rv.append("solution for") rv.append("%s|%s" % (self.config, self.platform)) return rv def start(self): if self.platform is None: config.error( 'platform is mandatory. 
Please specify a string such as "Win32"') command = ('"%%VCENV_BAT%%" x86 && msbuild "%s" /p:Configuration="%s" /p:Platform="%s" /maxcpucount' % (self.projectfile, self.config, self.platform)) if self.project is not None: command += ' /t:"%s"' % (self.project) elif self.mode == "build": command += ' /t:Build' elif self.mode == "clean": command += ' /t:Clean' elif self.mode == "rebuild": command += ' /t:Rebuild' self.setCommand(command) return super().start() MsBuild = MsBuild4 class MsBuild12(MsBuild4): vcenv_bat = r"${VS120COMNTOOLS}..\..\VC\vcvarsall.bat" class MsBuild14(MsBuild4): vcenv_bat = r"${VS140COMNTOOLS}..\..\VC\vcvarsall.bat" class MsBuild141(VisualStudio): platform = None vcenv_bat = r"\\VC\\Auxiliary\\Build\vcvarsall.bat" renderables = ['platform'] def __init__(self, platform, **kwargs): self.platform = platform super().__init__(**kwargs) def setupEnvironment(self, cmd): super().setupEnvironment(cmd) cmd.args['env']['VCENV_BAT'] = self.vcenv_bat addEnvPath(cmd.args['env'], "PATH", 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\') addEnvPath(cmd.args['env'], "PATH", r'${PATH}') def describe(self, done=False): rv = [] if done: rv.append("built") else: rv.append("building") if self.project is not None: rv.append("%s for" % (self.project)) else: rv.append("solution for") rv.append("%s|%s" % (self.config, self.platform)) return rv def start(self): if self.platform is None: config.error( 'platform is mandatory. Please specify a string such as "Win32"') command = ('FOR /F "tokens=*" %%%%I in (\'vswhere.exe -property installationPath\') do "%%%%I\\%%VCENV_BAT%%" x86 ' '&& msbuild "%s" /p:Configuration="%s" /p:Platform="%s" /maxcpucount' % (self.projectfile, self.config, self.platform)) if self.project is not None: command += ' /t:"%s"' % (self.project) elif self.mode == "build": command += ' /t:Build' elif self.mode == "clean": command += ' /t:Clean' elif self.mode == "rebuild": command += ' /t:Rebuild' self.setCommand(command) return super().start() buildbot-2.6.0/master/buildbot/steps/worker.py000066400000000000000000000270241361162603000214460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.util import bytes2unicode class WorkerBuildStep(buildstep.BuildStep): pass class SetPropertiesFromEnv(WorkerBuildStep): """ Sets properties from environment variables on the worker. 
Note this is transferred when the worker first connects """ name = 'SetPropertiesFromEnv' description = ['Setting'] descriptionDone = ['Set'] def __init__(self, variables, source="WorkerEnvironment", **kwargs): super().__init__(**kwargs) self.variables = variables self.source = source def start(self): # on Windows, environment variables are case-insensitive, but we have # a case-sensitive dictionary in worker_environ. Fortunately, that # dictionary is also folded to uppercase, so we can simply fold the # variable names to uppercase to duplicate the case-insensitivity. fold_to_uppercase = (self.worker.worker_system == 'win32') properties = self.build.getProperties() environ = self.worker.worker_environ variables = self.variables log = [] if isinstance(variables, str): variables = [self.variables] for variable in variables: key = variable if fold_to_uppercase: key = variable.upper() value = environ.get(key, None) if value: # note that the property is not uppercased properties.setProperty(variable, value, self.source, runtime=True) log.append("%s = %r" % (variable, value)) self.addCompleteLog("properties", "\n".join(log)) self.finished(SUCCESS) class FileExists(WorkerBuildStep): """ Check for the existence of a file on the worker. """ name = 'FileExists' renderables = ['file'] haltOnFailure = True flunkOnFailure = True def __init__(self, file, **kwargs): super().__init__(**kwargs) self.file = file def start(self): self.checkWorkerHasCommand('stat') cmd = remotecommand.RemoteCommand('stat', {'file': self.file}) d = self.runCommand(cmd) d.addCallback(lambda res: self.commandComplete(cmd)) d.addErrback(self.failed) def commandComplete(self, cmd): if cmd.didFail(): self.descriptionDone = ["File not found."] self.finished(FAILURE) return s = cmd.updates["stat"][-1] if stat.S_ISREG(s[stat.ST_MODE]): self.descriptionDone = ["File found."] self.finished(SUCCESS) else: self.descriptionDone = ["Not a file."] self.finished(FAILURE) class CopyDirectory(WorkerBuildStep): """ Copy a directory tree on the worker. """ name = 'CopyDirectory' description = ['Copying'] descriptionDone = ['Copied'] renderables = ['src', 'dest'] haltOnFailure = True flunkOnFailure = True def __init__(self, src, dest, timeout=None, maxTime=None, **kwargs): super().__init__(**kwargs) self.src = src self.dest = dest self.timeout = timeout self.maxTime = maxTime def start(self): self.checkWorkerHasCommand('cpdir') args = {'fromdir': self.src, 'todir': self.dest} if self.timeout: args['timeout'] = self.timeout if self.maxTime: args['maxTime'] = self.maxTime cmd = remotecommand.RemoteCommand('cpdir', args) d = self.runCommand(cmd) d.addCallback(lambda res: self.commandComplete(cmd)) d.addErrback(self.failed) def commandComplete(self, cmd): if cmd.didFail(): self.step_status.setText(["Copying", self.src, "to", self.dest, "failed."]) self.finished(FAILURE) return self.step_status.setText(self.describe(done=True)) self.finished(SUCCESS) # TODO: BuildStep subclasses don't have a describe().... def getResultSummary(self): src = bytes2unicode(self.src, errors='replace') dest = bytes2unicode(self.dest, errors='replace') copy = "{} to {}".format(src, dest) if self.results == SUCCESS: rv = 'Copied ' + copy else: rv = 'Copying ' + copy + ' failed.' return {'step': rv} class RemoveDirectory(WorkerBuildStep): """ Remove a directory tree on the worker. 
""" name = 'RemoveDirectory' description = ['Deleting'] descriptionDone = ['Deleted'] renderables = ['dir'] haltOnFailure = True flunkOnFailure = True def __init__(self, dir, **kwargs): super().__init__(**kwargs) self.dir = dir def start(self): self.checkWorkerHasCommand('rmdir') cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.dir}) d = self.runCommand(cmd) d.addCallback(lambda res: self.commandComplete(cmd)) d.addErrback(self.failed) def commandComplete(self, cmd): if cmd.didFail(): self.step_status.setText(["Delete failed."]) self.finished(FAILURE) return self.finished(SUCCESS) class MakeDirectory(WorkerBuildStep): """ Create a directory on the worker. """ name = 'MakeDirectory' description = ['Creating'] descriptionDone = ['Created'] renderables = ['dir'] haltOnFailure = True flunkOnFailure = True def __init__(self, dir, **kwargs): super().__init__(**kwargs) self.dir = dir def start(self): self.checkWorkerHasCommand('mkdir') cmd = remotecommand.RemoteCommand('mkdir', {'dir': self.dir}) d = self.runCommand(cmd) d.addCallback(lambda res: self.commandComplete(cmd)) d.addErrback(self.failed) def commandComplete(self, cmd): if cmd.didFail(): self.step_status.setText(["Create failed."]) self.finished(FAILURE) return self.finished(SUCCESS) class CompositeStepMixin(): def workerPathToMasterPath(self, path): return os.path.join(*self.worker.path_module.split(path)) def addLogForRemoteCommands(self, logname): """This method must be called by user classes composite steps could create several logs, this mixin functions will write to the last one. """ self.rc_log = self.addLog(logname) return self.rc_log def runRemoteCommand(self, cmd, args, abandonOnFailure=True, evaluateCommand=lambda cmd: cmd.didFail()): """generic RemoteCommand boilerplate""" cmd = remotecommand.RemoteCommand(cmd, args) if hasattr(self, "rc_log"): cmd.useLog(self.rc_log, False) d = self.runCommand(cmd) def commandComplete(cmd): if abandonOnFailure and cmd.didFail(): raise buildstep.BuildStepFailed() return evaluateCommand(cmd) d.addCallback(lambda res: commandComplete(cmd)) return d def runRmdir(self, dir, timeout=None, **kwargs): """ remove a directory from the worker """ cmd_args = {'dir': dir, 'logEnviron': self.logEnviron} if timeout: cmd_args['timeout'] = timeout return self.runRemoteCommand('rmdir', cmd_args, **kwargs) def runRmFile(self, path, timeout=None, **kwargs): """ remove a file from the worker """ cmd_args = {'path': path, 'logEnviron': self.logEnviron} if timeout: cmd_args['timeout'] = timeout if self.workerVersionIsOlderThan('rmfile', '3.1'): cmd_args['dir'] = os.path.abspath(path) return self.runRemoteCommand('rmdir', cmd_args, **kwargs) return self.runRemoteCommand('rmfile', cmd_args, **kwargs) def pathExists(self, path): """ test whether path exists""" def commandComplete(cmd): return not cmd.didFail() return self.runRemoteCommand('stat', {'file': path, 'logEnviron': self.logEnviron, }, abandonOnFailure=False, evaluateCommand=commandComplete) def runMkdir(self, _dir, **kwargs): """ create a directory and its parents""" return self.runRemoteCommand('mkdir', {'dir': _dir, 'logEnviron': self.logEnviron, }, **kwargs) def runGlob(self, path, **kwargs): """ find files matching a shell-style pattern""" def commandComplete(cmd): return cmd.updates['files'][-1] return self.runRemoteCommand('glob', {'path': path, 'logEnviron': self.logEnviron, }, evaluateCommand=commandComplete, **kwargs) def getFileContentFromWorker(self, filename, abandonOnFailure=False): self.checkWorkerHasCommand("uploadFile") fileWriter = 
remotetransfer.StringFileWriter() # default arguments args = { 'workdir': self.workdir, 'writer': fileWriter, 'maxsize': None, 'blocksize': 32 * 1024, } if self.workerVersionIsOlderThan('uploadFile', '3.0'): args['slavesrc'] = filename else: args['workersrc'] = filename def commandComplete(cmd): if cmd.didFail(): return None return fileWriter.buffer return self.runRemoteCommand('uploadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete) def downloadFileContentToWorker(self, workerdest, strfile, abandonOnFailure=False, mode=None, workdir=None): if workdir is None: workdir = self.workdir self.checkWorkerHasCommand("downloadFile") fileReader = remotetransfer.StringFileReader(strfile) # default arguments args = { 'workdir': workdir, 'maxsize': None, 'mode': mode, 'reader': fileReader, 'blocksize': 32 * 1024, } if self.workerVersionIsOlderThan('downloadFile', '3.0'): args['slavedest'] = workerdest else: args['workerdest'] = workerdest def commandComplete(cmd): if cmd.didFail(): return None return fileReader return self.runRemoteCommand('downloadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete) buildbot-2.6.0/master/buildbot/test/000077500000000000000000000000001361162603000173775ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/__init__.py000066400000000000000000000117271361162603000215200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import warnings from distutils.version import LooseVersion from buildbot import monkeypatches # import mock so we bail out early if it's not installed try: import mock [mock] except ImportError: raise ImportError("\nBuildbot tests require the 'mock' module; " "try 'pip install mock'") # apply the same patches the buildmaster does when it starts monkeypatches.patch_all(for_tests=True) # enable deprecation warnings warnings.filterwarnings('always', category=DeprecationWarning) if LooseVersion(mock.__version__) < LooseVersion("0.8"): raise ImportError("\nBuildbot tests require mock version 0.8.0 or " "higher; try 'pip install -U mock'") # All deprecated modules should be loaded, consider future warnings in tests as errors. # In order to not pollute the test outputs, # warnings in tests shall be forcefully tested with assertProducesWarning, # or shutdown using the warning module warnings.filterwarnings('error') # if buildbot_worker is installed in pip install -e mode, then the docker directory will # match "import docker", and produce a warning. # We just suppress this warning instead of doing silly workaround. 
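# --- Illustrative sketch (added commentary, not part of the original filter
# list): the policy in this module is "escalate every warning to an error so
# tests fail loudly, then punch explicit holes for third-party noise we cannot
# fix".  The never-called helper below shows that pattern with a made-up
# message; the real, project-specific filters continue right below, starting
# with the docker one described above.
def _example_warning_policy():
    # escalate everything first ...
    warnings.filterwarnings('error')
    # ... then selectively ignore a known-noisy message from a dependency
    # (the message text here is hypothetical)
    warnings.filterwarnings('ignore', r"some noisy dependency message",
                            category=DeprecationWarning)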
warnings.filterwarnings('ignore', "Not importing directory.*docker': missing __init__.py", category=ImportWarning) # FIXME: needs to be sorted out (#3666) warnings.filterwarnings('ignore', "1300 Invalid utf8 character string") # twisted.compat.execfile is using 'U' https://twistedmatrix.com/trac/ticket/9023 warnings.filterwarnings('ignore', "'U' mode is deprecated", DeprecationWarning) # sqlalchemy.migrate is calling inspect.getargspec() # https://bugs.launchpad.net/sqlalchemy-migrate/+bug/1662472 warnings.filterwarnings('ignore', r"inspect.getargspec\(\) is deprecated") # twisted.python.filepath and trial are using bytes file paths when # the "native" file path (Unicode) should be used on Windows. warnings.filterwarnings('ignore', "The Windows bytes API has been " "deprecated, use Unicode filenames instead") # moto warning v1.0.0 warnings.filterwarnings('ignore', "Flags not at the start of the expression") warnings.filterwarnings('ignore', r"object\(\) takes no parameters") # this warning happens sometimes on python3.4 warnings.filterwarnings('ignore', r"The value of convert_charrefs will become True in 3.5") # Twisted 18.4+ adds a deprecation warning and still use the deprecated API in its own code! warnings.filterwarnings('ignore', ".*getClientIP was deprecated.*", DeprecationWarning) # Python 3.7 adds a deprecation warning formatargspec. # The signature api that replaces it is not available in 2.7 warnings.filterwarnings('ignore', ".*`formatargspec` is deprecated.*", DeprecationWarning) # Python 3.7 adds a deprecation importing ABCs from collection. # Such imports are made in dependencies (e.g moto, werzeug, pyparsing) warnings.filterwarnings('ignore', ".*Using or importing the ABCs from 'collections'.*", DeprecationWarning) # more 3.7 warning from moto warnings.filterwarnings('ignore', r".*Use 'list\(elem\)' or iteration over elem instead.*", DeprecationWarning) # ignore ResourceWarnings for unclosed sockets for the pg8000 driver on Python 3+ (tech debt: #4508) if sys.version_info[0] >= 3 and "pg8000" in os.getenv("BUILDBOT_TEST_DB_URL", ""): warnings.filterwarnings('ignore', ".*unclosed .*socket", ResourceWarning) # Python 3.5 on CircleCI shows this warning warnings.filterwarnings('ignore', ".*the imp module is deprecated in favour of importlib*") # sqlalchemy-migrate uses deprecated api from sqlalchemy https://review.openstack.org/#/c/648072/ warnings.filterwarnings('ignore', ".*Engine.contextual_connect.*", DeprecationWarning) # ignore an attrs API warning for APIs used in dependencies warnings.filterwarnings('ignore', ".*The usage of `cmp` is deprecated and will be removed " "on or after.*", DeprecationWarning) # ignore a warning emitted by pkg_resources when importing certain namespace packages warnings.filterwarnings('ignore', ".*Not importing directory .*/zope: missing __init__", category=ImportWarning) warnings.filterwarnings('ignore', ".*Not importing directory .*/sphinxcontrib: missing __init__", category=ImportWarning) buildbot-2.6.0/master/buildbot/test/fake/000077500000000000000000000000001361162603000203055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/fake/__init__.py000066400000000000000000000000001361162603000224040ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/fake/botmaster.py000066400000000000000000000032721361162603000226630ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import botmaster from buildbot.util import service class FakeBotMaster(service.AsyncMultiService, botmaster.LockRetrieverMixin): def __init__(self): super().__init__() self.setName("fake-botmaster") self.builders = {} # dictionary mapping worker names to builders self.buildsStartedForWorkers = [] self.delayShutdown = False def getBuildersForWorker(self, workername): return self.builders.get(workername, []) def maybeStartBuildsForWorker(self, workername): self.buildsStartedForWorkers.append(workername) def maybeStartBuildsForAllBuilders(self): self.buildsStartedForWorkers += self.builders.keys() def workerLost(self, bot): pass def cleanShutdown(self, quickMode=False, stopReactor=True): self.shuttingDown = True if self.delayShutdown: self.shutdownDeferred = defer.Deferred() return self.shutdownDeferred buildbot-2.6.0/master/buildbot/test/fake/bworkermanager.py000066400000000000000000000052131361162603000236660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class FakeWorkerManager(service.AsyncMultiService): def __init__(self): super().__init__() self.setName('workers') # WorkerRegistration instances keyed by worker name self.registrations = {} # connection objects keyed by worker name self.connections = {} # self.workers contains a ready Worker instance for each # potential worker, i.e. all the ones listed in the config file. # If the worker is connected, self.workers[workername].worker will # contain a RemoteReference to their Bot instance. If it is not # connected, that attribute will hold None. 
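        # For example (hypothetical values, illustration only):
        #   self.workers == {'local1': <Worker 'local1'>}
        #   self.workers['local1'].worker -> a RemoteReference while that
        #   worker is connected, None while it is disconnected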
self.workers = {} # maps workername to Worker def register(self, worker): workerName = worker.workername reg = FakeWorkerRegistration(worker) self.registrations[workerName] = reg return defer.succeed(reg) def _unregister(self, registration): del self.registrations[registration.worker.workername] def getWorkerByName(self, workerName): return self.registrations[workerName].worker def newConnection(self, conn, workerName): assert workerName not in self.connections self.connections[workerName] = conn conn.info = {} def remove(): del self.connections[workerName] return defer.succeed(True) class FakeWorkerRegistration: def __init__(self, worker): self.updates = [] self.unregistered = False self.worker = worker def getPBPort(self): return 1234 def unregister(self): assert not self.unregistered, "called twice" self.unregistered = True return defer.succeed(None) def update(self, worker_config, global_config): if worker_config.workername not in self.updates: self.updates.append(worker_config.workername) return defer.succeed(None) buildbot-2.6.0/master/buildbot/test/fake/change.py000066400000000000000000000021741361162603000221100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.process.properties import Properties from buildbot.test.fake.state import State class Change(State): project = '' repository = '' branch = '' category = '' codebase = '' properties = {} def __init__(self, **kw): super().__init__(**kw) # change.properties is a IProperties props = Properties() props.update(self.properties, "test") self.properties = props buildbot-2.6.0/master/buildbot/test/fake/docker.py000066400000000000000000000066561361162603000221430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members version = "1.10.6" class Client: latest = None def __init__(self, base_url): Client.latest = self self.call_args_create_container = [] self.call_args_create_host_config = [] self.called_class_name = None self._images = [ {'RepoTags': ['busybox:latest', 'worker:latest', 'tester:latest']}] self._pullable = ['alpine:latest', 'tester:latest'] self._pullCount = 0 self._containers = {} def images(self): return self._images def start(self, container): pass def stop(self, id): pass def wait(self, id): return 0 def build(self, fileobj, tag): if fileobj.read() == b'BUG': pass else: logs = [] for line in logs: yield line self._images.append({'RepoTags': [tag + ':latest']}) def pull(self, image, *args, **kwargs): if image in self._pullable: self._pullCount += 1 self._images.append({'RepoTags': [image]}) def containers(self, filters=None, *args, **kwargs): if filters is not None: if 'existing' in filters.get('name', ''): self.create_container( image='busybox:latest', name="buildbot-existing-87de7e" ) self.create_container( image='busybox:latest', name="buildbot-existing-87de7ef" ) return [ c for c in self._containers.values() if c['name'].startswith(filters['name']) ] return self._containers.values() def create_host_config(self, *args, **kwargs): self.call_args_create_host_config.append(kwargs) def create_container(self, image, *args, **kwargs): self.call_args_create_container.append(kwargs) self.called_class_name = self.__class__.__name__ name = kwargs.get('name', None) if 'buggy' in image: raise Exception('we could not create this container') for c in self._containers.values(): if c['name'] == name: raise Exception('cannot create with same name') ret = { 'Id': '8a61192da2b3bb2d922875585e29b74ec0dc4e0117fcbf84c962204e97564cd7', 'Warnings': None } self._containers[ret['Id']] = { 'started': False, 'image': image, 'Id': ret['Id'], 'name': name, # docker does not return this 'Names': [name] # this what docker returns } return ret def remove_container(self, id, **kwargs): del self._containers[id] class APIClient(Client): pass buildbot-2.6.0/master/buildbot/test/fake/endpoint.py000066400000000000000000000057131361162603000225050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This is a static resource type and set of endpoints used as common data by # tests. 
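# --- Illustrative sketch (not part of the original test data) ---------------
# The endpoints below expose the static ``testData`` dictionary to data-API
# tests; the collection endpoint returns its rows sorted by ``id`` so test
# output stays stable.  The hypothetical helper below (made-up name and
# sample rows) demonstrates that sorting pattern in isolation.
def _example_sorted_rows(rows):
    # order row dicts by their integer 'id' key, mirroring TestsEndpoint.get()
    # further down in this module
    return sorted(rows.values(), key=lambda v: v['id'])
# e.g. _example_sorted_rows({2: {'id': 2}, 1: {'id': 1}}) == [{'id': 1}, {'id': 2}]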
from twisted.internet import defer from buildbot.data import base from buildbot.data import types testData = { 13: {'id': 13, 'info': 'ok', 'success': True, 'tags': []}, 14: {'id': 14, 'info': 'failed', 'success': False, 'tags': []}, 15: {'id': 15, 'info': 'warned', 'success': True, 'tags': ['a', 'b', ]}, 16: {'id': 16, 'info': 'skipped', 'success': True, 'tags': ['a']}, 17: {'id': 17, 'info': 'ignored', 'success': True, 'tags': []}, 18: {'id': 18, 'info': 'unexp', 'success': False, 'tags': []}, 19: {'id': 19, 'info': 'todo', 'success': True, 'tags': []}, 20: {'id': 20, 'info': 'error', 'success': False, 'tags': []}, } class TestsEndpoint(base.Endpoint): isCollection = True pathPatterns = "/test" def get(self, resultSpec, kwargs): # results are sorted by ID for test stability return defer.succeed(sorted(testData.values(), key=lambda v: v['id'])) class RawTestsEndpoint(base.Endpoint): isCollection = False isRaw = True pathPatterns = "/rawtest" def get(self, resultSpec, kwargs): return defer.succeed({ "filename": "test.txt", "mime-type": "text/test", 'raw': 'value' }) class FailEndpoint(base.Endpoint): isCollection = False pathPatterns = "/test/fail" def get(self, resultSpec, kwargs): return defer.fail(RuntimeError('oh noes')) class TestEndpoint(base.Endpoint): isCollection = False pathPatterns = "/test/n:testid" def get(self, resultSpec, kwargs): if kwargs['testid'] == 0: return None return defer.succeed(testData[kwargs['testid']]) def control(self, action, args, kwargs): if action == "fail": return defer.fail(RuntimeError("oh noes")) return defer.succeed({'action': action, 'args': args, 'kwargs': kwargs}) class Test(base.ResourceType): name = "test" plural = "tests" endpoints = [TestsEndpoint, TestEndpoint, FailEndpoint, RawTestsEndpoint] class EntityType(types.Entity): id = types.Integer() info = types.String() success = types.Boolean() tags = types.List(of=types.String()) entityType = EntityType(name) buildbot-2.6.0/master/buildbot/test/fake/fakebuild.py000066400000000000000000000076561361162603000226230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import posixpath import mock from twisted.python import components from buildbot import config from buildbot import interfaces from buildbot.process import factory from buildbot.process import properties from buildbot.process import workerforbuilder from buildbot.test.fake import fakemaster from buildbot.worker import base class FakeBuildStatus(properties.PropertiesMixin): def __init__(self): self.properties = properties.Properties() def getInterestedUsers(self): return [] def setWorkername(self, _): pass def setSourceStamps(self, _): pass def setReason(self, _): pass def setBlamelist(self, _): pass def buildStarted(self, _): return True setText = mock.Mock() setText2 = mock.Mock() setResults = mock.Mock() def buildFinished(self): pass getBuilder = mock.Mock() components.registerAdapter( lambda build_status: build_status.properties, FakeBuildStatus, interfaces.IProperties) class FakeWorkerStatus(properties.PropertiesMixin): def __init__(self, name): self.name = name self.info = properties.Properties() self.info.setProperty("test", "test", "Worker") class FakeBuild(properties.PropertiesMixin): def __init__(self, props=None, master=None): self.build_status = FakeBuildStatus() self.builder = fakemaster.FakeBuilderStatus(master) self.workerforbuilder = mock.Mock( spec=workerforbuilder.WorkerForBuilder) self.workerforbuilder.worker = mock.Mock(spec=base.Worker) self.workerforbuilder.worker.worker_status = FakeWorkerStatus("mock") self.builder.config = config.BuilderConfig( name='bldr', workernames=['a'], factory=factory.BuildFactory()) self.path_module = posixpath self.buildid = 92 self.number = 13 self.workdir = 'build' self.locks = [] self.sources = {} if props is None: props = properties.Properties() props.build = self self.build_status.properties = props self.properties = props self.master = None self.config_version = 0 def getSourceStamp(self, codebase): if codebase in self.sources: return self.sources[codebase] return None def getAllSourceStamps(self): return list(self.sources.values()) def allChanges(self): for s in self.sources.values(): for c in s.changes: yield c def allFiles(self): files = [] for c in self.allChanges(): for f in c.files: files.append(f) return files def getBuilder(self): return self.builder def getWorkerInfo(self): return self.workerforbuilder.worker.worker_status.info components.registerAdapter( lambda build: build.build_status.properties, FakeBuild, interfaces.IProperties) class FakeBuildForRendering: def render(self, r): if isinstance(r, str): return "rendered:" + r if isinstance(r, list): return list(self.render(i) for i in r) if isinstance(r, tuple): return tuple(self.render(i) for i in r) return r buildbot-2.6.0/master/buildbot/test/fake/fakedata.py000066400000000000000000000506531361162603000224300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.python import failure from buildbot.data import connector from buildbot.db.buildrequests import AlreadyClaimedError from buildbot.test.util import validation from buildbot.util import service class FakeUpdates(service.AsyncService): # unlike "real" update methods, all of the fake methods are here in a # single class. def __init__(self, testcase): self.testcase = testcase # test cases should assert the values here: self.changesAdded = [] # Changes are numbered starting at 1. # { name : id }; users can add changesources here self.changesourceIds = {} self.buildsetsAdded = [] # Buildsets are numbered starting at 1 self.maybeBuildsetCompleteCalls = 0 self.masterStateChanges = [] # dictionaries self.schedulerIds = {} # { name : id }; users can add schedulers here self.builderIds = {} # { name : id }; users can add schedulers here self.schedulerMasters = {} # { schedulerid : masterid } self.changesourceMasters = {} # { changesourceid : masterid } self.workerIds = {} # { name : id }; users can add workers here # { logid : {'finished': .., 'name': .., 'type': .., 'content': [ .. ]} } self.logs = {} self.claimedBuildRequests = set([]) self.stepStateString = {} # { stepid : string } self.stepUrls = {} # { stepid : [(name,url)] } self.properties = [] self.missingWorkers = [] # extra assertions def assertProperties(self, sourced, properties): self.testcase.assertIsInstance(properties, dict) for k, v in properties.items(): self.testcase.assertIsInstance(k, str) if sourced: self.testcase.assertIsInstance(v, tuple) self.testcase.assertEqual(len(v), 2) propval, propsrc = v self.testcase.assertIsInstance(propsrc, str) else: propval = v try: json.dumps(propval) except (TypeError, ValueError): self.testcase.fail("value for %s is not JSON-able" % (k,)) # update methods def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): if properties is None: properties = {} # double-check args, types, etc. 
if files is not None: self.testcase.assertIsInstance(files, list) map(lambda f: self.testcase.assertIsInstance(f, str), files) self.testcase.assertIsInstance(comments, (type(None), str)) self.testcase.assertIsInstance(author, (type(None), str)) self.testcase.assertIsInstance(committer, (type(None), str)) self.testcase.assertIsInstance(revision, (type(None), str)) self.testcase.assertIsInstance(when_timestamp, (type(None), int)) self.testcase.assertIsInstance(branch, (type(None), str)) if callable(category): pre_change = self.master.config.preChangeGenerator(author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when_timestamp, branch=branch, revlink=revlink, properties=properties, repository=repository, project=project) category = category(pre_change) self.testcase.assertIsInstance(category, (type(None), str)) self.testcase.assertIsInstance(revlink, (type(None), str)) self.assertProperties(sourced=False, properties=properties) self.testcase.assertIsInstance(repository, str) self.testcase.assertIsInstance(codebase, (type(None), str)) self.testcase.assertIsInstance(project, str) self.testcase.assertIsInstance(src, (type(None), str)) # use locals() to ensure we get all of the args and don't forget if # more are added self.changesAdded.append(locals()) self.changesAdded[-1].pop('self') return defer.succeed(len(self.changesAdded)) def masterActive(self, name, masterid): self.testcase.assertIsInstance(name, str) self.testcase.assertIsInstance(masterid, int) if masterid: self.testcase.assertEqual(masterid, 1) self.thisMasterActive = True return defer.succeed(None) def masterStopped(self, name, masterid): self.testcase.assertIsInstance(name, str) self.testcase.assertEqual(masterid, 1) self.thisMasterActive = False return defer.succeed(None) def expireMasters(self, forceHouseKeeping=False): return defer.succeed(None) @defer.inlineCallbacks def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): if sourcestamps is None: sourcestamps = [] if properties is None: properties = {} if builderids is None: builderids = [] # assert types self.testcase.assertIsInstance(scheduler, str) self.testcase.assertIsInstance(sourcestamps, list) for ss in sourcestamps: if not isinstance(ss, int) and not isinstance(ss, dict): self.testcase.fail("%s (%s) is not an integer or a dictionary" % (ss, type(ss))) del ss # since we use locals(), below self.testcase.assertIsInstance(reason, str) self.assertProperties(sourced=True, properties=properties) self.testcase.assertIsInstance(builderids, list) self.testcase.assertIsInstance(external_idstring, (type(None), str)) self.buildsetsAdded.append(locals()) self.buildsetsAdded[-1].pop('self') # call through to the db layer, since many scheduler tests expect to # find the buildset in the db later - TODO fix this! 
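        # (illustration only: such a test can later re-read the row, e.g.
        #  `bs = yield self.master.db.buildsets.getBuildset(bsid)`, and then
        #  assert on its fields)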
bsid, brids = yield self.master.db.buildsets.addBuildset( sourcestamps=sourcestamps, reason=reason, properties=properties, builderids=builderids, waited_for=waited_for, external_idstring=external_idstring, parent_buildid=parent_buildid, parent_relationship=parent_relationship) return (bsid, brids) def maybeBuildsetComplete(self, bsid): self.maybeBuildsetCompleteCalls += 1 return defer.succeed(None) @defer.inlineCallbacks def claimBuildRequests(self, brids, claimed_at=None): validation.verifyType(self.testcase, 'brids', brids, validation.ListValidator(validation.IntValidator())) validation.verifyType(self.testcase, 'claimed_at', claimed_at, validation.NoneOk(validation.DateTimeValidator())) if not brids: return True try: yield self.master.db.buildrequests.claimBuildRequests( brids=brids, claimed_at=claimed_at) except AlreadyClaimedError: return False self.claimedBuildRequests.update(set(brids)) return True @defer.inlineCallbacks def unclaimBuildRequests(self, brids): validation.verifyType(self.testcase, 'brids', brids, validation.ListValidator(validation.IntValidator())) self.claimedBuildRequests.difference_update(set(brids)) if brids: yield self.master.db.buildrequests.unclaimBuildRequests(brids) def completeBuildRequests(self, brids, results, complete_at=None): validation.verifyType(self.testcase, 'brids', brids, validation.ListValidator(validation.IntValidator())) validation.verifyType(self.testcase, 'results', results, validation.IntValidator()) validation.verifyType(self.testcase, 'complete_at', complete_at, validation.NoneOk(validation.DateTimeValidator())) return defer.succeed(True) def rebuildBuildrequest(self, buildrequest): return defer.succeed(None) def updateBuilderList(self, masterid, builderNames): self.testcase.assertEqual(masterid, self.master.masterid) for n in builderNames: self.testcase.assertIsInstance(n, str) self.builderNames = builderNames return defer.succeed(None) def updateBuilderInfo(self, builderid, description, tags): yield self.master.db.builders.updateBuilderInfo(builderid, description, tags) def masterDeactivated(self, masterid): return defer.succeed(None) def findSchedulerId(self, name): return self.master.db.schedulers.findSchedulerId(name) def forget_about_it(self, name): validation.verifyType(self.testcase, 'scheduler name', name, validation.StringValidator()) if name not in self.schedulerIds: self.schedulerIds[name] = max( [0] + list(self.schedulerIds.values())) + 1 return defer.succeed(self.schedulerIds[name]) def findChangeSourceId(self, name): validation.verifyType(self.testcase, 'changesource name', name, validation.StringValidator()) if name not in self.changesourceIds: self.changesourceIds[name] = max( [0] + list(self.changesourceIds.values())) + 1 return defer.succeed(self.changesourceIds[name]) def findBuilderId(self, name): validation.verifyType(self.testcase, 'builder name', name, validation.StringValidator()) return self.master.db.builders.findBuilderId(name) def trySetSchedulerMaster(self, schedulerid, masterid): currentMasterid = self.schedulerMasters.get(schedulerid) if isinstance(currentMasterid, Exception): return defer.fail(failure.Failure( currentMasterid)) if currentMasterid and masterid is not None: return defer.succeed(False) self.schedulerMasters[schedulerid] = masterid return defer.succeed(True) def trySetChangeSourceMaster(self, changesourceid, masterid): currentMasterid = self.changesourceMasters.get(changesourceid) if isinstance(currentMasterid, Exception): return defer.fail(failure.Failure( currentMasterid)) if currentMasterid 
and masterid is not None: return defer.succeed(False) self.changesourceMasters[changesourceid] = masterid return defer.succeed(True) def addBuild(self, builderid, buildrequestid, workerid): validation.verifyType(self.testcase, 'builderid', builderid, validation.IntValidator()) validation.verifyType(self.testcase, 'buildrequestid', buildrequestid, validation.IntValidator()) validation.verifyType(self.testcase, 'workerid', workerid, validation.IntValidator()) return defer.succeed((10, 1)) def generateNewBuildEvent(self, buildid): validation.verifyType(self.testcase, 'buildid', buildid, validation.IntValidator()) return defer.succeed(None) def setBuildStateString(self, buildid, state_string): validation.verifyType(self.testcase, 'buildid', buildid, validation.IntValidator()) validation.verifyType(self.testcase, 'state_string', state_string, validation.StringValidator()) return defer.succeed(None) def finishBuild(self, buildid, results): validation.verifyType(self.testcase, 'buildid', buildid, validation.IntValidator()) validation.verifyType(self.testcase, 'results', results, validation.IntValidator()) return defer.succeed(None) def setBuildProperty(self, buildid, name, value, source): validation.verifyType(self.testcase, 'buildid', buildid, validation.IntValidator()) validation.verifyType(self.testcase, 'name', name, validation.StringValidator()) try: json.dumps(value) except (TypeError, ValueError): self.testcase.fail("Value for %s is not JSON-able" % name) validation.verifyType(self.testcase, 'source', source, validation.StringValidator()) return defer.succeed(None) @defer.inlineCallbacks def setBuildProperties(self, buildid, properties): for k, v, s in properties.getProperties().asList(): self.properties.append((buildid, k, v, s)) yield self.setBuildProperty(buildid, k, v, s) def addStep(self, buildid, name): validation.verifyType(self.testcase, 'buildid', buildid, validation.IntValidator()) validation.verifyType(self.testcase, 'name', name, validation.IdentifierValidator(50)) return defer.succeed((10, 1, name)) def addStepURL(self, stepid, name, url): validation.verifyType(self.testcase, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.testcase, 'name', name, validation.StringValidator()) validation.verifyType(self.testcase, 'url', url, validation.StringValidator()) self.stepUrls.setdefault(stepid, []).append((name, url)) return defer.succeed(None) def startStep(self, stepid): validation.verifyType(self.testcase, 'stepid', stepid, validation.IntValidator()) return defer.succeed(None) def setStepStateString(self, stepid, state_string): validation.verifyType(self.testcase, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.testcase, 'state_string', state_string, validation.StringValidator()) self.stepStateString[stepid] = state_string return defer.succeed(None) def finishStep(self, stepid, results, hidden): validation.verifyType(self.testcase, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.testcase, 'results', results, validation.IntValidator()) validation.verifyType(self.testcase, 'hidden', hidden, validation.BooleanValidator()) return defer.succeed(None) def addLog(self, stepid, name, type): validation.verifyType(self.testcase, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.testcase, 'name', name, validation.StringValidator()) validation.verifyType(self.testcase, 'type', type, validation.IdentifierValidator(1)) logid = max([0] + list(self.logs)) + 1 self.logs[logid] = dict( name=name, type=type, 
content=[], finished=False) return defer.succeed(logid) def finishLog(self, logid): validation.verifyType(self.testcase, 'logid', logid, validation.IntValidator()) self.logs[logid]['finished'] = True return defer.succeed(None) def compressLog(self, logid): validation.verifyType(self.testcase, 'logid', logid, validation.IntValidator()) return defer.succeed(None) def appendLog(self, logid, content): validation.verifyType(self.testcase, 'logid', logid, validation.IntValidator()) validation.verifyType(self.testcase, 'content', content, validation.StringValidator()) self.testcase.assertEqual(content[-1], '\n') self.logs[logid]['content'].append(content) return defer.succeed(None) def findWorkerId(self, name): validation.verifyType(self.testcase, 'worker name', name, validation.IdentifierValidator(50)) # this needs to actually get inserted into the db (fake or real) since # getWorker will get called later return self.master.db.workers.findWorkerId(name) def workerConnected(self, workerid, masterid, workerinfo): return self.master.db.workers.workerConnected( workerid=workerid, masterid=masterid, workerinfo=workerinfo) def workerConfigured(self, workerid, masterid, builderids): return self.master.db.workers.workerConfigured( workerid=workerid, masterid=masterid, builderids=builderids) def workerDisconnected(self, workerid, masterid): return self.master.db.workers.workerDisconnected( workerid=workerid, masterid=masterid) def deconfigureAllWorkersForMaster(self, masterid): return self.master.db.workers.deconfigureAllWorkersForMaster( masterid=masterid) def workerMissing(self, workerid, masterid, last_connection, notify): self.missingWorkers.append((workerid, masterid, last_connection, notify)) def schedulerEnable(self, schedulerid, v): return self.master.db.schedulers.enable(schedulerid, v) def setWorkerState(self, workerid, paused, graceful): return self.master.db.workers.setWorkerState( workerid=workerid, paused=paused, graceful=graceful) class FakeDataConnector(service.AsyncMultiService): # FakeDataConnector delegates to the real DataConnector so it can get all # of the proper getter and consumer behavior; it overrides all of the # relevant updates with fake methods, though. def __init__(self, master, testcase): super().__init__() self.setServiceParent(master) self.updates = FakeUpdates(testcase) self.updates.setServiceParent(self) # get and control are delegated to a real connector, # after some additional assertions self.realConnector = connector.DataConnector() self.realConnector.setServiceParent(self) self.rtypes = self.realConnector.rtypes def _scanModule(self, mod): return self.realConnector._scanModule(mod) def getEndpoint(self, path): if not isinstance(path, tuple): raise TypeError('path must be a tuple') return self.realConnector.getEndpoint(path) def getResourceType(self, name): return getattr(self.rtypes, name) def get(self, path, filters=None, fields=None, order=None, limit=None, offset=None): if not isinstance(path, tuple): raise TypeError('path must be a tuple') return self.realConnector.get(path, filters=filters, fields=fields, order=order, limit=limit, offset=offset) def control(self, action, args, path): if not isinstance(path, tuple): raise TypeError('path must be a tuple') return self.realConnector.control(action, args, path) buildbot-2.6.0/master/buildbot/test/fake/fakedb.py000066400000000000000000002502421361162603000221000ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ A complete re-implementation of the database connector components, but without using a database. These classes should pass the same tests as are applied to the real connector components. """ import base64 import copy import hashlib import json from twisted.internet import defer from buildbot.data import resultspec from buildbot.db import buildrequests from buildbot.db import buildsets from buildbot.db import changesources from buildbot.db import schedulers from buildbot.test.util import validation from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime from buildbot.util import service from buildbot.util import unicode2bytes # Fake DB Rows class Row: """ Parent class for row classes, which are used to specify test data for database-related tests. @cvar defaults: default values for columns @type defaults: dictionary @cvar table: the table name @cvar id_column: specify a column that should be assigned an auto-incremented id. Auto-assigned id's begin at 1000, so any explicitly specified ID's should be less than 1000. @cvar required_columns: a tuple of columns that must be given in the constructor @cvar hashedColumns: a tuple of hash column and source columns designating a hash to work around MySQL's inability to do indexing. @ivar values: the values to be inserted into this row """ id_column = () required_columns = () lists = () dicts = () hashedColumns = [] foreignKeys = [] # Columns that content is represented as sa.Binary-like type in DB model. # They value is bytestring (in contrast to text-like columns, which are # unicode). binary_columns = () _next_id = None def __init__(self, **kwargs): self.values = self.defaults.copy() self.values.update(kwargs) if self.id_column: if self.values[self.id_column] is None: self.values[self.id_column] = self.nextId() for col in self.required_columns: assert col in kwargs, "%s not specified: %s" % (col, kwargs) for col in self.lists: setattr(self, col, []) for col in self.dicts: setattr(self, col, {}) for col in kwargs: assert col in self.defaults, "%s is not a valid column" % col # cast to unicode for k, v in self.values.items(): if isinstance(v, str): self.values[k] = str(v) # Binary columns stores either (compressed) binary data or encoded # with utf-8 unicode string. We assume that Row constructor receives # only unicode strings and encode them to utf-8 here. # At this moment there is only one such column: logchunks.contents, # which stores either utf-8 encoded string, or gzip-compressed # utf-8 encoded string. 
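        # For example (illustration only): LogChunk(logid=1, content='a\n')
        # ends up with values['content'] == b'a\n' after the loop below.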
for col in self.binary_columns: self.values[col] = unicode2bytes(self.values[col]) # calculate any necessary hashes for hash_col, src_cols in self.hashedColumns: self.values[hash_col] = self.hashColumns( *(self.values[c] for c in src_cols)) # make the values appear as attributes self.__dict__.update(self.values) def __eq__(self, other): if self.__class__ != other.__class__: return False return self.values == other.values def __ne__(self, other): if self.__class__ != other.__class__: return True return self.values != other.values def __lt__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values < other.values def __le__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values <= other.values def __gt__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values > other.values def __ge__(self, other): if self.__class__ != other.__class__: raise TypeError("Cannot compare {} and {}".format( self.__class__, other.__class__)) return self.values >= other.values def __repr__(self): return '%s(**%r)' % (self.__class__.__name__, self.values) def nextId(self): id = Row._next_id if Row._next_id is not None else 1 Row._next_id = id + 1 return id def hashColumns(self, *args): # copied from master/buildbot/db/base.py def encode(x): if x is None: return b'\xf5' elif isinstance(x, str): return x.encode('utf-8') return str(x).encode('utf-8') return hashlib.sha1(b'\0'.join(map(encode, args))).hexdigest() @defer.inlineCallbacks def checkForeignKeys(self, db, t): accessors = dict( buildsetid=db.buildsets.getBuildset, workerid=db.workers.getWorker, builderid=db.builders.getBuilder, buildid=db.builds.getBuild, changesourceid=db.changesources.getChangeSource, changeid=db.changes.getChange, buildrequestid=db.buildrequests.getBuildRequest, sourcestampid=db.sourcestamps.getSourceStamp, schedulerid=db.schedulers.getScheduler, brid=db.buildrequests.getBuildRequest, masterid=db.masters.getMaster) for foreign_key in self.foreignKeys: if foreign_key in accessors: key = getattr(self, foreign_key) if key is not None: val = yield accessors[foreign_key](key) t.assertTrue(val is not None, "foreign key %s:%r does not exit" % (foreign_key, key)) else: raise ValueError( "warning, unsupported foreign key", foreign_key, self.table) class BuildRequest(Row): table = "buildrequests" defaults = dict( id=None, buildsetid=None, builderid=None, buildername=None, priority=0, complete=0, results=-1, submitted_at=12345678, complete_at=None, waited_for=0, ) foreignKeys = ('buildsetid',) id_column = 'id' required_columns = ('buildsetid',) class BuildRequestClaim(Row): table = "buildrequest_claims" defaults = dict( brid=None, masterid=None, claimed_at=None ) foreignKeys = ('brid', 'masterid') required_columns = ('brid', 'masterid', 'claimed_at') class ChangeSource(Row): table = "changesources" defaults = dict( id=None, name='csname', name_hash=None, ) id_column = 'id' hashedColumns = [('name_hash', ('name',))] class ChangeSourceMaster(Row): table = "changesource_masters" defaults = dict( changesourceid=None, masterid=None, ) foreignKeys = ('changesourceid', 'masterid') required_columns = ('changesourceid', 'masterid') class Change(Row): table = "changes" defaults = dict( changeid=None, author='frank', committer='steve', comments='test change', 
branch='master', revision='abcd', revlink='http://vc/abcd', when_timestamp=1200000, category='cat', repository='repo', codebase='', project='proj', sourcestampid=92, parent_changeids=None, ) lists = ('files', 'uids') dicts = ('properties',) id_column = 'changeid' class ChangeFile(Row): table = "change_files" defaults = dict( changeid=None, filename=None, ) foreignKeys = ('changeid',) required_columns = ('changeid',) class ChangeProperty(Row): table = "change_properties" defaults = dict( changeid=None, property_name=None, property_value=None, ) foreignKeys = ('changeid',) required_columns = ('changeid',) class ChangeUser(Row): table = "change_users" defaults = dict( changeid=None, uid=None, ) foreignKeys = ('changeid',) required_columns = ('changeid',) class Patch(Row): table = "patches" defaults = dict( id=None, patchlevel=0, patch_base64='aGVsbG8sIHdvcmxk', # 'hello, world', patch_author=None, patch_comment=None, subdir=None, ) id_column = 'id' class SourceStamp(Row): table = "sourcestamps" defaults = dict( id=None, branch='master', revision='abcd', patchid=None, repository='repo', codebase='', project='proj', created_at=89834834, ss_hash=None, ) id_column = 'id' hashedColumns = [('ss_hash', ('branch', 'revision', 'repository', 'project', 'codebase', 'patchid',))] class Scheduler(Row): table = "schedulers" defaults = dict( id=None, name='schname', name_hash=None, enabled=1, ) id_column = 'id' hashedColumns = [('name_hash', ('name',))] class SchedulerMaster(Row): table = "scheduler_masters" defaults = dict( schedulerid=None, masterid=None, ) foreignKeys = ('schedulerid', 'masterid') required_columns = ('schedulerid', 'masterid') class SchedulerChange(Row): table = "scheduler_changes" defaults = dict( schedulerid=None, changeid=None, important=1, ) foreignKeys = ('schedulerid', 'changeid') required_columns = ('schedulerid', 'changeid') class Buildset(Row): table = "buildsets" defaults = dict( id=None, external_idstring='extid', reason='because', submitted_at=12345678, complete=0, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None, ) id_column = 'id' class BuildsetProperty(Row): table = "buildset_properties" defaults = dict( buildsetid=None, property_name='prop', property_value='[22, "fakedb"]', ) foreignKeys = ('buildsetid',) required_columns = ('buildsetid', ) class Worker(Row): table = "workers" defaults = dict( id=None, name='some:worker', info={"a": "b"}, paused=0, graceful=0, ) id_column = 'id' required_columns = ('name', ) class BuildsetSourceStamp(Row): table = "buildset_sourcestamps" defaults = dict( id=None, buildsetid=None, sourcestampid=None, ) foreignKeys = ('buildsetid', 'sourcestampid') required_columns = ('buildsetid', 'sourcestampid', ) id_column = 'id' class Object(Row): table = "objects" defaults = dict( id=None, name='nam', class_name='cls', ) id_column = 'id' class ObjectState(Row): table = "object_state" defaults = dict( objectid=None, name='nam', value_json='{}', ) required_columns = ('objectid', ) class User(Row): table = "users" defaults = dict( uid=None, identifier='soap', bb_username=None, bb_password=None, ) id_column = 'uid' class UserInfo(Row): table = "users_info" defaults = dict( uid=None, attr_type='git', attr_data='Tyler Durden ', ) foreignKeys = ('uid',) required_columns = ('uid', ) class Build(Row): table = "builds" defaults = dict( id=None, number=29, buildrequestid=None, builderid=None, workerid=-1, masterid=None, started_at=1304262222, complete_at=None, state_string="test", results=None) id_column = 'id' foreignKeys = 
('buildrequestid', 'masterid', 'workerid', 'builderid') required_columns = ('buildrequestid', 'masterid', 'workerid') class BuildProperty(Row): table = "build_properties" defaults = dict( buildid=None, name='prop', value=42, source='fakedb' ) foreignKeys = ('buildid',) required_columns = ('buildid',) class Step(Row): table = "steps" defaults = dict( id=None, number=29, name='step29', buildid=None, started_at=1304262222, complete_at=None, state_string='', results=None, urls_json='[]', hidden=0) id_column = 'id' foreignKeys = ('buildid',) required_columns = ('buildid', ) class Log(Row): table = "logs" defaults = dict( id=None, name='log29', slug='log29', stepid=None, complete=0, num_lines=0, type='s') id_column = 'id' required_columns = ('stepid', ) class LogChunk(Row): table = "logchunks" defaults = dict( logid=None, first_line=0, last_line=0, content='', compressed=0) required_columns = ('logid', ) # 'content' column is sa.LargeBinary, it's bytestring. binary_columns = ('content',) class Master(Row): table = "masters" defaults = dict( id=None, name='some:master', name_hash=None, active=1, last_active=9998999, ) id_column = 'id' hashedColumns = [('name_hash', ('name',))] class Builder(Row): table = "builders" defaults = dict( id=None, name='some:builder', name_hash=None, description=None, ) id_column = 'id' hashedColumns = [('name_hash', ('name',))] class BuilderMaster(Row): table = "builder_masters" defaults = dict( id=None, builderid=None, masterid=None ) id_column = 'id' required_columns = ('builderid', 'masterid') class Tag(Row): table = "tags" defaults = dict( id=None, name='some:tag', name_hash=None, ) id_column = 'id' hashedColumns = [('name_hash', ('name',))] class BuildersTags(Row): table = "builders_tags" defaults = dict( id=None, builderid=None, tagid=None, ) foreignKeys = ('builderid', 'tagid') required_columns = ('builderid', 'tagid', ) id_column = 'id' class ConnectedWorker(Row): table = "connected_workers" defaults = dict( id=None, masterid=None, workerid=None, ) id_column = 'id' required_columns = ('masterid', 'workerid') class ConfiguredWorker(Row): table = "configured_workers" defaults = dict( id=None, buildermasterid=None, workerid=None, ) id_column = 'id' required_columns = ('buildermasterid', 'workerid') # Fake DB Components class FakeDBComponent: data2db = {} def __init__(self, db, testcase): self.db = db self.t = testcase self.reactor = testcase.reactor self.setUp() def mapFilter(self, f, fieldMapping): field = fieldMapping[f.field].split(".")[-1] return resultspec.Filter(field, f.op, f.values) def mapOrder(self, o, fieldMapping): if o.startswith('-'): reverse, o = o[0], o[1:] else: reverse = "" o = fieldMapping[o].split(".")[-1] return reverse + o def applyResultSpec(self, data, rs): def applicable(field): if field.startswith('-'): field = field[1:] return field in rs.fieldMapping filters = [self.mapFilter(f, rs.fieldMapping) for f in rs.filters if applicable(f.field)] order = [] offset = limit = None if rs.order: order = [self.mapOrder(o, rs.fieldMapping) for o in rs.order if applicable(o)] if len(filters) == len(rs.filters) and rs.order is not None and len(order) == len(rs.order): offset, limit = rs.offset, rs.limit rs = resultspec.ResultSpec( filters=filters, order=order, limit=limit, offset=offset) return rs.apply(data) class FakeChangeSourcesComponent(FakeDBComponent): def setUp(self): self.changesources = {} self.changesource_masters = {} self.states = {} def insertTestData(self, rows): for row in rows: if isinstance(row, ChangeSource): 
self.changesources[row.id] = row.name if isinstance(row, ChangeSourceMaster): self.changesource_masters[row.changesourceid] = row.masterid # component methods def findChangeSourceId(self, name): for cs_id, cs_name in self.changesources.items(): if cs_name == name: return defer.succeed(cs_id) new_id = (max(self.changesources) + 1) if self.changesources else 1 self.changesources[new_id] = name return defer.succeed(new_id) def getChangeSource(self, changesourceid): if changesourceid in self.changesources: rv = dict( id=changesourceid, name=self.changesources[changesourceid], masterid=None) # only set masterid if the relevant changesource master exists and # is active rv['masterid'] = self.changesource_masters.get(changesourceid) return defer.succeed(rv) return None def getChangeSources(self, active=None, masterid=None): d = defer.DeferredList([ self.getChangeSource(id) for id in self.changesources ]) @d.addCallback def filter(results): # filter off the DeferredList results (we know it's good) results = [r[1] for r in results] # filter for masterid if masterid is not None: results = [r for r in results if r['masterid'] == masterid] # filter for active or inactive if necessary if active: results = [r for r in results if r['masterid'] is not None] elif active is not None: results = [r for r in results if r['masterid'] is None] return results return d def setChangeSourceMaster(self, changesourceid, masterid): current_masterid = self.changesource_masters.get(changesourceid) if current_masterid and masterid is not None and current_masterid != masterid: return defer.fail(changesources.ChangeSourceAlreadyClaimedError()) self.changesource_masters[changesourceid] = masterid return defer.succeed(None) # fake methods def fakeChangeSource(self, name, changesourceid): self.changesources[changesourceid] = name def fakeChangeSourceMaster(self, changesourceid, masterid): if masterid is not None: self.changesource_masters[changesourceid] = masterid else: del self.changesource_masters[changesourceid] # assertions def assertChangeSourceMaster(self, changesourceid, masterid): self.t.assertEqual(self.changesource_masters.get(changesourceid), masterid) class FakeChangesComponent(FakeDBComponent): def setUp(self): self.changes = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Change): # copy this since we'll be modifying it (e.g., adding files) ch = self.changes[row.changeid] = copy.deepcopy(row.values) ch['files'] = [] ch['properties'] = {} ch['uids'] = [] elif isinstance(row, ChangeFile): ch = self.changes[row.changeid] ch['files'].append(row.filename) elif isinstance(row, ChangeProperty): ch = self.changes[row.changeid] n, vs = row.property_name, row.property_value v, s = json.loads(vs) ch['properties'][n] = (v, s) elif isinstance(row, ChangeUser): ch = self.changes[row.changeid] ch['uids'].append(row.uid) # component methods @defer.inlineCallbacks def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): if properties is None: properties = {} if self.changes: changeid = max(list(self.changes)) + 1 else: changeid = 500 ssid = yield self.db.sourcestamps.findSourceStampId( revision=revision, branch=branch, repository=repository, codebase=codebase, project=project) parent_changeids = yield self.getParentChangeIds(branch, repository, project, codebase) self.changes[changeid] = ch = dict( changeid=changeid, 
parent_changeids=parent_changeids, author=author, committer=committer, comments=comments, revision=revision, when_timestamp=datetime2epoch(when_timestamp), branch=branch, category=category, revlink=revlink, repository=repository, project=project, codebase=codebase, uids=[], files=files, properties=properties, sourcestampid=ssid) if uid: ch['uids'].append(uid) return changeid def getLatestChangeid(self): if self.changes: return defer.succeed(max(list(self.changes))) return defer.succeed(None) def getParentChangeIds(self, branch, repository, project, codebase): if self.changes: for changeid, change in self.changes.items(): if (change['branch'] == branch and change['repository'] == repository and change['project'] == project and change['codebase'] == codebase): return defer.succeed([change['changeid']]) return defer.succeed([]) def getChange(self, key, no_cache=False): try: row = self.changes[key] except KeyError: return defer.succeed(None) return defer.succeed(self._chdict(row)) def getChangeUids(self, changeid): try: ch_uids = self.changes[changeid]['uids'] except KeyError: ch_uids = [] return defer.succeed(ch_uids) def getRecentChanges(self, count): ids = sorted(self.changes.keys()) chdicts = [self._chdict(self.changes[id]) for id in ids[-count:]] return defer.succeed(chdicts) def getChanges(self): chdicts = [self._chdict(v) for v in self.changes.values()] return defer.succeed(chdicts) def getChangesCount(self): return defer.succeed(len(self.changes)) def getChangesForBuild(self, buildid): # the algorithm is too complicated to be worth faked, better patch it # ad-hoc raise NotImplementedError( "Please patch in tests to return appropriate results") def getChangeFromSSid(self, ssid): chdicts = [self._chdict(v) for v in self.changes.values() if v['sourcestampid'] == ssid] if chdicts: return defer.succeed(chdicts[0]) return defer.succeed(None) def _chdict(self, row): chdict = row.copy() del chdict['uids'] if chdict['parent_changeids'] is None: chdict['parent_changeids'] = [] chdict['when_timestamp'] = epoch2datetime(chdict['when_timestamp']) return chdict # assertions def assertChange(self, changeid, row): row_only = self.changes[changeid].copy() del row_only['files'] del row_only['properties'] del row_only['uids'] if not row_only['parent_changeids']: # Convert [] to None # None is the value stored in the DB. # We need this kind of conversion, because for the moment we only support # 1 parent for a change. 
# When we will support multiple parent for change, then we will have a # table parent_changes with at least 2 col: "changeid", "parent_changeid" # And the col 'parent_changeids' of the table changes will be # dropped row_only['parent_changeids'] = None self.t.assertEqual(row_only, row.values) def assertChangeUsers(self, changeid, expectedUids): self.t.assertEqual(self.changes[changeid]['uids'], expectedUids) # fake methods def fakeAddChangeInstance(self, change): if not hasattr(change, 'number') or not change.number: if self.changes: changeid = max(list(self.changes)) + 1 else: changeid = 500 else: changeid = change.number # make a row from the change row = dict( changeid=changeid, author=change.who, files=change.files, comments=change.comments, revision=change.revision, when_timestamp=change.when, branch=change.branch, category=change.category, revlink=change.revlink, properties=change.properties, repository=change.repository, codebase=change.codebase, project=change.project, uids=[]) self.changes[changeid] = row class FakeSchedulersComponent(FakeDBComponent): def setUp(self): self.schedulers = {} self.scheduler_masters = {} self.states = {} self.classifications = {} self.enabled = {} def insertTestData(self, rows): for row in rows: if isinstance(row, SchedulerChange): cls = self.classifications.setdefault(row.schedulerid, {}) cls[row.changeid] = row.important if isinstance(row, Scheduler): self.schedulers[row.id] = row.name self.enabled[row.id] = True if isinstance(row, SchedulerMaster): self.scheduler_masters[row.schedulerid] = row.masterid # component methods def classifyChanges(self, schedulerid, classifications): self.classifications.setdefault( schedulerid, {}).update(classifications) return defer.succeed(None) def flushChangeClassifications(self, schedulerid, less_than=None): if less_than is not None: classifications = self.classifications.setdefault(schedulerid, {}) for changeid in list(classifications): if changeid < less_than: del classifications[changeid] else: self.classifications[schedulerid] = {} return defer.succeed(None) def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): classifications = self.classifications.setdefault(schedulerid, {}) sentinel = dict(branch=object(), repository=object(), project=object(), codebase=object()) if branch != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['branch'] == branch) if repository != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['repository'] == repository) if project != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['project'] == project) if codebase != -1: # filter out the classifications for the requested branch classifications = dict( (k, v) for (k, v) in classifications.items() if self.db.changes.changes.get(k, sentinel)['codebase'] == codebase) return defer.succeed(classifications) def findSchedulerId(self, name): for sch_id, sch_name in self.schedulers.items(): if sch_name == name: return defer.succeed(sch_id) new_id = (max(self.schedulers) + 1) if self.schedulers else 1 self.schedulers[new_id] = name return defer.succeed(new_id) def getScheduler(self, schedulerid): if schedulerid in self.schedulers: 
rv = dict( id=schedulerid, name=self.schedulers[schedulerid], enabled=self.enabled.get(schedulerid, True), masterid=None) # only set masterid if the relevant scheduler master exists and # is active rv['masterid'] = self.scheduler_masters.get(schedulerid) return defer.succeed(rv) return None def getSchedulers(self, active=None, masterid=None): d = defer.DeferredList([ self.getScheduler(id) for id in self.schedulers ]) @d.addCallback def filter(results): # filter off the DeferredList results (we know it's good) results = [r[1] for r in results] # filter for masterid if masterid is not None: results = [r for r in results if r['masterid'] == masterid] # filter for active or inactive if necessary if active: results = [r for r in results if r['masterid'] is not None] elif active is not None: results = [r for r in results if r['masterid'] is None] return results return d def setSchedulerMaster(self, schedulerid, masterid): current_masterid = self.scheduler_masters.get(schedulerid) if current_masterid and masterid is not None and current_masterid != masterid: return defer.fail(schedulers.SchedulerAlreadyClaimedError()) self.scheduler_masters[schedulerid] = masterid return defer.succeed(None) # fake methods def fakeClassifications(self, schedulerid, classifications): """Set the set of classifications for a scheduler""" self.classifications[schedulerid] = classifications def fakeScheduler(self, name, schedulerid): self.schedulers[schedulerid] = name def fakeSchedulerMaster(self, schedulerid, masterid): if masterid is not None: self.scheduler_masters[schedulerid] = masterid else: del self.scheduler_masters[schedulerid] # assertions def assertClassifications(self, schedulerid, classifications): self.t.assertEqual( self.classifications.get(schedulerid, {}), classifications) def assertSchedulerMaster(self, schedulerid, masterid): self.t.assertEqual(self.scheduler_masters.get(schedulerid), masterid) def enable(self, schedulerid, v): assert schedulerid in self.schedulers self.enabled[schedulerid] = v return defer.succeed((('control', 'schedulers', schedulerid, 'enable'), {'enabled': v})) class FakeSourceStampsComponent(FakeDBComponent): def setUp(self): self.sourcestamps = {} self.patches = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Patch): self.patches[row.id] = dict( patch_level=row.patchlevel, patch_body=base64.b64decode(row.patch_base64), patch_author=row.patch_author, patch_comment=row.patch_comment, patch_subdir=row.subdir) for row in rows: if isinstance(row, SourceStamp): ss = self.sourcestamps[row.id] = row.values.copy() ss['created_at'] = epoch2datetime(ss['created_at']) del ss['ss_hash'] del ss['id'] # component methods def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): d = self.findOrCreateId( branch, revision, repository, project, codebase, patch_body, patch_level, patch_author, patch_comment, patch_subdir) d.addCallback(lambda pair: pair[0]) return d def findOrCreateId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): if patch_body: patchid = len(self.patches) + 1 while patchid in self.patches: patchid += 1 self.patches[patchid] = dict( patch_level=patch_level, patch_body=patch_body, patch_subdir=patch_subdir, patch_author=patch_author, patch_comment=patch_comment ) else: patchid = None new_ssdict = 
dict(branch=branch, revision=revision, codebase=codebase, patchid=patchid, repository=repository, project=project, created_at=epoch2datetime(self.reactor.seconds())) for id, ssdict in self.sourcestamps.items(): keys = ['branch', 'revision', 'repository', 'codebase', 'project', 'patchid'] if [ssdict[k] for k in keys] == [new_ssdict[k] for k in keys]: return defer.succeed((id, True)) id = len(self.sourcestamps) + 100 while id in self.sourcestamps: id += 1 self.sourcestamps[id] = new_ssdict return defer.succeed((id, False)) def getSourceStamp(self, key, no_cache=False): return defer.succeed(self._getSourceStamp_sync(key)) def getSourceStamps(self): return defer.succeed([ self._getSourceStamp_sync(ssid) for ssid in self.sourcestamps ]) def _getSourceStamp_sync(self, ssid): if ssid in self.sourcestamps: ssdict = self.sourcestamps[ssid].copy() ssdict['ssid'] = ssid patchid = ssdict['patchid'] if patchid: ssdict.update(self.patches[patchid]) ssdict['patchid'] = patchid else: ssdict['patch_body'] = None ssdict['patch_level'] = None ssdict['patch_subdir'] = None ssdict['patch_author'] = None ssdict['patch_comment'] = None return ssdict else: return None @defer.inlineCallbacks def getSourceStampsForBuild(self, buildid): build = yield self.db.builds.getBuild(buildid) breq = yield self.db.buildrequests.getBuildRequest(build['buildrequestid']) bset = yield self.db.buildsets.getBuildset(breq['buildsetid']) results = [] for ssid in bset['sourcestamps']: results.append((yield self.getSourceStamp(ssid))) return results class FakeBuildsetsComponent(FakeDBComponent): def setUp(self): self.buildsets = {} self.completed_bsids = set() self.buildset_sourcestamps = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Buildset): bs = self.buildsets[row.id] = row.values.copy() bs['properties'] = {} for row in rows: if isinstance(row, BuildsetProperty): assert row.buildsetid in self.buildsets n = row.property_name v, src = tuple(json.loads(row.property_value)) self.buildsets[row.buildsetid]['properties'][n] = (v, src) for row in rows: if isinstance(row, BuildsetSourceStamp): assert row.buildsetid in self.buildsets self.buildset_sourcestamps.setdefault(row.buildsetid, []).append(row.sourcestampid) # component methods def _newBsid(self): bsid = 200 while bsid in self.buildsets: bsid += 1 return bsid @defer.inlineCallbacks def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): # We've gotten this wrong a couple times. 
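# (callers have occasionally passed a non-boolean value for waited_for, hence the explicit type check below)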
assert isinstance( waited_for, bool), 'waited_for should be boolean: %r' % waited_for # calculate submitted at if submitted_at is not None: submitted_at = datetime2epoch(submitted_at) else: submitted_at = int(self.reactor.seconds()) bsid = self._newBsid() br_rows = [] for builderid in builderids: br_rows.append( BuildRequest(buildsetid=bsid, builderid=builderid, waited_for=waited_for, submitted_at=submitted_at)) self.db.buildrequests.insertTestData(br_rows) # make up a row and keep its dictionary, with the properties tacked on bsrow = Buildset(id=bsid, reason=reason, external_idstring=external_idstring, submitted_at=submitted_at, parent_buildid=parent_buildid, parent_relationship=parent_relationship) self.buildsets[bsid] = bsrow.values.copy() self.buildsets[bsid]['properties'] = properties # add sourcestamps ssids = [] for ss in sourcestamps: if not isinstance(ss, type(1)): ss = yield self.db.sourcestamps.findSourceStampId(**ss) ssids.append(ss) self.buildset_sourcestamps[bsid] = ssids return (bsid, {br.builderid: br.id for br in br_rows}) def completeBuildset(self, bsid, results, complete_at=None): if bsid not in self.buildsets or self.buildsets[bsid]['complete']: raise buildsets.AlreadyCompleteError() if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.reactor.seconds()) self.buildsets[bsid]['results'] = results self.buildsets[bsid]['complete'] = 1 self.buildsets[bsid]['complete_at'] = complete_at return defer.succeed(None) def getBuildset(self, bsid): if bsid not in self.buildsets: return defer.succeed(None) row = self.buildsets[bsid] return defer.succeed(self._row2dict(row)) def getBuildsets(self, complete=None, resultSpec=None): rv = [] for bs in self.buildsets.values(): if complete is not None: if complete and bs['complete']: rv.append(self._row2dict(bs)) elif not complete and not bs['complete']: rv.append(self._row2dict(bs)) else: rv.append(self._row2dict(bs)) if resultSpec is not None: rv = self.applyResultSpec(rv, resultSpec) return defer.succeed(rv) @defer.inlineCallbacks def getRecentBuildsets(self, count=None, branch=None, repository=None, complete=None): if not count: return [] rv = [] for bs in (yield self.getBuildsets(complete=complete)): if branch or repository: ok = True if not bs['sourcestamps']: # no sourcestamps -> no match ok = False for ssid in bs['sourcestamps']: ss = yield self.db.sourcestamps.getSourceStamp(ssid) if branch and ss['branch'] != branch: ok = False if repository and ss['repository'] != repository: ok = False else: ok = True if ok: rv.append(bs) rv.sort(key=lambda bs: -bs['bsid']) return list(reversed(rv[:count])) def _row2dict(self, row): row = row.copy() row['complete_at'] = epoch2datetime(row['complete_at']) row['submitted_at'] = epoch2datetime(row['submitted_at']) row['complete'] = bool(row['complete']) row['bsid'] = row['id'] row['sourcestamps'] = self.buildset_sourcestamps.get(row['id'], []) del row['id'] del row['properties'] return row def getBuildsetProperties(self, key, no_cache=False): if key in self.buildsets: return defer.succeed( self.buildsets[key]['properties']) return defer.succeed({}) # fake methods def fakeBuildsetCompletion(self, bsid, result): assert bsid in self.buildsets self.buildsets[bsid]['results'] = result self.completed_bsids.add(bsid) # assertions def assertBuildsetCompletion(self, bsid, complete): """Assert that the completion state of buildset BSID is COMPLETE""" actual = self.buildsets[bsid]['complete'] self.t.assertTrue( (actual and complete) or (not actual and not 
complete)) def assertBuildset(self, bsid=None, expected_buildset=None): """Assert that the given buildset looks as expected; the ssid parameter of the buildset is omitted. Properties are converted with asList and sorted. Attributes complete, complete_at, submitted_at, results, and parent_* are ignored if not specified.""" self.t.assertIn(bsid, self.buildsets) buildset = self.buildsets[bsid].copy() del buildset['id'] # clear out some columns if the caller doesn't care for col in 'complete complete_at submitted_at results parent_buildid parent_relationship'.split(): if col not in expected_buildset: del buildset[col] if buildset['properties']: buildset['properties'] = sorted(buildset['properties'].items()) self.t.assertEqual(buildset, expected_buildset) return bsid class FakeWorkersComponent(FakeDBComponent): def setUp(self): self.workers = {} self.configured = {} self.connected = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Worker): self.workers[row.id] = dict( id=row.id, name=row.name, paused=0, graceful=0, info=row.info) elif isinstance(row, ConfiguredWorker): row.id = row.buildermasterid * 10000 + row.workerid self.configured[row.id] = dict( buildermasterid=row.buildermasterid, workerid=row.workerid) elif isinstance(row, ConnectedWorker): self.connected[row.id] = dict( masterid=row.masterid, workerid=row.workerid) def findWorkerId(self, name): validation.verifyType(self.t, 'name', name, validation.IdentifierValidator(50)) for m in self.workers.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.workers) + 1 self.workers[id] = dict( id=id, name=name, info={}) return defer.succeed(id) def _getWorkerByName(self, name): for worker in self.workers.values(): if worker['name'] == name: return worker return None def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): # get the id and the worker if workerid is None: for worker in self.workers.values(): if worker['name'] == name: workerid = worker['id'] break else: worker = None else: worker = self.workers.get(workerid) if not worker: return defer.succeed(None) # now get the connection status per builder_master, filtered # by builderid and masterid return defer.succeed(self._mkdict(worker, builderid, masterid)) def getWorkers(self, masterid=None, builderid=None, paused=None, graceful=None): if masterid is not None or builderid is not None: builder_masters = self.db.builders.builder_masters workers = [] for worker in self.workers.values(): configured = [cfg for cfg in self.configured.values() if cfg['workerid'] == worker['id']] pairs = [builder_masters[cfg['buildermasterid']] for cfg in configured] if builderid is not None and masterid is not None: if (builderid, masterid) not in pairs: continue if builderid is not None: if not any(builderid == p[0] for p in pairs): continue if masterid is not None: if not any((masterid == p[1]) for p in pairs): continue workers.append(worker) else: workers = list(self.workers.values()) if paused is not None: workers = [w for w in workers if w['paused'] == paused] if graceful is not None: workers = [w for w in workers if w['graceful'] == graceful] return defer.succeed([ self._mkdict(worker, builderid, masterid) for worker in workers]) def workerConnected(self, workerid, masterid, workerinfo): worker = self.workers.get(workerid) # test serialization json.dumps(workerinfo) if worker is not None: worker['info'] = workerinfo new_conn = dict(masterid=masterid, workerid=workerid) if new_conn not in self.connected.values(): conn_id = max([0] + 
list(self.connected)) + 1 self.connected[conn_id] = new_conn return defer.succeed(None) def deconfigureAllWorkersForMaster(self, masterid): buildermasterids = [_id for _id, (builderid, mid) in self.db.builders.builder_masters.items() if mid == masterid] for k, v in list(self.configured.items()): if v['buildermasterid'] in buildermasterids: del self.configured[k] def workerConfigured(self, workerid, masterid, builderids): buildermasterids = [_id for _id, (builderid, mid) in self.db.builders.builder_masters.items() if mid == masterid and builderid in builderids] if len(buildermasterids) != len(builderids): raise ValueError("Some builders are not configured for this master: " "builders: %s, master: %s buildermaster:%s" % (builderids, masterid, self.db.builders.builder_masters)) allbuildermasterids = [_id for _id, (builderid, mid) in self.db.builders.builder_masters.items() if mid == masterid] for k, v in list(self.configured.items()): if v['buildermasterid'] in allbuildermasterids and v['workerid'] == workerid: del self.configured[k] self.insertTestData([ConfiguredWorker(workerid=workerid, buildermasterid=buildermasterid) for buildermasterid in buildermasterids]) return defer.succeed(None) def workerDisconnected(self, workerid, masterid): del_conn = dict(masterid=masterid, workerid=workerid) for id, conn in self.connected.items(): if conn == del_conn: del self.connected[id] break return defer.succeed(None) def setWorkerState(self, workerid, paused, graceful): worker = self.workers.get(workerid) if worker is not None: worker['paused'] = int(paused) worker['graceful'] = int(graceful) def _configuredOn(self, workerid, builderid=None, masterid=None): cfg = [] for cs in self.configured.values(): if cs['workerid'] != workerid: continue bid, mid = self.db.builders.builder_masters[cs['buildermasterid']] if builderid is not None and bid != builderid: continue if masterid is not None and mid != masterid: continue cfg.append({'builderid': bid, 'masterid': mid}) return cfg def _connectedTo(self, workerid, masterid=None): conns = [] for cs in self.connected.values(): if cs['workerid'] != workerid: continue if masterid is not None and cs['masterid'] != masterid: continue conns.append(cs['masterid']) return conns def _mkdict(self, w, builderid, masterid): return { 'id': w['id'], 'workerinfo': w['info'], 'name': w['name'], 'paused': bool(w.get('paused')), 'graceful': bool(w.get('graceful')), 'configured_on': self._configuredOn(w['id'], builderid, masterid), 'connected_to': self._connectedTo(w['id'], masterid), } class FakeStateComponent(FakeDBComponent): def setUp(self): self.objects = {} self.states = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Object): self.objects[(row.name, row.class_name)] = row.id self.states[row.id] = {} for row in rows: if isinstance(row, ObjectState): assert row.objectid in list(self.objects.values()) self.states[row.objectid][row.name] = row.value_json # component methods def _newId(self): id = 100 while id in self.states: id += 1 return id def getObjectId(self, name, class_name): try: id = self.objects[(name, class_name)] except KeyError: # invent a new id and add it id = self.objects[(name, class_name)] = self._newId() self.states[id] = {} return defer.succeed(id) def getState(self, objectid, name, default=object): try: json_value = self.states[objectid][name] except KeyError: if default is not object: return defer.succeed(default) raise return defer.succeed(json.loads(json_value)) def setState(self, objectid, name, value): self.states[objectid][name] = 
json.dumps(value) return defer.succeed(None) def atomicCreateState(self, objectid, name, thd_create_callback): value = thd_create_callback() self.states[objectid][name] = json.dumps(bytes2unicode(value)) return defer.succeed(value) # fake methods def fakeState(self, name, class_name, **kwargs): id = self.objects[(name, class_name)] = self._newId() self.objects[(name, class_name)] = id self.states[id] = dict((k, json.dumps(v)) for k, v in kwargs.items()) return id # assertions def assertState(self, objectid, missing_keys=None, **kwargs): if missing_keys is None: missing_keys = [] state = self.states[objectid] for k in missing_keys: self.t.assertFalse(k in state, "%s in %s" % (k, state)) for k, v in kwargs.items(): self.t.assertIn(k, state) self.t.assertEqual(json.loads(state[k]), v, "state is %r" % (state,)) def assertStateByClass(self, name, class_name, **kwargs): objectid = self.objects[(name, class_name)] state = self.states[objectid] for k, v in kwargs.items(): self.t.assertIn(k, state) self.t.assertEqual(json.loads(state[k]), v, "state is %r" % (state,)) class FakeBuildRequestsComponent(FakeDBComponent): # for use in determining "my" requests MASTER_ID = 824 def setUp(self): self.reqs = {} self.claims = {} def insertTestData(self, rows): for row in rows: if isinstance(row, BuildRequest): self.reqs[row.id] = row if isinstance(row, BuildRequestClaim): self.claims[row.brid] = row # component methods @defer.inlineCallbacks def getBuildRequest(self, brid): row = self.reqs.get(brid) if row: claim_row = self.claims.get(brid, None) if claim_row: row.claimed_at = claim_row.claimed_at row.claimed = True row.masterid = claim_row.masterid row.claimed_by_masterid = claim_row.masterid else: row.claimed_at = None builder = yield self.db.builders.getBuilder(row.builderid) row.buildername = builder["name"] return self._brdictFromRow(row) else: return None @defer.inlineCallbacks def getBuildRequests(self, builderid=None, complete=None, claimed=None, bsid=None, branch=None, repository=None, resultSpec=None): rv = [] for br in self.reqs.values(): if builderid and br.builderid != builderid: continue if complete is not None: if complete and not br.complete: continue if not complete and br.complete: continue claim_row = self.claims.get(br.id) if claim_row: br.claimed_at = claim_row.claimed_at br.claimed = True br.masterid = claim_row.masterid br.claimed_by_masterid = claim_row.masterid else: br.claimed_at = None if claimed is not None: if isinstance(claimed, bool): if claimed: if not claim_row: continue else: if br.complete or claim_row: continue else: if not claim_row or claim_row.masterid != claimed: continue if bsid is not None: if br.buildsetid != bsid: continue if branch or repository: buildset = yield self.db.buildsets.getBuildset(br.buildsetid) sourcestamps = [] for ssid in buildset['sourcestamps']: sourcestamps.append((yield self.db.sourcestamps.getSourceStamp(ssid))) if branch and not any(branch == s['branch'] for s in sourcestamps): continue if repository and not any(repository == s['repository'] for s in sourcestamps): continue builder = yield self.db.builders.getBuilder(br.builderid) br.buildername = builder["name"] rv.append(self._brdictFromRow(br)) if resultSpec is not None: rv = self.applyResultSpec(rv, resultSpec) return rv def claimBuildRequests(self, brids, claimed_at=None): for brid in brids: if brid not in self.reqs or brid in self.claims: raise buildrequests.AlreadyClaimedError if claimed_at is not None: claimed_at = datetime2epoch(claimed_at) else: claimed_at = 
int(self.reactor.seconds()) # now that we've thrown any necessary exceptions, get started for brid in brids: self.claims[brid] = BuildRequestClaim(brid=brid, masterid=self.MASTER_ID, claimed_at=claimed_at) return defer.succeed(None) def unclaimBuildRequests(self, brids): for brid in brids: if brid in self.claims and self.claims[brid].masterid == self.db.master.masterid: self.claims.pop(brid) def completeBuildRequests(self, brids, results, complete_at=None): if complete_at is not None: complete_at = datetime2epoch(complete_at) else: complete_at = int(self.reactor.seconds()) for brid in brids: if brid not in self.reqs or self.reqs[brid].complete == 1: raise buildrequests.NotClaimedError for brid in brids: self.reqs[brid].complete = 1 self.reqs[brid].results = results self.reqs[brid].complete_at = complete_at return defer.succeed(None) def _brdictFromRow(self, row): return buildrequests.BuildRequestsConnectorComponent._brdictFromRow(row, self.MASTER_ID) # fake methods def fakeClaimBuildRequest(self, brid, claimed_at=None, masterid=None): if masterid is None: masterid = self.MASTER_ID self.claims[brid] = BuildRequestClaim(brid=brid, masterid=masterid, claimed_at=self.reactor.seconds()) def fakeUnclaimBuildRequest(self, brid): del self.claims[brid] # assertions def assertMyClaims(self, claimed_brids): self.t.assertEqual( [id for (id, brc) in self.claims.items() if brc.masterid == self.MASTER_ID], claimed_brids) class FakeBuildsComponent(FakeDBComponent): def setUp(self): self.builds = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Build): build = self.builds[row.id] = row.values.copy() build['properties'] = {} for row in rows: if isinstance(row, BuildProperty): assert row.buildid in self.builds self.builds[row.buildid]['properties'][ row.name] = (row.value, row.source) # component methods def _newId(self): id = 100 while id in self.builds: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], number=row['number'], buildrequestid=row['buildrequestid'], builderid=row['builderid'], masterid=row['masterid'], workerid=row['workerid'], started_at=epoch2datetime(row['started_at']), complete_at=epoch2datetime(row['complete_at']), state_string=row['state_string'], results=row['results']) def getBuild(self, buildid): row = self.builds.get(buildid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getBuildByNumber(self, builderid, number): for row in self.builds.values(): if row['builderid'] == builderid and row['number'] == number: return defer.succeed(self._row2dict(row)) return defer.succeed(None) def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): ret = [] for (id, row) in self.builds.items(): if builderid is not None and row['builderid'] != builderid: continue if buildrequestid is not None and row['buildrequestid'] != buildrequestid: continue if workerid is not None and row['workerid'] != workerid: continue if complete is not None and complete != (row['complete_at'] is not None): continue ret.append(self._row2dict(row)) if resultSpec is not None: ret = self.applyResultSpec(ret, resultSpec) return defer.succeed(ret) def addBuild(self, builderid, buildrequestid, workerid, masterid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) id = self._newId() number = max([0] + [r['number'] for r in self.builds.values() if r['builderid'] == builderid]) + 1 self.builds[id] = dict(id=id, number=number, buildrequestid=buildrequestid, 
builderid=builderid, workerid=workerid, masterid=masterid, state_string=state_string, started_at=self.reactor.seconds(), complete_at=None, results=None) return defer.succeed((id, number)) def setBuildStateString(self, buildid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) b = self.builds.get(buildid) if b: b['state_string'] = state_string return defer.succeed(None) def finishBuild(self, buildid, results): now = self.reactor.seconds() b = self.builds.get(buildid) if b: b['complete_at'] = now b['results'] = results return defer.succeed(None) def getBuildProperties(self, bid): if bid in self.builds: return defer.succeed(self.builds[bid]['properties']) return defer.succeed({}) def setBuildProperty(self, bid, name, value, source): assert bid in self.builds self.builds[bid]['properties'][name] = (value, source) return defer.succeed(None) @defer.inlineCallbacks def getBuildsForChange(self, changeid): change = yield self.db.changes.getChange(changeid) bsets = yield self.db.buildsets.getBuildsets() breqs = yield self.db.buildrequests.getBuildRequests() builds = yield self.db.builds.getBuilds() results = [] for bset in bsets: for ssid in bset['sourcestamps']: if change['sourcestampid'] == ssid: bset['changeid'] = changeid results.append({'buildsetid': bset['bsid']}) for breq in breqs: for result in results: if result['buildsetid'] == breq['buildsetid']: result['buildrequestid'] = breq['buildrequestid'] for build in builds: for result in results: if result['buildrequestid'] == build['buildrequestid']: result['id'] = build['id'] result['number'] = build['number'] result['builderid'] = build['builderid'] result['workerid'] = build['workerid'] result['masterid'] = build['masterid'] result['started_at'] = epoch2datetime(1304262222) result['complete_at'] = build['complete_at'] result['state_string'] = build['state_string'] result['results'] = build['results'] for result in results: del result['buildsetid'] return results class FakeStepsComponent(FakeDBComponent): def setUp(self): self.steps = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Step): self.steps[row.id] = row.values.copy() # component methods def _newId(self): id = 100 while id in self.steps: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], buildid=row['buildid'], number=row['number'], name=row['name'], started_at=epoch2datetime(row['started_at']), complete_at=epoch2datetime(row['complete_at']), state_string=row['state_string'], results=row['results'], urls=json.loads(row['urls_json']), hidden=bool(row['hidden'])) def getStep(self, stepid=None, buildid=None, number=None, name=None): if stepid is not None: row = self.steps.get(stepid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) else: if number is None and name is None: return defer.fail(RuntimeError("specify both name and number")) for row in self.steps.values(): if row['buildid'] != buildid: continue if number is not None and row['number'] != number: continue if name is not None and row['name'] != name: continue return defer.succeed(self._row2dict(row)) return defer.succeed(None) def getSteps(self, buildid): ret = [] for row in self.steps.values(): if row['buildid'] != buildid: continue ret.append(self._row2dict(row)) ret.sort(key=lambda r: r['number']) return defer.succeed(ret) def addStep(self, buildid, name, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) validation.verifyType(self.t, 'name', 
name, validation.IdentifierValidator(50)) # get a unique name and number build_steps = [r for r in self.steps.values() if r['buildid'] == buildid] if build_steps: number = max([r['number'] for r in build_steps]) + 1 names = {r['name'] for r in build_steps} if name in names: i = 1 while '%s_%d' % (name, i) in names: i += 1 name = '%s_%d' % (name, i) else: number = 0 id = self._newId() self.steps[id] = { 'id': id, 'buildid': buildid, 'number': number, 'name': name, 'started_at': None, 'complete_at': None, 'results': None, 'state_string': state_string, 'urls_json': '[]', 'hidden': False} return defer.succeed((id, number, name)) def startStep(self, stepid): b = self.steps.get(stepid) if b: b['started_at'] = self.reactor.seconds() return defer.succeed(None) def setStepStateString(self, stepid, state_string): validation.verifyType(self.t, 'state_string', state_string, validation.StringValidator()) b = self.steps.get(stepid) if b: b['state_string'] = state_string return defer.succeed(None) def addURL(self, stepid, name, url, _racehook=None): validation.verifyType(self.t, 'stepid', stepid, validation.IntValidator()) validation.verifyType(self.t, 'name', name, validation.IdentifierValidator(50)) validation.verifyType(self.t, 'url', url, validation.StringValidator()) b = self.steps.get(stepid) if b: urls = json.loads(b['urls_json']) url_item = dict(name=name, url=url) if url_item not in urls: urls.append(url_item) b['urls_json'] = json.dumps(urls) return defer.succeed(None) def finishStep(self, stepid, results, hidden): now = self.reactor.seconds() b = self.steps.get(stepid) if b: b['complete_at'] = now b['results'] = results b['hidden'] = bool(hidden) return defer.succeed(None) class FakeLogsComponent(FakeDBComponent): def setUp(self): self.logs = {} self.log_lines = {} # { logid : [ lines ] } def insertTestData(self, rows): for row in rows: if isinstance(row, Log): self.logs[row.id] = row.values.copy() for row in rows: if isinstance(row, LogChunk): lines = self.log_lines.setdefault(row.logid, []) # make sure there are enough slots in the list if len(lines) < row.last_line + 1: lines.append([None] * (row.last_line + 1 - len(lines))) row_lines = row.content.decode('utf-8').split('\n') lines[row.first_line:row.last_line + 1] = row_lines # component methods def _newId(self): id = 100 while id in self.logs: id += 1 return id def _row2dict(self, row): return dict( id=row['id'], stepid=row['stepid'], name=row['name'], slug=row['slug'], complete=bool(row['complete']), num_lines=row['num_lines'], type=row['type']) def getLog(self, logid): row = self.logs.get(logid) if not row: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getLogBySlug(self, stepid, slug): row = None for row in self.logs.values(): if row['slug'] == slug and row['stepid'] == stepid: break else: return defer.succeed(None) return defer.succeed(self._row2dict(row)) def getLogs(self, stepid=None): return defer.succeed([ self._row2dict(row) for row in self.logs.values() if row['stepid'] == stepid]) def getLogLines(self, logid, first_line, last_line): if logid not in self.logs or first_line > last_line: return defer.succeed('') lines = self.log_lines.get(logid, []) rv = lines[first_line:last_line + 1] return defer.succeed('\n'.join(rv) + '\n' if rv else '') def addLog(self, stepid, name, slug, type): id = self._newId() self.logs[id] = dict(id=id, stepid=stepid, name=name, slug=slug, type=type, complete=0, num_lines=0) self.log_lines[id] = [] return defer.succeed(id) def appendLog(self, logid, content): 
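"""Append *content* (a newline-terminated string) to the fake log and return a Deferred firing with the (first_line, last_line) numbers of the chunk just added."""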
validation.verifyType(self.t, 'logid', logid, validation.IntValidator()) validation.verifyType(self.t, 'content', content, validation.StringValidator()) self.t.assertEqual(content[-1], '\n') content = content[:-1].split('\n') lines = self.log_lines[logid] lines.extend(content) num_lines = self.logs[logid]['num_lines'] = len(lines) return defer.succeed((num_lines - len(content), num_lines - 1)) def finishLog(self, logid): if id in self.logs: self.logs['id'].complete = 1 return defer.succeed(None) def compressLog(self, logid, force=False): return defer.succeed(None) def deleteOldLogChunks(self, older_than_timestamp): # not implemented self._deleted = older_than_timestamp return defer.succeed(1) class FakeUsersComponent(FakeDBComponent): def setUp(self): self.users = {} self.users_info = {} self.id_num = 0 def insertTestData(self, rows): for row in rows: if isinstance(row, User): self.users[row.uid] = dict(identifier=row.identifier, bb_username=row.bb_username, bb_password=row.bb_password) if isinstance(row, UserInfo): assert row.uid in self.users if row.uid not in self.users_info: self.users_info[row.uid] = [dict(attr_type=row.attr_type, attr_data=row.attr_data)] else: self.users_info[row.uid].append( dict(attr_type=row.attr_type, attr_data=row.attr_data)) def _user2dict(self, uid): usdict = None if uid in self.users: usdict = self.users[uid] if uid in self.users_info: infos = self.users_info[uid] for attr in infos: usdict[attr['attr_type']] = attr['attr_data'] usdict['uid'] = uid return usdict def nextId(self): self.id_num += 1 return self.id_num # component methods def findUserByAttr(self, identifier, attr_type, attr_data): for uid in self.users_info: attrs = self.users_info[uid] for attr in attrs: if (attr_type == attr['attr_type'] and attr_data == attr['attr_data']): return defer.succeed(uid) uid = self.nextId() self.db.insertTestData([User(uid=uid, identifier=identifier)]) self.db.insertTestData([UserInfo(uid=uid, attr_type=attr_type, attr_data=attr_data)]) return defer.succeed(uid) def getUser(self, uid): usdict = None if uid in self.users: usdict = self._user2dict(uid) return defer.succeed(usdict) def getUserByUsername(self, username): usdict = None for uid in self.users: user = self.users[uid] if user['bb_username'] == username: usdict = self._user2dict(uid) return defer.succeed(usdict) def updateUser(self, uid=None, identifier=None, bb_username=None, bb_password=None, attr_type=None, attr_data=None): assert uid is not None if identifier is not None: self.users[uid]['identifier'] = identifier if bb_username is not None: assert bb_password is not None try: user = self.users[uid] user['bb_username'] = bb_username user['bb_password'] = bb_password except KeyError: pass if attr_type is not None: assert attr_data is not None try: infos = self.users_info[uid] for attr in infos: if attr_type == attr['attr_type']: attr['attr_data'] = attr_data break else: infos.append(dict(attr_type=attr_type, attr_data=attr_data)) except KeyError: pass return defer.succeed(None) def removeUser(self, uid): if uid in self.users: self.users.pop(uid) self.users_info.pop(uid) return defer.succeed(None) def identifierToUid(self, identifier): for uid in self.users: if identifier == self.users[uid]['identifier']: return defer.succeed(uid) return defer.succeed(None) class FakeMastersComponent(FakeDBComponent): data2db = {"masterid": "id", "link": "id"} def setUp(self): self.masters = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Master): self.masters[row.id] = dict( id=row.id, name=row.name, 
active=bool(row.active), last_active=epoch2datetime(row.last_active)) def findMasterId(self, name): for m in self.masters.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.masters) + 1 self.masters[id] = dict( id=id, name=name, active=False, last_active=epoch2datetime(self.reactor.seconds())) return defer.succeed(id) def setMasterState(self, masterid, active): if masterid in self.masters: was_active = self.masters[masterid]['active'] self.masters[masterid]['active'] = active if active: self.masters[masterid]['last_active'] = \ epoch2datetime(self.reactor.seconds()) return defer.succeed(bool(was_active) != bool(active)) else: return defer.succeed(False) def getMaster(self, masterid): if masterid in self.masters: return defer.succeed(self.masters[masterid]) return defer.succeed(None) def getMasters(self): return defer.succeed(sorted(self.masters.values(), key=lambda x: x['id'])) # test helpers def markMasterInactive(self, masterid): if masterid in self.masters: self.masters[masterid]['active'] = False return defer.succeed(None) class FakeBuildersComponent(FakeDBComponent): def setUp(self): self.builders = {} self.builder_masters = {} self.builders_tags = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Builder): self.builders[row.id] = dict( id=row.id, name=row.name, description=row.description) if isinstance(row, BuilderMaster): self.builder_masters[row.id] = \ (row.builderid, row.masterid) if isinstance(row, BuildersTags): assert row.builderid in self.builders self.builders_tags.setdefault(row.builderid, []).append(row.tagid) def findBuilderId(self, name, autoCreate=True): for m in self.builders.values(): if m['name'] == name: return defer.succeed(m['id']) if not autoCreate: return defer.succeed(None) id = len(self.builders) + 1 self.builders[id] = dict( id=id, name=name, description=None, tags=[]) return defer.succeed(id) def addBuilderMaster(self, builderid=None, masterid=None): if (builderid, masterid) not in list(self.builder_masters.values()): self.insertTestData([ BuilderMaster(builderid=builderid, masterid=masterid), ]) return defer.succeed(None) def removeBuilderMaster(self, builderid=None, masterid=None): for id, tup in self.builder_masters.items(): if tup == (builderid, masterid): del self.builder_masters[id] break return defer.succeed(None) def getBuilder(self, builderid): if builderid in self.builders: masterids = [bm[1] for bm in self.builder_masters.values() if bm[0] == builderid] bldr = self.builders[builderid].copy() bldr['masterids'] = sorted(masterids) return defer.succeed(self._row2dict(bldr)) return defer.succeed(None) def getBuilders(self, masterid=None): rv = [] for builderid, bldr in self.builders.items(): masterids = [bm[1] for bm in self.builder_masters.values() if bm[0] == builderid] bldr = bldr.copy() bldr['masterids'] = sorted(masterids) rv.append(self._row2dict(bldr)) if masterid is not None: rv = [bd for bd in rv if masterid in bd['masterids']] return defer.succeed(rv) def addTestBuilder(self, builderid, name=None): if name is None: name = "SomeBuilder-%d" % builderid self.db.insertTestData([ Builder(id=builderid, name=name), ]) @defer.inlineCallbacks def updateBuilderInfo(self, builderid, description, tags): if builderid in self.builders: tags = tags if tags else [] self.builders[builderid]['description'] = description # add tags tagids = [] for tag in tags: if not isinstance(tag, type(1)): tag = yield self.db.tags.findTagId(tag) tagids.append(tag) self.builders_tags[builderid] = tagids def _row2dict(self, row): 
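"""Return a copy of the given builder row with its stored tag ids resolved to tag names via the fake tags component."""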
row = row.copy() row['tags'] = [self.db.tags.tags[tagid]['name'] for tagid in self.builders_tags.get(row['id'], [])] return row class FakeTagsComponent(FakeDBComponent): def setUp(self): self.tags = {} def insertTestData(self, rows): for row in rows: if isinstance(row, Tag): self.tags[row.id] = dict( id=row.id, name=row.name) def findTagId(self, name): for m in self.tags.values(): if m['name'] == name: return defer.succeed(m['id']) id = len(self.tags) + 1 self.tags[id] = dict( id=id, name=name) return defer.succeed(id) class FakeDBConnector(service.AsyncMultiService): """ A stand-in for C{master.db} that operates without an actual database backend. This also implements a test-data interface similar to the L{buildbot.test.util.db.RealDatabaseMixin.insertTestData} method. The child classes implement various useful assertions and faking methods; see their documentation for more. """ def __init__(self, testcase): super().__init__() # reset the id generator, for stable id's Row._next_id = 1000 self.t = testcase self.checkForeignKeys = False self._components = [] self.changes = comp = FakeChangesComponent(self, testcase) self._components.append(comp) self.changesources = comp = FakeChangeSourcesComponent(self, testcase) self._components.append(comp) self.schedulers = comp = FakeSchedulersComponent(self, testcase) self._components.append(comp) self.sourcestamps = comp = FakeSourceStampsComponent(self, testcase) self._components.append(comp) self.buildsets = comp = FakeBuildsetsComponent(self, testcase) self._components.append(comp) self.workers = comp = FakeWorkersComponent(self, testcase) self._components.append(comp) self.state = comp = FakeStateComponent(self, testcase) self._components.append(comp) self.buildrequests = comp = FakeBuildRequestsComponent(self, testcase) self._components.append(comp) self.builds = comp = FakeBuildsComponent(self, testcase) self._components.append(comp) self.steps = comp = FakeStepsComponent(self, testcase) self._components.append(comp) self.logs = comp = FakeLogsComponent(self, testcase) self._components.append(comp) self.users = comp = FakeUsersComponent(self, testcase) self._components.append(comp) self.masters = comp = FakeMastersComponent(self, testcase) self._components.append(comp) self.builders = comp = FakeBuildersComponent(self, testcase) self._components.append(comp) self.tags = comp = FakeTagsComponent(self, testcase) self._components.append(comp) def setup(self): self.is_setup = True return defer.succeed(None) def insertTestData(self, rows): """Insert a list of Row instances into the database; this method can be called synchronously or asynchronously (it completes immediately) """ for row in rows: if self.checkForeignKeys: row.checkForeignKeys(self, self.t) for comp in self._components: comp.insertTestData([row]) return defer.succeed(None) buildbot-2.6.0/master/buildbot/test/fake/fakemaster.py000066400000000000000000000151141361162603000230030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
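# A sketched use of the FakeDBConnector defined above, assuming a test case
# whose master was built with a fake db attached (the ids are illustrative only):
#
#     yield master.db.insertTestData([
#         fakedb.Builder(id=77, name='SomeBuilder-77'),
#         fakedb.Tag(id=1, name='fast'),
#         fakedb.BuildersTags(builderid=77, tagid=1),
#     ])
#     bldr = yield master.db.builders.getBuilder(77)  # bldr['tags'] == ['fast']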
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import weakref import mock from twisted.internet import defer from twisted.internet import reactor from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot.status import build from buildbot.test.fake import bworkermanager from buildbot.test.fake import fakedata from buildbot.test.fake import fakedb from buildbot.test.fake import fakemq from buildbot.test.fake import pbmanager from buildbot.test.fake.botmaster import FakeBotMaster from buildbot.test.fake.machine import FakeMachineManager from buildbot.util import service class FakeCache: """Emulate an L{AsyncLRUCache}, but without any real caching. This I{does} do the weakref part, to catch un-weakref-able objects.""" def __init__(self, name, miss_fn): self.name = name self.miss_fn = miss_fn def get(self, key, **kwargs): d = self.miss_fn(key, **kwargs) @d.addCallback def mkref(x): if x is not None: weakref.ref(x) return x return d def put(self, key, val): pass class FakeCaches: def get_cache(self, name, miss_fn): return FakeCache(name, miss_fn) class FakeStatus(service.BuildbotService): name = "status" lastBuilderStatus = None def builderAdded(self, name, basedir, tags=None, description=None): bs = FakeBuilderStatus(self.master) self.lastBuilderStatus = bs return bs def getBuilderNames(self): return [] def getWorkerNames(self): return [] def workerConnected(self, name): pass def build_started(self, brid, buildername, build_status): pass def getURLForBuild(self, builder_name, build_number): return "URLForBuild/%s/%d" % (builder_name, build_number) def getURLForBuildrequest(self, buildrequestid): return "URLForBuildrequest/%d" % (buildrequestid,) def subscribe(self, _): pass def getTitle(self): return "myBuildbot" def getURLForThing(self, _): return "h://thing" def getBuildbotURL(self): return "h://bb.me" @implementer(interfaces.IBuilderStatus) class FakeBuilderStatus: def __init__(self, master=None, buildername="Builder"): if master: self.master = master self.botmaster = master.botmaster self.basedir = os.path.join(master.basedir, 'bldr') self.lastBuildStatus = None self._tags = None self.name = buildername def setDescription(self, description): self._description = description def getDescription(self): return self._description def getTags(self): return self._tags def setTags(self, tags): self._tags = tags def matchesAnyTag(self, tags): return set(self._tags) & set(tags) def setWorkernames(self, names): pass def setCacheSize(self, size): pass def setBigState(self, state): pass def newBuild(self): bld = build.BuildStatus(self, self.master, 3) self.lastBuildStatus = bld return bld def buildStarted(self, builderStatus): pass class FakeLogRotation: rotateLength = 42 maxRotatedFiles = 42 class FakeMaster(service.MasterService): """ Create a fake Master instance: a Mock with some convenience implementations: - Non-caching implementation for C{self.caches} """ def __init__(self, reactor, master_id=fakedb.FakeBuildRequestsComponent.MASTER_ID): super().__init__() self._master_id = master_id self.reactor = reactor self.objectids = {} self.config = config.MasterConfig() self.caches = FakeCaches() self.pbmanager = pbmanager.FakePBManager() self.initLock = defer.DeferredLock() self.basedir = 'basedir' self.botmaster = FakeBotMaster() 
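# register the fake sub-services (botmaster, status, worker and machine managers) as children of this master, mirroring the service tree of a real master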
self.botmaster.setServiceParent(self) self.status = FakeStatus() self.status.setServiceParent(self) self.name = 'fake:/master' self.masterid = master_id self.workers = bworkermanager.FakeWorkerManager() self.workers.setServiceParent(self) self.machine_manager = FakeMachineManager() self.machine_manager.setServiceParent(self) self.log_rotation = FakeLogRotation() self.db = mock.Mock() self.next_objectid = 0 self.config_version = 0 def getObjectId(sched_name, class_name): k = (sched_name, class_name) try: rv = self.objectids[k] except KeyError: rv = self.objectids[k] = self.next_objectid self.next_objectid += 1 return defer.succeed(rv) self.db.state.getObjectId = getObjectId def getObjectId(self): return defer.succeed(self._master_id) def subscribeToBuildRequests(self, callback): pass # Leave this alias, in case we want to add more behavior later def make_master(testcase, wantMq=False, wantDb=False, wantData=False, wantRealReactor=False, url=None, **kwargs): if wantRealReactor: _reactor = reactor else: assert testcase is not None, "need testcase for fake reactor" # The test case must inherit from TestReactorMixin and setup it. _reactor = testcase.reactor master = FakeMaster(_reactor, **kwargs) if url: master.buildbotURL = url if wantData: wantMq = wantDb = True if wantMq: assert testcase is not None, "need testcase for wantMq" master.mq = fakemq.FakeMQConnector(testcase) master.mq.setServiceParent(master) if wantDb: assert testcase is not None, "need testcase for wantDb" master.db = fakedb.FakeDBConnector(testcase) master.db.setServiceParent(master) if wantData: master.data = fakedata.FakeDataConnector(master, testcase) return master buildbot-2.6.0/master/buildbot/test/fake/fakemq.py000066400000000000000000000073121361162603000221260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.mq import base from buildbot.test.util import validation from buildbot.util import service from buildbot.util import tuplematch class FakeMQConnector(service.AsyncMultiService, base.MQBase): # a fake connector that doesn't actually bridge messages from production to # consumption, and thus doesn't do any topic handling or persistence # note that this *does* verify all messages sent and received, unless this # is set to false: verifyMessages = True def __init__(self, testcase): super().__init__() self.testcase = testcase self.setup_called = False self.productions = [] self.qrefs = [] def setup(self): self.setup_called = True return defer.succeed(None) def produce(self, routingKey, data): self.testcase.assertIsInstance(routingKey, tuple) # XXX this is incompatible with the new scheme of sending multiple messages, # since the message type is no longer encoded by the first element of the # routing key # if self.verifyMessages: # validation.verifyMessage(self.testcase, routingKey, data) if any(not isinstance(k, str) for k in routingKey): raise AssertionError("%s is not all str" % (routingKey,)) self.productions.append((routingKey, data)) # note - no consumers are called: IT'S A FAKE def callConsumer(self, routingKey, msg): if self.verifyMessages: validation.verifyMessage(self.testcase, routingKey, msg) matched = False for q in self.qrefs: if tuplematch.matchTuple(routingKey, q.filter): matched = True q.callback(routingKey, msg) if not matched: raise AssertionError("no consumer found") def startConsuming(self, callback, filter, persistent_name=None): if any(not isinstance(k, str) and k is not None for k in filter): raise AssertionError("%s is not a filter" % (filter,)) qref = FakeQueueRef() qref.qrefs = self.qrefs qref.callback = callback qref.filter = filter qref.persistent_name = persistent_name self.qrefs.append(qref) return defer.succeed(qref) def clearProductions(self): "Clear out the cached productions" self.productions = [] def assertProductions(self, exp, orderMatters=True): """Assert that the given messages have been produced, then flush the list of produced messages. If C{orderMatters} is false, then the messages are sorted first; use this in cases where the messages must all be produced, but the order is not specified. """ if orderMatters: self.testcase.assertEqual(self.productions, exp) else: self.testcase.assertEqual(sorted(self.productions), sorted(exp)) self.productions = [] class FakeQueueRef: def stopConsuming(self): if self in self.qrefs: self.qrefs.remove(self) buildbot-2.6.0/master/buildbot/test/fake/fakeprotocol.py000066400000000000000000000062071361162603000233540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
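# A sketched interaction with the FakeMQConnector defined above (an assumed
# test fragment; the routing key and payload are illustrative only):
#
#     master.mq.produce(('buildsets', '13', 'complete'), {'bsid': 13})
#     master.mq.assertProductions(
#         [(('buildsets', '13', 'complete'), {'bsid': 13})])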
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import subscription from buildbot.worker.protocols import base class FakeTrivialConnection: info = {} def __init__(self): self._disconnectSubs = subscription.SubscriptionPoint("disconnections from Fake") def waitShutdown(self): return defer.succeed(None) def notifyOnDisconnect(self, cb): return self._disconnectSubs.subscribe(cb) def waitForNotifyDisconnectedDelivered(self): return self._disconnectSubs.waitForDeliveriesToFinish() def notifyDisconnected(self): self._disconnectSubs.deliver() def loseConnection(self): self.notifyDisconnected() def remoteSetBuilderList(self, builders): return defer.succeed(None) class FakeConnection(base.Connection): def __init__(self, master, worker): super().__init__(master, worker) self._connected = True self.remoteCalls = [] self.builders = {} # { name : isBusy } # users of the fake can add to this as desired self.info = { 'worker_commands': [], 'version': '0.9.0', 'basedir': '/w', 'system': 'nt', } def loseConnection(self): self.notifyDisconnected() def remotePrint(self, message): self.remoteCalls.append(('remotePrint', message)) return defer.succeed(None) def remoteGetWorkerInfo(self): self.remoteCalls.append(('remoteGetWorkerInfo',)) return defer.succeed(self.info) def remoteSetBuilderList(self, builders): self.remoteCalls.append(('remoteSetBuilderList', builders[:])) self.builders = dict((b, False) for b in builders) return defer.succeed(None) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): self.remoteCalls.append(('remoteStartCommand', remoteCommand, builderName, commandId, commandName, args)) return defer.succeed(None) def remoteShutdown(self): self.remoteCalls.append(('remoteShutdown',)) return defer.succeed(None) def remoteStartBuild(self, builderName): self.remoteCalls.append(('remoteStartBuild', builderName)) return defer.succeed(None) def remoteInterruptCommand(self, builderName, commandId, why): self.remoteCalls.append( ('remoteInterruptCommand', builderName, commandId, why)) return defer.succeed(None) buildbot-2.6.0/master/buildbot/test/fake/fakestats.py000066400000000000000000000050361361162603000226500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import buildstep from buildbot.process.results import SUCCESS from buildbot.statistics import capture from buildbot.statistics import stats_service from buildbot.statistics.storage_backends.base import StatsStorageBase class FakeStatsStorageService(StatsStorageBase): """ Fake Storage service used in unit tests """ def __init__(self, stats=None, name='FakeStatsStorageService'): self.stored_data = [] if not stats: self.stats = [capture.CaptureProperty("TestBuilder", 'test')] else: self.stats = stats self.name = name self.captures = [] @defer.inlineCallbacks def thd_postStatsValue(self, post_data, series_name, context=None): if not context: context = {} self.stored_data.append((post_data, series_name, context)) yield defer.succeed(None) class FakeBuildStep(buildstep.BuildStep): """ A fake build step to be used for testing. """ def doSomething(self): self.setProperty("test", 10, "test") def start(self): self.doSomething() return SUCCESS class FakeStatsService(stats_service.StatsService): """ Fake StatsService for use in fakemaster """ def __init__(self, master=None, *args, **kwargs): super().__init__(*args, **kwargs) self.master = master @property def master(self): return self._master @master.setter def master(self, value): self._master = value class FakeInfluxDBClient: """ Fake Influx module for testing on systems that don't have influxdb installed. """ def __init__(self, *args, **kwargs): self.points = [] def write_points(self, points): self.points.extend(points) buildbot-2.6.0/master/buildbot/test/fake/httpclientservice.py000066400000000000000000000137321361162603000244240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json as jsonmodule import mock from twisted.internet import defer from zope.interface import implementer from buildbot.interfaces import IHttpResponse from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.util.logger import Logger log = Logger() @implementer(IHttpResponse) class ResponseWrapper: def __init__(self, code, content): self._content = content self._code = code def content(self): content = unicode2bytes(self._content) return defer.succeed(content) def json(self): return defer.succeed(jsonmodule.loads(self._content)) @property def code(self): return self._code class HTTPClientService(service.SharedService): """A SharedService class that fakes http requests for buildbot http service testing. It is called HTTPClientService so that it substitute the real HTTPClientService if created earlier in the test. getName from the fake and getName from the real must return the same values. 
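
    A rough usage sketch (illustrative; it assumes the usual fake master from
    buildbot.test.fake.fakemaster and an arbitrary base URL):

        http = yield HTTPClientService.getFakeService(
            master, self, 'http://server')
        http.expect('get', '/ping', content_json={'pong': True})
        # the code under test then calls http.get('/ping') and receives the
        # canned 200 response declared above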
""" quiet = False def __init__(self, base_url, auth=None, headers=None, debug=None, verify=None): assert not base_url.endswith("/"), "baseurl should not end with /" super().__init__() self._base_url = base_url self._auth = auth self._headers = headers self._session = None self._expected = [] def updateHeaders(self, headers): if self._headers is None: self._headers = {} self._headers.update(headers) @classmethod def getFakeService(cls, master, case, *args, **kwargs): ret = cls.getService(master, *args, **kwargs) def assertNotCalled(self, *_args, **_kwargs): case.fail(("HTTPClientService called with *{!r}, **{!r}" "while should be called *{!r} **{!r}").format( _args, _kwargs, args, kwargs)) case.patch(httpclientservice.HTTPClientService, "__init__", assertNotCalled) @ret.addCallback def assertNoOutstanding(fake): fake.case = case case.addCleanup(fake.assertNoOutstanding) return fake return ret # tests should ensure this has been called checkAvailable = mock.Mock() def expect(self, method, ep, params=None, data=None, json=None, code=200, content=None, content_json=None, files=None): if content is not None and content_json is not None: return ValueError("content and content_json cannot be both specified") if content_json is not None: content = jsonmodule.dumps(content_json, default=toJson) self._expected.append(dict( method=method, ep=ep, params=params, data=data, json=json, code=code, content=content, files=files)) def assertNoOutstanding(self): self.case.assertEqual(0, len(self._expected), "expected more http requests:\n {!r}".format(self._expected)) def _doRequest(self, method, ep, params=None, data=None, json=None, files=None, timeout=None): assert ep == "" or ep.startswith("/"), "ep should start with /: " + ep if not self.quiet: log.debug("{method} {ep} {params!r} <- {data!r}", method=method, ep=ep, params=params, data=data or json) if json is not None: # ensure that the json is really jsonable jsonmodule.dumps(json, default=toJson) if files is not None: files = dict((k, v.read()) for (k, v) in files.items()) if not self._expected: raise AssertionError( "Not expecting a request, while we got: " "method={!r}, ep={!r}, params={!r}, data={!r}, json={!r}, files={!r}".format( method, ep, params, data, json, files)) expect = self._expected.pop(0) # pylint: disable=too-many-boolean-expressions if (expect['method'] != method or expect['ep'] != ep or expect['params'] != params or expect['data'] != data or expect['json'] != json or expect['files'] != files): raise AssertionError( "expecting:\n" "method={!r}, ep={!r}, params={!r}, data={!r}, json={!r}, files={!r}\n" "got :\n" "method={!r}, ep={!r}, params={!r}, data={!r}, json={!r}, files={!r}".format( expect['method'], expect['ep'], expect['params'], expect['data'], expect['json'], expect['files'], method, ep, params, data, json, files, )) if not self.quiet: log.debug("{method} {ep} -> {code} {content!r}", method=method, ep=ep, code=expect['code'], content=expect['content']) return defer.succeed(ResponseWrapper(expect['code'], expect['content'])) # lets be nice to the auto completers, and don't generate that code def get(self, ep, **kwargs): return self._doRequest('get', ep, **kwargs) def put(self, ep, **kwargs): return self._doRequest('put', ep, **kwargs) def delete(self, ep, **kwargs): return self._doRequest('delete', ep, **kwargs) def post(self, ep, **kwargs): return self._doRequest('post', ep, **kwargs) buildbot-2.6.0/master/buildbot/test/fake/kube.py000066400000000000000000000045711361162603000216140ustar00rootroot00000000000000# This file is 
part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import time from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util.kubeclientservice import KubeError class KubeClientService(fakehttpclientservice.HTTPClientService): def __init__(self, kube_config=None, *args, **kwargs): c = kube_config.getConfig() super().__init__(c['master_url'], *args, **kwargs) self.namespace = c['namespace'] self.addService(kube_config) self.pods = {} def createPod(self, namespace, spec): if 'metadata' not in spec: raise KubeError({ 'message': 'Pod "" is invalid: metadata.name: Required value: name or generateName is required'}) name = spec['metadata']['name'] pod = { 'kind': 'Pod', 'metadata': copy.copy(spec['metadata']), 'spec': copy.deepcopy(spec['spec']) } self.pods[namespace + '/' + name] = pod return pod def deletePod(self, namespace, name, graceperiod=0): if namespace + '/' + name not in self.pods: raise KubeError({ 'message': 'Pod not found', 'reason': 'NotFound'}) spec = self.pods[namespace + '/' + name] del self.pods[namespace + '/' + name] spec['metadata']['deletionTimestamp'] = time.ctime(time.time()) return spec def waitForPodDeletion(self, namespace, name, timeout): if namespace + '/' + name in self.pods: raise TimeoutError("Did not see pod {name} terminate after {timeout}s".format( name=name, timeout=timeout )) return { 'kind': 'Status', 'reason': 'NotFound' } buildbot-2.6.0/master/buildbot/test/fake/latent.py000066400000000000000000000162141361162603000221520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import enum from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.trial.unittest import SkipTest from buildbot.test.fake.worker import SeverWorkerConnectionMixin from buildbot.worker import AbstractLatentWorker try: from buildbot_worker.bot import LocalWorker as RemoteWorker from buildbot_worker.base import BotBase except ImportError: RemoteWorker = None class States(enum.Enum): STOPPED = 0 STARTING = 1 STARTED = 2 STOPPING = 3 class LatentController(SeverWorkerConnectionMixin): """ A controller for ``ControllableLatentWorker``. 
https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html Note that by default workers will connect automatically if True is passed to start_instance(). Also by default workers will disconnect automatically just as stop_instance() is executed. """ def __init__(self, case, name, kind=None, build_wait_timeout=600, starts_without_substantiate=None, **kwargs): self.case = case self.build_wait_timeout = build_wait_timeout self.worker = ControllableLatentWorker(name, self, **kwargs) self.remote_worker = None if starts_without_substantiate is not None: self.worker.starts_without_substantiate = \ starts_without_substantiate self.state = States.STOPPED self.auto_stop_flag = False self.auto_start_flag = False self.auto_connect_worker = True self.auto_disconnect_worker = True self.kind = kind self._started_kind = None self._started_kind_deferred = None @property def starting(self): return self.state == States.STARTING @property def started(self): return self.state == States.STARTED @property def stopping(self): return self.state == States.STOPPING @property def stopped(self): return self.state == States.STOPPED def auto_start(self, result): self.auto_start_flag = result if self.auto_start_flag and self.state == States.STARTING: self.start_instance(True) def start_instance(self, result): self.do_start_instance(result) d, self._start_deferred = self._start_deferred, None d.callback(result) def do_start_instance(self, result): assert self.state == States.STARTING self.state = States.STARTED if self.auto_connect_worker and result is True: self.connect_worker() @defer.inlineCallbacks def auto_stop(self, result): self.auto_stop_flag = result if self.auto_stop_flag and self.state == States.STOPPING: yield self.stop_instance(True) @defer.inlineCallbacks def stop_instance(self, result): yield self.do_stop_instance() d, self._stop_deferred = self._stop_deferred, None d.callback(result) @defer.inlineCallbacks def do_stop_instance(self): assert self.state == States.STOPPING self.state = States.STOPPED self._started_kind = None if self.auto_disconnect_worker: yield self.disconnect_worker() def connect_worker(self): if self.remote_worker is not None: return if RemoteWorker is None: raise SkipTest("buildbot-worker package is not installed") workdir = FilePath(self.case.mktemp()) workdir.createDirectory() self.remote_worker = RemoteWorker(self.worker.name, workdir.path, False) self.remote_worker.setServiceParent(self.worker) def disconnect_worker(self): super().disconnect_worker() if self.remote_worker is None: return self.worker.conn, conn = None, self.worker.conn self.remote_worker, worker = None, self.remote_worker # LocalWorker does actually disconnect, so we must force disconnection # via detached. Note that the worker may have already detached if conn is not None: conn.loseConnection() return worker.disownServiceParent() def setup_kind(self, build): if build: self._started_kind_deferred = build.render(self.kind) else: self._started_kind_deferred = self.kind @defer.inlineCallbacks def get_started_kind(self): if self._started_kind_deferred: self._started_kind = yield self._started_kind_deferred self._started_kind_deferred = None return self._started_kind def patchBot(self, case, remoteMethod, patch): case.patch(BotBase, remoteMethod, patch) class ControllableLatentWorker(AbstractLatentWorker): """ A latent worker that can be controlled by tests. 
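
    It is normally created and driven through a ``LatentController`` rather
    than being instantiated directly. A rough sketch (the names below are
    illustrative only):

        controller = LatentController(self, 'worker1')
        # ... register controller.worker in the master config under test ...
        controller.start_instance(True)   # completes a pending substantiation
        controller.auto_stop(True)        # let insubstantiation finish by itself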
""" builds_may_be_incompatible = True def __init__(self, name, controller, **kwargs): self._controller = controller AbstractLatentWorker.__init__(self, name, None, **kwargs) def checkConfig(self, name, _, **kwargs): AbstractLatentWorker.checkConfig( self, name, None, build_wait_timeout=self._controller.build_wait_timeout, **kwargs) def reconfigService(self, name, _, **kwargs): AbstractLatentWorker.reconfigService( self, name, None, build_wait_timeout=self._controller.build_wait_timeout, **kwargs) @defer.inlineCallbacks def isCompatibleWithBuild(self, build_props): if self._controller.state == States.STOPPED: return True requested_kind = yield build_props.render((self._controller.kind)) curr_kind = yield self._controller.get_started_kind() return requested_kind == curr_kind def start_instance(self, build): self._controller.setup_kind(build) assert self._controller.state == States.STOPPED self._controller.state = States.STARTING if self._controller.auto_start_flag: self._controller.do_start_instance(True) return defer.succeed(True) self._controller._start_deferred = defer.Deferred() return self._controller._start_deferred @defer.inlineCallbacks def stop_instance(self, fast): assert self._controller.state == States.STARTED self._controller.state = States.STOPPING if self._controller.auto_stop_flag: yield self._controller.do_stop_instance() return True self._controller._stop_deferred = defer.Deferred() return (yield self._controller._stop_deferred) buildbot-2.6.0/master/buildbot/test/fake/libvirt.py000066400000000000000000000032671361162603000223420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class Domain: def __init__(self, name, conn): self.conn = conn self._name = name self.running = False def name(self): return self._name def create(self): self.running = True def shutdown(self): self.running = False def destroy(self): self.running = False del self.conn[self._name] class Connection: def __init__(self, uri): self.uri = uri self.domains = {} def createXML(self, xml, flags): # FIXME: This should really parse the name out of the xml, i guess d = self.fake_add("instance") d.running = True return d def listDomainsID(self): return list(self.domains) def lookupByName(self, name): return self.domains[name] def lookupByID(self, ID): return self.domains[ID] def fake_add(self, name): d = Domain(name, self) self.domains[name] = d return d def open(uri): return Connection(uri) buildbot-2.6.0/master/buildbot/test/fake/logfile.py000066400000000000000000000056451361162603000223120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot import util from buildbot.util import lineboundaries class FakeLogFile: def __init__(self, name, step): self.name = name self.header = '' self.stdout = '' self.stderr = '' self.lbfs = {} self.finished = False self.step = step self.subPoint = util.subscription.SubscriptionPoint("%r log" % (name,)) def getName(self): return self.name def subscribe(self, callback): log.msg("NOTE: fake logfile subscription never produces anything") return self.subPoint.subscribe(callback) def _getLbf(self, stream, meth): try: return self.lbfs[stream] except KeyError: def wholeLines(lines): if not isinstance(lines, str): lines = lines.decode('utf-8') if self.name in self.step.logobservers: for obs in self.step.logobservers[self.name]: getattr(obs, meth)(lines) lbf = self.lbfs[stream] = \ lineboundaries.LineBoundaryFinder(wholeLines) return lbf def addHeader(self, text): self.header += text self._getLbf('h', 'headerReceived').append(text) return defer.succeed(None) def addStdout(self, text): self.stdout += text self._getLbf('o', 'outReceived').append(text) return defer.succeed(None) def addStderr(self, text): self.stderr += text self._getLbf('e', 'errReceived').append(text) return defer.succeed(None) def isFinished(self): return self.finished def waitUntilFinished(self): log.msg("NOTE: fake waitUntilFinished doesn't actually wait") return defer.Deferred() def flushFakeLogfile(self): for lbf in self.lbfs.values(): lbf.flush() def finish(self): self.flushFakeLogfile() self.finished = True return defer.succeed(None) def fakeData(self, header='', stdout='', stderr=''): if header: self.header += header if stdout: self.stdout += stdout if stderr: self.stderr += stderr buildbot-2.6.0/master/buildbot/test/fake/machine.py000066400000000000000000000047151361162603000222720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
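#
# Usage sketch (illustrative only; not part of the original module). Tests
# normally create the machine through its controller and resolve the
# start/stop deferreds explicitly:
#
#     controller = LatentMachineController('machine1')
#     # ... register controller.machine with the master under test ...
#     controller.start_machine(True)    # completes a pending start_machine()
#     controller.stop_machine(True)     # completes a pending stop_machine()
#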
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.machine.latent import AbstractLatentMachine from buildbot.machine.latent import States as MachineStates from buildbot.util import service class FakeMachineManager(service.AsyncMultiService): name = 'MachineManager' @property def machines(self): return self.namedServices def getMachineByName(self, name): if name in self.machines: return self.machines[name] return None class LatentMachineController: """ A controller for ``ControllableLatentMachine`` """ def __init__(self, name, **kwargs): self.machine = ControllableLatentMachine(name, self, **kwargs) self._start_deferred = None self._stop_deferred = None def start_machine(self, result): assert self.machine.state == MachineStates.STARTING d, self._start_deferred = self._start_deferred, None if isinstance(result, Exception): d.errback(result) else: d.callback(result) def stop_machine(self, result=True): assert self.machine.state == MachineStates.STOPPING d, self._stop_deferred = self._stop_deferred, None if isinstance(result, Exception): d.errback(result) else: d.callback(result) class ControllableLatentMachine(AbstractLatentMachine): """ A latent machine that can be controlled by tests """ def __init__(self, name, controller, **kwargs): self._controller = controller super().__init__(name, **kwargs) def start_machine(self): d = defer.Deferred() self._controller._start_deferred = d return d def stop_machine(self): d = defer.Deferred() self._controller._stop_deferred = d return d buildbot-2.6.0/master/buildbot/test/fake/openstack.py000066400000000000000000000112741361162603000226530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. 
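#
# Usage sketch (illustrative only): tests typically patch the real novaclient
# module with this fake and then drive it like the real client, e.g.:
#
#     session = Session(PasswordAuth('http://keystone', 'pass', 'proj', 'user'))
#     nova = Client('2', session)
#     instance = nova.servers.create('name', image=TEST_UUIDS['image'])
#     # the status starts as 'BUILD(networking)' and flips to ACTIVE (or ERROR)
#     # after a few .get() polls, mimicking the real API
#     nova.servers.get(instance.id)
#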
import uuid ACTIVE = 'ACTIVE' BUILD = 'BUILD' DELETED = 'DELETED' ERROR = 'ERROR' UNKNOWN = 'UNKNOWN' TEST_UUIDS = { 'image': '28a65eb4-f354-4420-97dc-253b826547f7', 'volume': '65fbb9f1-c4d5-40a8-a233-ad47c52bb837', 'snapshot': 'ab89152d-3c26-4d30-9ae5-65b705f874b7', } class FakeNovaClient(): region_name = "" # Parts used from novaclient class Client(): def __init__(self, version, session): self.images = ItemManager() self.images._add_items([Image(TEST_UUIDS['image'], 'CirrOS 0.3.4', 13287936)]) self.volumes = ItemManager() self.volumes._add_items([Volume(TEST_UUIDS['volume'], 'CirrOS 0.3.4', 4)]) self.volume_snapshots = ItemManager() self.volume_snapshots._add_items([Snapshot(TEST_UUIDS['snapshot'], 'CirrOS 0.3.4', 2)]) self.servers = Servers() self.session = session self.client = FakeNovaClient() class ItemManager(): def __init__(self): self._items = {} def _add_items(self, new_items): for item in new_items: self._items[item.id] = item def list(self): return self._items.values() def get(self, uuid): if uuid in self._items: return self._items[uuid] else: raise NotFound # This exists because Image needs an attribute that isn't supported by # namedtuple. And once the base code is there might as well have Volume and # Snapshot use it too. class Item(): def __init__(self, id, name, size): self.id = id self.name = name self.size = size class Image(Item): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) setattr(self, 'OS-EXT-IMG-SIZE:size', self.size) class Volume(Item): pass class Snapshot(Item): pass class Servers(): fail_to_get = False fail_to_start = False gets_until_active = 3 gets_until_disappears = 1 instances = {} def create(self, *boot_args, **boot_kwargs): instance_id = uuid.uuid4() instance = Instance(instance_id, self, boot_args, boot_kwargs) self.instances[instance_id] = instance return instance def get(self, instance_id): if instance_id not in self.instances: raise NotFound inst = self.instances[instance_id] if not self.fail_to_get or inst.gets < self.gets_until_disappears: if not inst.status.startswith('BUILD'): return inst inst.gets += 1 if inst.gets >= self.gets_until_active: if not self.fail_to_start: inst.status = ACTIVE else: inst.status = ERROR return inst else: raise NotFound def delete(self, instance_id): if instance_id in self.instances: del self.instances[instance_id] # This is returned by Servers.create(). class Instance(): def __init__(self, id, servers, boot_args, boot_kwargs): self.id = id self.servers = servers self.boot_args = boot_args self.boot_kwargs = boot_kwargs self.gets = 0 self.status = 'BUILD(networking)' self.name = 'name' def delete(self): self.servers.delete(self.id) # Parts used from novaclient.exceptions. class NotFound(Exception): pass # Parts used from keystoneauth1. def get_plugin_loader(plugin_type): return PasswordLoader() class PasswordLoader(): def load_from_options(self, **kwargs): return PasswordAuth(**kwargs) class PasswordAuth(): def __init__(self, auth_url, password, project_name, username, user_domain_name=None, project_domain_name=None): self.auth_url = auth_url self.password = password self.project_name = project_name self.username = username self.user_domain_name = user_domain_name self.project_domain_name = project_domain_name class Session(): def __init__(self, auth): self.auth = auth buildbot-2.6.0/master/buildbot/test/fake/pbmanager.py000066400000000000000000000034521361162603000226170ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service class FakePBManager(service.AsyncMultiService): def __init__(self): super().__init__() self.setName("fake-pbmanager") self._registrations = [] self._unregistrations = [] def register(self, portstr, username, password, pfactory): if (portstr, username) not in self._registrations: reg = FakeRegistration(self, portstr, username) self._registrations.append((portstr, username, password)) return defer.succeed(reg) else: raise KeyError("username '%s' is already registered on port %s" % (username, portstr)) def _unregister(self, portstr, username): self._unregistrations.append((portstr, username)) return defer.succeed(None) class FakeRegistration: def __init__(self, pbmanager, portstr, username): self._portstr = portstr self._username = username self._pbmanager = pbmanager def unregister(self): self._pbmanager._unregister(self._portstr, self._username) buildbot-2.6.0/master/buildbot/test/fake/private_tempdir.py000066400000000000000000000025661361162603000240660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os class FakePrivateTemporaryDirectory: def __init__(self, suffix=None, prefix=None, dir=None, mode=0o700): dir = dir or '/' prefix = prefix or '' suffix = suffix or '' self.name = os.path.join(dir, prefix + '@@@' + suffix) self.mode = mode def __enter__(self): return self.name def __exit__(self, exc, value, tb): pass def cleanup(self): pass class MockPrivateTemporaryDirectory: def __init__(self): self.dirs = [] def __call__(self, *args, **kwargs): ret = FakePrivateTemporaryDirectory(*args, **kwargs) self.dirs.append((ret.name, ret.mode)) return ret buildbot-2.6.0/master/buildbot/test/fake/reactor.py000066400000000000000000000136151361162603000223240ustar00rootroot00000000000000# Copyright Buildbot Team Members # Portions copyright 2015-2016 ClusterHQ Inc. 
# # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from twisted.internet import defer from twisted.internet import reactor from twisted.internet.base import _ThreePhaseEvent from twisted.internet.interfaces import IReactorCore from twisted.internet.interfaces import IReactorThreads from twisted.internet.task import Clock from twisted.python import log from twisted.python.failure import Failure from zope.interface import implementer # The code here is based on the implementations in # https://twistedmatrix.com/trac/ticket/8295 # https://twistedmatrix.com/trac/ticket/8296 @implementer(IReactorCore) class CoreReactor: """ Partial implementation of ``IReactorCore``. """ def __init__(self): super().__init__() self._triggers = {} def addSystemEventTrigger(self, phase, eventType, f, *args, **kw): event = self._triggers.setdefault(eventType, _ThreePhaseEvent()) return eventType, event.addTrigger(phase, f, *args, **kw) def removeSystemEventTrigger(self, triggerID): eventType, handle = triggerID event = self._triggers.setdefault(eventType, _ThreePhaseEvent()) event.removeTrigger(handle) def fireSystemEvent(self, eventType): event = self._triggers.get(eventType) if event is not None: event.fireEvent() def callWhenRunning(self, f, *args, **kwargs): f(*args, **kwargs) class NonThreadPool: """ A stand-in for ``twisted.python.threadpool.ThreadPool`` so that the majority of the test suite does not need to use multithreading. This implementation takes the function call which is meant to run in a thread pool and runs it synchronously in the calling thread. :ivar int calls: The number of calls which have been dispatched to this object. """ calls = 0 def __init__(self, **kwargs): pass def callInThreadWithCallback(self, onResult, func, *args, **kw): self.calls += 1 try: result = func(*args, **kw) except: # noqa pylint: disable=bare-except # We catch *everything* here, since normally this code would be # running in a thread, where there is nothing that will catch # error. onResult(False, Failure()) else: onResult(True, result) def start(self): pass def stop(self): pass @implementer(IReactorThreads) class NonReactor: """ A partial implementation of ``IReactorThreads`` which fits into the execution model defined by ``NonThreadPool``. 
""" def callFromThread(self, f, *args, **kwargs): f(*args, **kwargs) def getThreadPool(self): return NonThreadPool() class TestReactor(NonReactor, CoreReactor, Clock): def __init__(self): super().__init__() # whether there are calls that should run right now self._pendingCurrentCalls = False self.stop_called = False def _executeCurrentDelayedCalls(self): while self.getDelayedCalls(): first = sorted(self.getDelayedCalls(), key=lambda a: a.getTime())[0] if first.getTime() > self.seconds(): break self.advance(0) self._pendingCurrentCalls = False @defer.inlineCallbacks def _catchPrintExceptions(self, what, *a, **kw): try: r = what(*a, **kw) if isinstance(r, defer.Deferred): yield r except Exception as e: log.msg('Unhandled exception from deferred when doing ' 'TestReactor.advance()', e) raise def callLater(self, when, what, *a, **kw): # Buildbot often uses callLater(0, ...) to defer execution of certain # code to the next iteration of the reactor. This means that often # there are pending callbacks registered to the reactor that might # block other code from proceeding unless the test reactor has an # iteration. To avoid deadlocks in tests we give the real reactor a # chance to advance the test reactor whenever we detect that there # are callbacks that should run in the next iteration of the test # reactor. # # Additionally, we wrap all calls with a function that prints any # unhandled exceptions if when <= 0 and not self._pendingCurrentCalls: reactor.callLater(0, self._executeCurrentDelayedCalls) return super().callLater(when, self._catchPrintExceptions, what, *a, **kw) def stop(self): # first fire pending calls until the current time. Note that the real # reactor only advances until the current time in the case of shutdown. self.advance(0) # then, fire the shutdown event self.fireSystemEvent('shutdown') self.stop_called = True buildbot-2.6.0/master/buildbot/test/fake/remotecommand.py000066400000000000000000000261641361162603000235220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import functools from twisted.internet import defer from twisted.python import failure from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.test.fake import logfile class FakeRemoteCommand: # callers should set this to the running TestCase instance testcase = None active = False def __init__(self, remote_command, args, ignore_updates=False, collectStdout=False, collectStderr=False, decodeRC=None, stdioLogName='stdio'): if decodeRC is None: decodeRC = {0: SUCCESS} # copy the args and set a few defaults self.remote_command = remote_command self.args = args.copy() self.logs = {} self.delayedLogs = {} self.rc = -999 self.collectStdout = collectStdout self.collectStderr = collectStderr self.updates = {} self.decodeRC = decodeRC self.stdioLogName = stdioLogName if collectStdout: self.stdout = '' if collectStderr: self.stderr = '' def run(self, step, conn, builder_name): # delegate back to the test case return self.testcase._remotecommand_run(self, step, conn, builder_name) def useLog(self, log_, closeWhenFinished=False, logfileName=None): if not logfileName: logfileName = log_.getName() self.logs[logfileName] = log_ def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False): self.delayedLogs[logfileName] = (activateCallBack, closeWhenFinished) def interrupt(self, why): raise NotImplementedError def results(self): if self.rc in self.decodeRC: return self.decodeRC[self.rc] return FAILURE def didFail(self): return self.results() == FAILURE def fakeLogData(self, step, log, header='', stdout='', stderr=''): # note that this should not be used in the same test as useLog(Delayed) self.logs[log] = fakelog = logfile.FakeLogFile(log, step) fakelog.fakeData(header=header, stdout=stdout, stderr=stderr) def __repr__(self): return "FakeRemoteCommand(" + repr(self.remote_command) + "," + repr(self.args) + ")" class FakeRemoteShellCommand(FakeRemoteCommand): def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, timeout=20 * 60, maxTime=None, sigtermTime=None, logfiles=None, usePTY=None, logEnviron=True, collectStdout=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio'): if logfiles is None: logfiles = {} if decodeRC is None: decodeRC = {0: SUCCESS} args = dict(workdir=workdir, command=command, env=env or {}, want_stdout=want_stdout, want_stderr=want_stderr, initial_stdin=initialStdin, timeout=timeout, maxTime=maxTime, logfiles=logfiles, usePTY=usePTY, logEnviron=logEnviron) super().__init__("shell", args, collectStdout=collectStdout, collectStderr=collectStderr, decodeRC=decodeRC, stdioLogName=stdioLogName) class ExpectRemoteRef: """ Define an expected RemoteReference in the args to an L{Expect} class """ def __init__(self, rrclass): self.rrclass = rrclass def __eq__(self, other): return isinstance(other, self.rrclass) class Expect: """ Define an expected L{RemoteCommand}, with the same arguments Extra behaviors of the remote command can be added to the instance, using class methods. Use L{Expect.log} to add a logfile, L{Expect.update} to add an arbitrary update, or add an integer to specify the return code (rc), or add a Failure instance to raise an exception. Additionally, use L{Expect.behavior}, passing a callable that will be invoked with the real command and can do what it likes: def custom_behavior(command): ... Expect('somecommand', { args='foo' }) + Expect.behavior(custom_behavior), ... 
        Expect('somecommand', { args='foo' })
            + Expect.log('stdio', stdout='foo!')
            + Expect.log('config.log', stdout='some info')
            + Expect.update('status', 'running')
            + 0,      # (specifies the rc)
        ...

    """

    def __init__(self, remote_command, args, incomparable_args=None):
        """
        Expect a command named C{remote_command}, with args C{args}. Any args
        in C{incomparable_args} are not compared, but must exist.
        """
        if incomparable_args is None:
            incomparable_args = []
        self.remote_command = remote_command
        self.incomparable_args = incomparable_args
        self.args = args
        self.result = None
        self.behaviors = []

    @classmethod
    def behavior(cls, callable):
        """
        Add an arbitrary behavior that is expected of this command.
        C{callable} will be invoked with the real command as an argument, and
        can do what it wishes. It will be invoked with maybeDeferred, in case
        the operation is asynchronous.
        """
        return ('callable', callable)

    @classmethod
    def log(self, name, **streams):
        return ('log', name, streams)

    @classmethod
    def update(self, name, value):
        return ('update', name, value)

    def __add__(self, other):
        # special-case adding an integer (return code) or failure (error)
        if isinstance(other, int):
            self.behaviors.append(('rc', other))
        elif isinstance(other, failure.Failure):
            self.behaviors.append(('err', other))
        else:
            self.behaviors.append(other)
        return self

    def runBehavior(self, behavior, args, command):
        """
        Implement the given behavior. Returns a Deferred.
        """
        if behavior == 'rc':
            command.rc = args[0]
            d = defer.succeed(None)
            for log in command.logs.values():
                if hasattr(log, 'unwrap'):
                    # We're handling an old style log that was
                    # used in an old style step. We handle the necessary
                    # stuff to make the sync/async log hack work.
                    d.addCallback(
                        functools.partial(lambda log, _: log.unwrap(), log))
                    d.addCallback(lambda l: l.flushFakeLogfile())
            return d
        elif behavior == 'err':
            return defer.fail(args[0])
        elif behavior == 'update':
            command.updates.setdefault(args[0], []).append(args[1])
        elif behavior == 'log':
            name, streams = args
            if 'header' in streams:
                command.logs[name].addHeader(streams['header'])
            if 'stdout' in streams:
                command.logs[name].addStdout(streams['stdout'])
                if command.collectStdout:
                    command.stdout += streams['stdout']
            if 'stderr' in streams:
                command.logs[name].addStderr(streams['stderr'])
                if command.collectStderr:
                    command.stderr += streams['stderr']
        elif behavior == 'callable':
            return defer.maybeDeferred(lambda: args[0](command))
        else:
            return defer.fail(failure.Failure(
                AssertionError('invalid behavior %s' % behavior)))
        return defer.succeed(None)

    @defer.inlineCallbacks
    def runBehaviors(self, command):
        """
        Run all expected behaviors for this command
        """
        for behavior in self.behaviors:
            yield self.runBehavior(behavior[0], behavior[1:], command)

    def expectationPassed(self, exp):
        """
        Some expectations need to be able to distinguish pass/fail of
        nested expectations.

        This will get invoked once for every nested expectation and once for
        self unless anything fails. Failures are passed to
        raiseExpectationFailure for handling.

        @param exp: The nested expectation that passed or self.
        """

    def raiseExpectationFailure(self, exp, failure):
        """
        Some expectations may wish to suppress failure.
        The default expectation does not.

        This will get invoked if the expectation fails on a command.

        @param exp: the expectation that failed. This could be self or a
            nested expectation.
        """
        raise failure

    def shouldAssertCommandEqualExpectation(self):
        """
        Whether or not we should validate that the current command matches the
        expectation.
Some expectations may not have a way to match a command. """ return True def shouldRunBehaviors(self): """ Whether or not, once the command matches the expectation, the behaviors should be run for this step. """ return True def shouldKeepMatchingAfter(self, command): """ Expectations are by default not kept matching multiple commands. Return True if you want to re-use a command for multiple commands. """ return False def nestedExpectations(self): """ Any sub-expectations that should be validated. """ return [] def __repr__(self): return "Expect(" + repr(self.remote_command) + ")" class ExpectShell(Expect): """ Define an expected L{RemoteShellCommand}, with the same arguments Any non-default arguments must be specified explicitly (e.g., usePTY). """ def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, initialStdin=None, timeout=20 * 60, maxTime=None, logfiles=None, usePTY=None, logEnviron=True): if env is None: env = {} if logfiles is None: logfiles = {} args = dict(workdir=workdir, command=command, env=env, want_stdout=want_stdout, want_stderr=want_stderr, initial_stdin=initialStdin, timeout=timeout, maxTime=maxTime, logfiles=logfiles, usePTY=usePTY, logEnviron=logEnviron) super().__init__("shell", args) def __repr__(self): return "ExpectShell(" + repr(self.remote_command) + repr(self.args['command']) + ")" buildbot-2.6.0/master/buildbot/test/fake/secrets.py000066400000000000000000000006241361162603000223310ustar00rootroot00000000000000 from buildbot.secrets.providers.base import SecretProviderBase class FakeSecretStorage(SecretProviderBase): name = "SecretsInFake" def reconfigService(self, secretdict=None): if secretdict is None: secretdict = {} self.allsecrets = secretdict def get(self, key): if key in self.allsecrets: return self.allsecrets[key] return None buildbot-2.6.0/master/buildbot/test/fake/state.py000066400000000000000000000017761361162603000220120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members class State: """ A simple class you can use to keep track of state throughout a test. Just assign whatever you want to its attributes. Its constructor provides a shortcut to setting initial values for attributes """ def __init__(self, **kwargs): self.__dict__.update(kwargs) buildbot-2.6.0/master/buildbot/test/fake/step.py000066400000000000000000000042171361162603000216360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from twisted.internet import defer

from buildbot.process.buildstep import BuildStep
from buildbot.process.results import CANCELLED


class BuildStepController:
    """
    A controller for ``ControllableBuildStep``.

    https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html
    """

    def __init__(self, **kwargs):
        self.step = ControllableBuildStep(self, **kwargs)
        self.running = False
        self.auto_finish_results = None

    def finish_step(self, result):
        assert self.running
        self.running = False
        d, self._run_deferred = self._run_deferred, None
        d.callback(result)

    def auto_finish_step(self, result):
        self.auto_finish_results = result
        if self.running:
            self.finish_step(result)


class ControllableBuildStep(BuildStep):
    """
    A build step that can be controlled by tests.
    """

    name = "controllableStep"

    def __init__(self, controller, **kwargs):
        super().__init__(**kwargs)
        self._controller = controller

    def run(self):
        if self._controller.auto_finish_results is not None:
            return defer.succeed(self._controller.auto_finish_results)
        assert not self._controller.running
        self._controller.running = True
        self._controller._run_deferred = defer.Deferred()
        return self._controller._run_deferred

    def interrupt(self, reason):
        self._controller.finish_step(CANCELLED)
buildbot-2.6.0/master/buildbot/test/fake/web.py000066400000000000000000000066711361162603000214420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from io import BytesIO

from mock import Mock

from twisted.internet import defer
from twisted.web import server

from buildbot.test.fake import fakemaster


def fakeMasterForHooks(testcase):
    # testcase must derive from TestReactorMixin and setUpTestReactor()
    # must be called before calling this function.
    master = fakemaster.make_master(testcase, wantData=True)
    master.www = Mock()
    return master


class FakeRequest(Mock):
    """
    A fake Twisted Web Request object, including some pointers to the
    buildmaster and an addChange method on that master which will append its
    arguments to self.addedChanges.
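
    A rough usage sketch for a change-hook test (the resource and the hook
    path below are illustrative only):

        request = FakeRequest(args={b'project': [b'p']})
        request.uri = b'/change_hook/base'
        request.method = b'POST'
        yield request.test_render(resource)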
""" written = b'' finished = False redirected_to = None failure = None def __init__(self, args=None, content=b''): super().__init__() if args is None: args = {} self.args = args self.content = BytesIO(content) self.site = Mock() self.site.buildbot_service = Mock() self.uri = b'/' self.prepath = [] self.method = b'GET' self.received_headers = {} self.deferred = defer.Deferred() def getHeader(self, key): return self.received_headers.get(key) def write(self, data): self.written = self.written + data def redirect(self, url): self.redirected_to = url def finish(self): self.finished = True self.deferred.callback(None) def processingFailed(self, f): self.deferred.errback(f) # work around http://code.google.com/p/mock/issues/detail?id=105 def _get_child_mock(self, **kw): return Mock(**kw) # cribed from twisted.web.test._util._render def test_render(self, resource): for arg in self.args: if not isinstance(arg, bytes): raise ValueError("self.args: {!r}, contains " "values which are not bytes".format(self.args)) if self.uri and not isinstance(self.uri, bytes): raise ValueError("self.uri: {!r} is {}, not bytes".format( self.uri, type(self.uri))) if self.method and not isinstance(self.method, bytes): raise ValueError("self.method: {!r} is {}, not bytes".format( self.method, type(self.method))) result = resource.render(self) if isinstance(result, bytes): self.write(result) self.finish() return self.deferred elif isinstance(result, str): raise ValueError("{!r} should return bytes, not {}: {!r}".format( resource.render, type(result), result)) elif result is server.NOT_DONE_YET: return self.deferred else: raise ValueError("Unexpected return value: {!r}".format(result)) buildbot-2.6.0/master/buildbot/test/fake/worker.py000066400000000000000000000125421361162603000221740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.spread import pb from twisted.trial.unittest import SkipTest from buildbot.process import properties from buildbot.test.fake import fakeprotocol from buildbot.worker import Worker try: from buildbot_worker.bot import LocalWorker as RemoteWorker except ImportError: RemoteWorker = None class FakeWorker: workername = 'test' def __init__(self, master): self.master = master self.conn = fakeprotocol.FakeConnection(master, self) self.properties = properties.Properties() self.defaultProperties = properties.Properties() self.workerid = 383 def acquireLocks(self): pass def releaseLocks(self): pass def attached(self, conn): self.worker_system = 'posix' self.path_module = os.path self.workerid = 1234 self.worker_basedir = '/wrk' return defer.succeed(None) def detached(self): pass def addWorkerForBuilder(self, wfb): pass def removeWorkerForBuilder(self, wfb): pass def buildFinished(self, wfb): pass def canStartBuild(self): pass def putInQuarantine(self): pass def resetQuarantine(self): pass class SeverWorkerConnectionMixin: _connection_severed = False _severed_deferreds = None def disconnect_worker(self): if not self._connection_severed: return if self._severed_deferreds is not None: for d in self._severed_deferreds: d.errback(pb.PBConnectionLost('lost connection')) self._connection_severed = False def sever_connection(self): # stubs the worker connection so that it appears that the TCP connection # has been severed in a way that no response is ever received, but # messages don't fail immediately. All callback will be called when # disconnect_worker is called self._connection_severed = True def register_deferred(): d = defer.Deferred() if self._severed_deferreds is None: self._severed_deferreds = [] self._severed_deferreds.append(d) return d def remotePrint(message): return register_deferred() self.worker.conn.remotePrint = remotePrint def remoteGetWorkerInfo(): return register_deferred() self.worker.conn.remoteGetWorkerInfo = remoteGetWorkerInfo def remoteSetBuilderList(builders): return register_deferred() self.worker.conn.remoteSetBuilderList = remoteSetBuilderList def remoteStartCommand(remoteCommand, builderName, commandId, commandName, args): return register_deferred() self.worker.conn.remoteStartCommand = remoteStartCommand def remoteShutdown(): return register_deferred() self.worker.conn.remoteShutdown = remoteShutdown def remoteStartBuild(builderName): return register_deferred() self.worker.conn.remoteStartBuild = remoteStartBuild def remoteInterruptCommand(builderName, commandId, why): return register_deferred() self.worker.conn.remoteInterruptCommand = remoteInterruptCommand class WorkerController(SeverWorkerConnectionMixin): """ A controller for a ``Worker``. 
https://glyph.twistedmatrix.com/2015/05/separate-your-fakes-and-your-inspectors.html """ def __init__(self, case, name, build_wait_timeout=600, worker_class=None, **kwargs): if worker_class is None: worker_class = Worker self.case = case self.build_wait_timeout = build_wait_timeout self.worker = worker_class(name, self, **kwargs) self.remote_worker = None def connect_worker(self): if self.remote_worker is not None: return if RemoteWorker is None: raise SkipTest("buildbot-worker package is not installed") workdir = FilePath(self.case.mktemp()) workdir.createDirectory() self.remote_worker = RemoteWorker(self.worker.name, workdir.path, False) self.remote_worker.setServiceParent(self.worker) def disconnect_worker(self): super().disconnect_worker() if self.remote_worker is None: return self.worker.conn, conn = None, self.worker.conn # LocalWorker does actually disconnect, so we must force disconnection # via detached conn.notifyDisconnected() ret = self.remote_worker.disownServiceParent() self.remote_worker = None return ret buildbot-2.6.0/master/buildbot/test/fuzz/000077500000000000000000000000001361162603000203755ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/fuzz/__init__.py000066400000000000000000000000001361162603000224740ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/fuzz/test_lru.py000066400000000000000000000065171361162603000226210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import random from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from buildbot.test.util import fuzz from buildbot.util import lru # construct weakref-able objects for particular keys def short(k): return set([k.upper() * 3]) def long(k): return set([k.upper() * 6]) def deferUntilLater(secs, result=None): d = defer.Deferred() reactor.callLater(secs, d.callback, result) return d class LRUCacheFuzzer(fuzz.FuzzTestCase): FUZZ_TIME = 60 def setUp(self): lru.inv_failed = False def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") if hasattr(self, 'lru'): log.msg("hits: %d; misses: %d; refhits: %d" % (self.lru.hits, self.lru.misses, self.lru.refhits)) # tests @defer.inlineCallbacks def do_fuzz(self, endTime): lru.inv_failed = False def delayed_miss_fn(key): return deferUntilLater(random.uniform(0.001, 0.002), set([key + 1000])) self.lru = lru.AsyncLRUCache(delayed_miss_fn, 50) keys = list(range(250)) errors = [] # bail out early in the event of an error results = [] # keep references to (most) results # fire off as many requests as we can in one second, with lots of # overlap. 
while not errors and reactor.seconds() < endTime: key = random.choice(keys) d = self.lru.get(key) def check(result, key): self.assertEqual(result, set([key + 1000])) if random.uniform(0, 1.0) < 0.9: results.append(result) results[:-100] = [] d.addCallback(check, key) @d.addErrback def eb(f): errors.append(f) return f # unhandled error -> in the logs # give the reactor some time to process pending events if random.uniform(0, 1.0) < 0.5: yield deferUntilLater(0) # now wait until all of the pending calls have cleared, noting that # this method will be counted as one delayed call, in the current # implementation while len(reactor.getDelayedCalls()) > 1: # give the reactor some time to process pending events yield deferUntilLater(0.001) self.assertFalse(lru.inv_failed, "invariant failed; see logs") log.msg("hits: %d; misses: %d; refhits: %d" % (self.lru.hits, self.lru.misses, self.lru.refhits)) buildbot-2.6.0/master/buildbot/test/integration/000077500000000000000000000000001361162603000217225ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/integration/README.txt000066400000000000000000000003331361162603000234170ustar00rootroot00000000000000"Integration" tests are tests that exercise a significant chunk of the Buildbot code, and thus do not really count as unit tests. When debugging, get the unit tests working first, *then* work on the integration tests. buildbot-2.6.0/master/buildbot/test/integration/__init__.py000066400000000000000000000000001361162603000240210ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/integration/interop/000077500000000000000000000000001361162603000234025ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/integration/interop/__init__.py000066400000000000000000000015071361162603000255160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """Tests in this module are meant to be used for interoperability between different version of worker vs master (e.g py2 vs py3) """ buildbot-2.6.0/master/buildbot/test/integration/interop/test_commandmixin.py000066400000000000000000000062051361162603000275010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
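# ---------------------------------------------------------------------------
# Hypothetical, condensed version of the timing pattern in the LRU fuzzer
# above: a miss function that answers after a small random delay, driven by
# the same deferUntilLater helper. Meant to run under trial's reactor; the
# function name exercise_cache() is an assumption of this sketch.
# ---------------------------------------------------------------------------
import random

from twisted.internet import defer
from twisted.internet import reactor

from buildbot.util import lru


def defer_until_later(secs, result=None):
    # Same idea as deferUntilLater above: a Deferred that fires after `secs`.
    d = defer.Deferred()
    reactor.callLater(secs, d.callback, result)
    return d


@defer.inlineCallbacks
def exercise_cache():
    # miss_fn answers asynchronously, like delayed_miss_fn in the fuzzer.
    def delayed_miss_fn(key):
        return defer_until_later(random.uniform(0.001, 0.002), set([key + 1000]))

    cache = lru.AsyncLRUCache(delayed_miss_fn, 50)
    first = yield cache.get(7)    # miss: computed via delayed_miss_fn
    second = yield cache.get(7)   # hit: served from the cache
    assert first == second == set([1007])
    return cache.hits, cache.misses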
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import results from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import CommandMixin from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # and makes sure the command mixin is working. class CommandMixinMaster(RunMasterBase): @defer.inlineCallbacks def test_commandmixin(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], results.SUCCESS) class CommandMixinMasterPB(CommandMixinMaster): proto = "pb" class TestCommandMixinStep(BuildStep, CommandMixin): @defer.inlineCallbacks def run(self): contents = yield self.runGlob('*') if contents != []: return results.FAILURE hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE yield self.runMkdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if not hasPath: return results.FAILURE contents = yield self.runGlob('*') if not contents[0].endswith('composite_mixin_test'): return results.FAILURE yield self.runRmdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE return results.SUCCESS # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(TestCommandMixinStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_compositestepmixin.py000066400000000000000000000064371361162603000307700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process import results from buildbot.process.buildstep import BuildStep from buildbot.steps.worker import CompositeStepMixin from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # and makes sure the composite step mixin is working. 
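# ---------------------------------------------------------------------------
# Hypothetical step sketch using only the CommandMixin helpers exercised by
# TestCommandMixinStep above (pathExists, runMkdir). The class name and the
# 'scratch' directory are placeholders invented for this example.
# ---------------------------------------------------------------------------
from twisted.internet import defer
from buildbot.process import results
from buildbot.process.buildstep import BuildStep
from buildbot.process.buildstep import CommandMixin


class EnsureScratchDir(BuildStep, CommandMixin):
    name = 'ensure-scratch-dir'

    @defer.inlineCallbacks
    def run(self):
        # Create the directory on the worker only if it is missing.
        exists = yield self.pathExists('scratch')
        if not exists:
            yield self.runMkdir('scratch')
        exists = yield self.pathExists('scratch')
        return results.SUCCESS if exists else results.FAILURE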
class CompositeStepMixinMaster(RunMasterBase): @defer.inlineCallbacks def test_compositemixin(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], results.SUCCESS) class CompositeStepMixinMasterPb(CompositeStepMixinMaster): proto = "pb" class TestCompositeMixinStep(BuildStep, CompositeStepMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.logEnviron = False @defer.inlineCallbacks def run(self): contents = yield self.runGlob('*') if contents != []: return results.FAILURE hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE yield self.runMkdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if not hasPath: return results.FAILURE contents = yield self.runGlob('*') if not contents[0].endswith('composite_mixin_test'): return results.FAILURE yield self.runRmdir('composite_mixin_test') hasPath = yield self.pathExists('composite_mixin_test') if hasPath: return results.FAILURE return results.SUCCESS # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(TestCompositeMixinStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_integration_secrets.py000066400000000000000000000105271361162603000310730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
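# ---------------------------------------------------------------------------
# Hypothetical skeleton of an interop test following the pattern used by
# CommandMixinMaster and CompositeStepMixinMaster above: set up a master
# config, force a build, assert on the result, and re-run over PB via the
# `proto` attribute. masterConfig() is assumed to be a module-level helper
# like the ones defined in the files above.
# ---------------------------------------------------------------------------
from twisted.internet import defer
from buildbot.process import results
from buildbot.test.util.integration import RunMasterBase


class ExampleInterop(RunMasterBase):
    @defer.inlineCallbacks
    def test_example(self):
        yield self.setupConfig(masterConfig())
        build = yield self.doForceBuild(wantSteps=True, wantLogs=True)
        self.assertEqual(build['results'], results.SUCCESS)


class ExampleInteropPB(ExampleInterop):
    # Re-run the same scenario over the PB protocol, as the classes above do.
    proto = "pb"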
# # Copyright Buildbot Team Members import os from parameterized import parameterized from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.reporters.http import HttpStatusPush from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.integration import RunMasterBase class FakeSecretReporter(HttpStatusPush): def send(self, build): assert self.auth == ('user', 'myhttppasswd') self.reported = True class SecretsConfig(RunMasterBase): @parameterized.expand([ ('with_interpolation', True), ('plain_command', False), ]) @defer.inlineCallbacks def test_secret(self, name, use_interpolation): c = masterConfig(use_interpolation) yield self.setupConfig(c) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) # check the command line res = yield self.checkBuildStepLogExist(build, "echo ") # also check the secrets are replaced in argv yield self.checkBuildStepLogExist(build, "argv:.*echo.*", regex=True) # also check that the correct value goes to the command if os.name == "posix" and use_interpolation: res &= yield self.checkBuildStepLogExist(build, "The password was there") self.assertTrue(res) # at this point, build contains all the log and steps info that is in the db # we check that our secret is not in there! self.assertNotIn("bar", repr(build)) self.assertTrue(c['services'][0].reported) @parameterized.expand([ ('with_interpolation', True), ('plain_command', False), ]) @defer.inlineCallbacks def test_secretReconfig(self, name, use_interpolation): c = masterConfig(use_interpolation) yield self.setupConfig(c) c['secretsProviders'] = [FakeSecretStorage( secretdict={"foo": "different_value", "something": "more"})] yield self.master.reconfig() build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "echo ") self.assertTrue(res) # at this point, build contains all the log and steps info that is in the db # we check that our secret is not in there! self.assertNotIn("different_value", repr(build)) class SecretsConfigPB(SecretsConfig): proto = "pb" # master configuration def masterConfig(use_interpolation): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers, steps, util c['services'] = [FakeSecretReporter('http://example.com/hook', auth=('user', Interpolate('%(secret:httppasswd)s')))] c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] c['secretsProviders'] = [FakeSecretStorage( secretdict={"foo": "bar", "something": "more", 'httppasswd': 'myhttppasswd'})] f = BuildFactory() if use_interpolation: if os.name == "posix": # on posix we can also check whether the password was passed to the command command = Interpolate('echo %(secret:foo)s | sed "s/bar/The password was there/"') else: command = Interpolate('echo %(secret:foo)s') else: command = ['echo', util.Secret('foo')] f.addStep(steps.ShellCommand(command=command)) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_interruptcommand.py000066400000000000000000000045021361162603000304070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
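# ---------------------------------------------------------------------------
# Hypothetical sketch of wiring a secret into a build step, using only pieces
# shown in the secrets test above (FakeSecretStorage, Interpolate,
# util.Secret). add_secret_step() is a made-up helper name; `c` is the master
# config dict and `f` the BuildFactory as built in masterConfig().
# ---------------------------------------------------------------------------
from buildbot.plugins import steps, util
from buildbot.process.properties import Interpolate
from buildbot.test.fake.secrets import FakeSecretStorage


def add_secret_step(c, f):
    c['secretsProviders'] = [FakeSecretStorage(secretdict={"foo": "bar"})]
    # Either render the secret into a single shell string ...
    f.addStep(steps.ShellCommand(command=Interpolate('echo %(secret:foo)s')))
    # ... or pass it as a separate argv element that is resolved at runtime.
    f.addStep(steps.ShellCommand(command=['echo', util.Secret('foo')]))
    return c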
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.results import CANCELLED from buildbot.test.util.decorators import flaky from buildbot.test.util.integration import RunMasterBase from buildbot.util import asyncSleep class InterruptCommand(RunMasterBase): """Make sure we can interrupt a command""" @flaky(bugNumber=4404, onPlatform='win32') @defer.inlineCallbacks def test_setProp(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True) self.assertEqual(build['steps'][-1]['results'], CANCELLED) class InterruptCommandPb(InterruptCommand): proto = "pb" # master configuration num_reconfig = 0 def masterConfig(): global num_reconfig num_reconfig += 1 c = {} from buildbot.plugins import schedulers, steps, util class SleepAndInterrupt(steps.ShellSequence): @defer.inlineCallbacks def run(self): if self.worker.worker_system == "nt": sleep = "waitfor SomethingThatIsNeverHappening /t 100 >nul 2>&1" else: sleep = ["sleep", "100"] d = self.runShellSequence([util.ShellArg(sleep)]) yield asyncSleep(1) self.interrupt("just testing") res = yield d return res c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = util.BuildFactory() f.addStep(SleepAndInterrupt()) c['builders'] = [ util.BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_setpropertyfromcommand.py000066400000000000000000000046461361162603000316500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.internet import task from buildbot.test.util.integration import RunMasterBase # This integration test helps reproduce http://trac.buildbot.net/ticket/3024 # we make sure that we can reconfigure the master while build is running class SetPropertyFromCommand(RunMasterBase): @defer.inlineCallbacks def test_setProp(self): yield self.setupConfig(masterConfig()) oldNewLog = self.master.data.updates.addLog @defer.inlineCallbacks def newLog(*arg, **kw): # Simulate db delay. 
We usually don't test race conditions # with delays, but in integrations test, that would be pretty # tricky yield task.deferLater(reactor, .1, lambda: None) res = yield oldNewLog(*arg, **kw) return res self.master.data.updates.addLog = newLog build = yield self.doForceBuild(wantProperties=True) self.assertEqual( build['properties']['test'], ('foo', 'SetPropertyFromCommand Step')) class SetPropertyFromCommandPB(SetPropertyFromCommand): proto = "pb" # master configuration num_reconfig = 0 def masterConfig(): global num_reconfig num_reconfig += 1 c = {} from buildbot.plugins import schedulers, steps, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = util.BuildFactory() f.addStep(steps.SetPropertyFromCommand( property="test", command=["echo", "foo"])) c['builders'] = [ util.BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_transfer.py000066400000000000000000000174511361162603000266470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil from twisted.internet import defer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.test.util.decorators import flaky from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment # and make sure the transfer steps are working # When new protocols are added, make sure you update this test to exercise # your proto implementation class TransferStepsMasterPb(RunMasterBase): proto = "pb" def readMasterDirContents(self, top): contents = {} for root, dirs, files in os.walk(top): for name in files: fn = os.path.join(root, name) with open(fn) as f: contents[fn] = f.read() return contents def get_config_single_step(self, step): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.FileUpload(workersrc="dir/noexist_path", masterdest="master_dest")) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c def get_non_existing_file_upload_config(self): from buildbot.plugins import steps step = steps.FileUpload(workersrc="dir/noexist_path", masterdest="master_dest") return self.get_config_single_step(step) def get_non_existing_directory_upload_config(self): from buildbot.plugins import steps step = steps.DirectoryUpload(workersrc="dir/noexist_path", masterdest="master_dest") return self.get_config_single_step(step) def get_non_existing_multiple_file_upload_config(self): from buildbot.plugins import steps step = steps.MultipleFileUpload(workersrcs=["dir/noexist_path"], masterdest="master_dest") return 
self.get_config_single_step(step) @flaky(bugNumber=4407, onPlatform='win32') @defer.inlineCallbacks def test_transfer(self): yield self.setupConfig(masterConfig(bigfilename=self.mktemp())) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) dirContents = self.readMasterDirContents("dir") self.assertEqual( dirContents, {os.path.join('dir', 'file1.txt'): 'filecontent', os.path.join('dir', 'file2.txt'): 'filecontent2', os.path.join('dir', 'file3.txt'): 'filecontent2'}) # cleanup our mess (worker is cleaned up by parent class) shutil.rmtree("dir") os.unlink("master.txt") @defer.inlineCallbacks def test_globTransfer(self): yield self.setupConfig(masterGlobConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) dirContents = self.readMasterDirContents("dest") self.assertEqual(dirContents, { os.path.join('dest', 'file1.txt'): 'filecontent', os.path.join('dest', 'notafile1.txt'): 'filecontent2', os.path.join('dest', 'only1.txt'): 'filecontent2' }) # cleanup shutil.rmtree("dest") @defer.inlineCallbacks def test_no_exist_file_upload(self): yield self.setupConfig(self.get_non_existing_file_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) @defer.inlineCallbacks def test_no_exist_directory_upload(self): yield self.setupConfig(self.get_non_existing_directory_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) @defer.inlineCallbacks def test_no_exist_multiple_file_upload(self): yield self.setupConfig(self.get_non_existing_multiple_file_upload_config()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], FAILURE) res = yield self.checkBuildStepLogExist(build, "Cannot open file") self.assertTrue(res) class TransferStepsMasterNull(TransferStepsMasterPb): proto = "null" # master configuration def masterConfig(bigfilename): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() # do a bunch of transfer to exercise the protocol f.addStep(steps.StringDownload("filecontent", workerdest="dir/file1.txt")) f.addStep(steps.StringDownload("filecontent2", workerdest="dir/file2.txt")) # create 8 MB file with open(bigfilename, 'w') as o: buf = "xxxxxxxx" * 1024 for i in range(1000): o.write(buf) f.addStep( steps.FileDownload( mastersrc=bigfilename, workerdest="bigfile.txt")) f.addStep( steps.FileUpload(workersrc="dir/file2.txt", masterdest="master.txt")) f.addStep( steps.FileDownload(mastersrc="master.txt", workerdest="dir/file3.txt")) f.addStep(steps.DirectoryUpload(workersrc="dir", masterdest="dir")) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c def masterGlobConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers from buildbot.steps.worker import CompositeStepMixin class CustomStep(steps.BuildStep, CompositeStepMixin): @defer.inlineCallbacks def run(self): content = yield self.getFileContentFromWorker( 
"dir/file1.txt", abandonOnFailure=True) assert content == "filecontent" return SUCCESS c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"]) ] f = BuildFactory() f.addStep(steps.StringDownload("filecontent", workerdest="dir/file1.txt")) f.addStep( steps.StringDownload( "filecontent2", workerdest="dir/notafile1.txt")) f.addStep(steps.StringDownload("filecontent2", workerdest="dir/only1.txt")) f.addStep( steps.MultipleFileUpload( workersrcs=["dir/file*.txt", "dir/not*.txt", "dir/only?.txt"], masterdest="dest/", glob=True)) f.addStep(CustomStep()) c['builders'] = [ BuilderConfig( name="testy", workernames=["local1"], factory=f) ] return c buildbot-2.6.0/master/buildbot/test/integration/interop/test_worker_reconnect.py000066400000000000000000000042601361162603000303660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.buildstep import BuildStep from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase class DisconnectingStep(BuildStep): disconnection_list = [] def run(self): self.disconnection_list.append(self) if len(self.disconnection_list) < 2: self.worker.disconnect() return SUCCESS class WorkerReconnect(RunMasterBase): """integration test for testing worker disconnection and reconnection""" proto = "pb" @defer.inlineCallbacks def test_eventually_reconnect(self): DisconnectingStep.disconnection_list = [] yield self.setupConfig(masterConfig()) build = yield self.doForceBuild() self.assertEqual(build['buildid'], 2) self.assertEqual(len(DisconnectingStep.disconnection_list), 2) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]), schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(DisconnectingStep()) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/pki/000077500000000000000000000000001361162603000225055ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/integration/pki/127.0.0.1.crt000066400000000000000000000126661361162603000241760ustar00rootroot00000000000000Certificate: Data: Version: 3 (0x2) Serial Number: 1 (0x1) Signature Algorithm: sha256WithRSAEncryption Issuer: C=ZZ, ST=QA, L=Nowhere, O=Buildbot, OU=Development Team, CN=Buildbot CA/name=EasyRSA/emailAddress=buildbot@integration.test Validity Not Before: Sep 2 12:10:17 2016 GMT Not After : Aug 31 12:10:17 2026 GMT Subject: C=ZZ, ST=QA, L=Nowhere, O=Buildbot, OU=Development Team, CN=127.0.0.1/name=EasyRSA/emailAddress=buildbot@integration.test Subject Public Key Info: Public Key 
Algorithm: rsaEncryption Public-Key: (2048 bit) Modulus: 00:9e:e5:6e:8d:83:89:6e:3c:45:7a:37:2d:cf:dc: a4:37:38:30:b8:58:cb:50:b7:78:d2:f5:11:e4:e4: 3b:de:3f:02:f0:b5:4b:2a:f4:4e:e5:cc:f0:e7:cf: 43:a4:36:5a:22:6b:89:3e:aa:c4:ef:2c:75:3a:cc: 43:e4:8d:d5:99:4e:1f:08:a6:3d:36:2d:72:80:10: 7b:52:20:44:9a:c7:ee:6b:45:2f:41:cd:0e:3e:dd: 59:01:eb:bb:11:2c:cb:e4:34:bd:63:d9:73:84:90: 36:d9:9b:1b:1b:4f:d0:15:12:89:df:bd:a6:3c:cf: 7e:5b:f5:0b:4d:e1:18:47:1f:7c:58:e4:2a:ae:17: fa:c1:13:64:6f:06:78:32:92:8f:83:78:b0:5d:a4: 8b:7f:a5:8d:d5:c8:87:b1:37:28:17:7a:34:d5:83: 29:8c:e8:d1:1d:a2:df:4d:c5:94:22:4d:0e:75:92: 20:bb:8b:b4:08:85:fb:17:1e:8b:f3:86:b5:b5:5c: 63:9f:fa:3e:e7:52:7c:b6:c6:2a:a3:79:37:44:e0: fc:cd:0b:a1:fc:3c:42:fe:ee:a1:11:b1:c0:a4:17: fb:77:5f:89:ae:7c:55:37:0e:75:8e:93:a8:3a:c3: 34:1b:24:2f:39:87:2c:ee:f0:70:7e:d4:70:0d:db: 29:af Exponent: 65537 (0x10001) X509v3 extensions: X509v3 Basic Constraints: CA:FALSE Netscape Comment: Easy-RSA Generated Certificate X509v3 Subject Key Identifier: 18:6E:2E:76:45:FE:0D:4D:66:76:B6:4D:97:AE:DD:87:27:F0:42:A2 X509v3 Authority Key Identifier: keyid:FB:03:F2:3E:31:9D:6C:14:52:7D:8E:29:18:92:7E:75:43:7C:09:F9 DirName:/C=ZZ/ST=QA/L=Nowhere/O=Buildbot/OU=Development Team/CN=Buildbot CA/name=EasyRSA/emailAddress=buildbot@integration.test serial:B1:2A:2E:B0:BF:9B:5C:37 X509v3 Extended Key Usage: TLS Web Client Authentication X509v3 Key Usage: Digital Signature X509v3 Subject Alternative Name: DNS:127.0.0.1 Signature Algorithm: sha256WithRSAEncryption 58:d9:74:e7:ce:32:aa:b1:a7:dc:06:23:c6:bd:76:b4:3b:7b: 01:ec:82:61:b7:80:7e:ba:c9:ca:a0:48:40:ef:3e:ca:1c:55: 0d:64:3f:80:8c:01:5f:c0:2e:a3:b6:bd:ec:67:29:d6:cf:3e: f4:d2:b9:3b:70:84:95:d8:6d:81:dd:dc:07:6a:15:0c:48:ea: dd:b8:93:55:6f:3f:0d:6f:95:57:d3:dc:e4:a1:60:fd:d4:1b: 33:eb:b1:95:14:c0:65:c7:aa:95:f3:a6:0b:8b:73:fa:77:33: 61:68:e8:fd:cd:f5:1a:a4:c4:6b:78:5d:f6:3b:23:be:f4:92: 88:dc:42:d5:cb:04:96:0b:e5:a7:61:ad:1a:68:ef:8f:38:1f: cf:a0:de:5a:aa:27:e2:fb:98:de:eb:76:1b:a4:0c:2c:7b:8f: 38:14:21:28:f2:cb:c6:78:9f:43:c7:f6:9e:e9:49:54:fa:ff: 36:67:ee:69:2b:d2:3b:2d:08:25:7c:5f:f5:49:0a:23:c1:e3: 8b:4b:09:a5:15:95:60:02:9f:91:bf:64:9c:a8:99:9a:7a:bf: 7a:45:58:c2:0d:b1:da:f0:73:96:0e:9d:fd:f6:a3:02:8f:dc: fe:77:40:16:64:23:57:7f:87:d5:5b:8e:5a:3d:f1:2a:29:e2: c4:ea:d7:43 -----BEGIN CERTIFICATE----- MIIFTjCCBDagAwIBAgIBATANBgkqhkiG9w0BAQsFADCBrDELMAkGA1UEBhMCWlox CzAJBgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJv dDEZMBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3Qg Q0ExEDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGlu dGVncmF0aW9uLnRlc3QwHhcNMTYwOTAyMTIxMDE3WhcNMjYwODMxMTIxMDE3WjCB qjELMAkGA1UEBhMCWloxCzAJBgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREw DwYDVQQKEwhCdWlsZGJvdDEZMBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTESMBAG A1UEAxMJMTI3LjAuMC4xMRAwDgYDVQQpEwdFYXN5UlNBMSgwJgYJKoZIhvcNAQkB FhlidWlsZGJvdEBpbnRlZ3JhdGlvbi50ZXN0MIIBIjANBgkqhkiG9w0BAQEFAAOC AQ8AMIIBCgKCAQEAnuVujYOJbjxFejctz9ykNzgwuFjLULd40vUR5OQ73j8C8LVL KvRO5czw589DpDZaImuJPqrE7yx1OsxD5I3VmU4fCKY9Ni1ygBB7UiBEmsfua0Uv Qc0OPt1ZAeu7ESzL5DS9Y9lzhJA22ZsbG0/QFRKJ372mPM9+W/ULTeEYRx98WOQq rhf6wRNkbwZ4MpKPg3iwXaSLf6WN1ciHsTcoF3o01YMpjOjRHaLfTcWUIk0OdZIg u4u0CIX7Fx6L84a1tVxjn/o+51J8tsYqo3k3ROD8zQuh/DxC/u6hEbHApBf7d1+J rnxVNw51jpOoOsM0GyQvOYcs7vBwftRwDdsprwIDAQABo4IBeTCCAXUwCQYDVR0T BAIwADAtBglghkgBhvhCAQ0EIBYeRWFzeS1SU0EgR2VuZXJhdGVkIENlcnRpZmlj YXRlMB0GA1UdDgQWBBQYbi52Rf4NTWZ2tk2Xrt2HJ/BCojCB4QYDVR0jBIHZMIHW gBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+aGBsqSBrzCBrDELMAkGA1UEBhMCWloxCzAJ BgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJvdDEZ 
MBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3QgQ0Ex EDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGludGVn cmF0aW9uLnRlc3SCCQCxKi6wv5tcNzATBgNVHSUEDDAKBggrBgEFBQcDAjALBgNV HQ8EBAMCB4AwFAYDVR0RBA0wC4IJMTI3LjAuMC4xMA0GCSqGSIb3DQEBCwUAA4IB AQBY2XTnzjKqsafcBiPGvXa0O3sB7IJht4B+usnKoEhA7z7KHFUNZD+AjAFfwC6j tr3sZynWzz700rk7cISV2G2B3dwHahUMSOrduJNVbz8Nb5VX09zkoWD91Bsz67GV FMBlx6qV86YLi3P6dzNhaOj9zfUapMRreF32OyO+9JKI3ELVywSWC+WnYa0aaO+P OB/PoN5aqifi+5je63YbpAwse484FCEo8svGeJ9Dx/ae6UlU+v82Z+5pK9I7LQgl fF/1SQojweOLSwmlFZVgAp+Rv2ScqJmaer96RVjCDbHa8HOWDp399qMCj9z+d0AW ZCNXf4fVW45aPfEqKeLE6tdD -----END CERTIFICATE----- buildbot-2.6.0/master/buildbot/test/integration/pki/127.0.0.1.key000066400000000000000000000032501361162603000241630ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCe5W6Ng4luPEV6 Ny3P3KQ3ODC4WMtQt3jS9RHk5DvePwLwtUsq9E7lzPDnz0OkNloia4k+qsTvLHU6 zEPkjdWZTh8Ipj02LXKAEHtSIESax+5rRS9BzQ4+3VkB67sRLMvkNL1j2XOEkDbZ mxsbT9AVEonfvaY8z35b9QtN4RhHH3xY5CquF/rBE2RvBngyko+DeLBdpIt/pY3V yIexNygXejTVgymM6NEdot9NxZQiTQ51kiC7i7QIhfsXHovzhrW1XGOf+j7nUny2 xiqjeTdE4PzNC6H8PEL+7qERscCkF/t3X4mufFU3DnWOk6g6wzQbJC85hyzu8HB+ 1HAN2ymvAgMBAAECggEADV5sYMeyZm33woKl/hkoT+UQZFJEOPRW3Bj2enWhe999 VddLDcAkaz1E/5v2qvhPuRmnIHipvR3Wdy38gFxWnmFuRwIFoGtOeOvqFEzWuNcd fjUB9t1T14I0HO9Ce/1y4i51yNLg30Rq+QAN1cxvS3aV1xdTx0YF8aK6YsEPk9w2 XbBz7ujSRFE3/37uMboUNVLbHflu0l8UauJBbGm4Z4l1VALf1r38j2q3qEbJ9T+m uKu1SvjyNeqTb/wZPPYfBBe8TEHiilh+mif1fUN0F8Q/kQnSDgpTy1r94sfvuvwU E/+GR/h8Eawv6dW5TfoRMP80vMiA6vgSWf0tcQtwYQKBgQDOODGFoyjstpzTZ8P+ o2oFi/spwzxhtiMvA1dmb9xzmywDCJI7A6UmKn51wdFgA05O7RINF+daNL0JHzmK 0NBiNrChOIYKxNEBzjT0AC35ktUoEWt3EoWyuiDtqshkZE0ZbUaCGwI8R6zv+CMN yL5hB99V5sla43F8kblXlXAcqQKBgQDFQMmY6IrnLMAXuiW0Tj2G7ifM8sdlIVqv VCDjqWQjY21CuYZvQBphQD67rUSa0QPzDXK1FOnYEcZ31qyVvxvo+w+nGu7rmswX VpLmuegaKxzatW7PpLRXvC2P2WjTboHZlgxLqXKqzWJgcd87p6AF6QBtbT2CZnKK uBuWF59ylwKBgExJVWiOdzE6TMGX/VVRtoLeycclRk0PR+y4W7i1YOQTXzCwFwhl zM5VofqF/KJ8Fpfz812MnjUslVGuj2be4He0q5q4gj2xmXAFjGlHN7q/qsLrzsl5 vKPlXxEMwJ0CzdK+LfsraqRKD5umO7F5tZPHicMJYSuSQAVJEztnONYhAoGBAJNg sr+Cj7Xl46hWtrEe/C2CZ05j3sMaxqzVCLXQ7DbcpNgD0gPxO0SKQdTrwqSBopfI 5nmRpJ6BuW30gYJpBatvWeSa5QQ35mFRl/S31kknCSoIAUE3aF9dBBXEdOP0XyR5 TbqCYmBnkCdLLWVe+tsvmdgolJqHfPFUWZgtEj8FAoGAE6BFO+Lt9LSauo+JLycj PDWWnYP4ZhhI7loIT+n7Jw7eDPigLkYmUe7h3XhyQwz+mCPR4tbDUo9vw22KGLNW kUIrOSbYNIXFM3ZsbLIXhRqUNlhWuqfo/IidJv59iFsnsK0liBa2KjxPMIE2rn77 kkBS4k2hMfpgdtp4IkGCThY= -----END PRIVATE KEY----- buildbot-2.6.0/master/buildbot/test/integration/pki/ca/000077500000000000000000000000001361162603000230705ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/integration/pki/ca/ca.crt000066400000000000000000000033611361162603000241700ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIE9DCCA9ygAwIBAgIJALEqLrC/m1w3MA0GCSqGSIb3DQEBCwUAMIGsMQswCQYD VQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05vd2hlcmUxETAPBgNVBAoT CEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBUZWFtMRQwEgYDVQQDEwtC dWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYGCSqGSIb3DQEJARYZYnVp bGRib3RAaW50ZWdyYXRpb24udGVzdDAeFw0xNjA5MDIxMjA5NTJaFw0yNjA4MzEx MjA5NTJaMIGsMQswCQYDVQQGEwJaWjELMAkGA1UECBMCUUExEDAOBgNVBAcTB05v d2hlcmUxETAPBgNVBAoTCEJ1aWxkYm90MRkwFwYDVQQLExBEZXZlbG9wbWVudCBU ZWFtMRQwEgYDVQQDEwtCdWlsZGJvdCBDQTEQMA4GA1UEKRMHRWFzeVJTQTEoMCYG CSqGSIb3DQEJARYZYnVpbGRib3RAaW50ZWdyYXRpb24udGVzdDCCASIwDQYJKoZI hvcNAQEBBQADggEPADCCAQoCggEBALJZcC9j4XYBi1fYT/fibY2FRWn6Qh74b1Pg I7iIde6Sf3DPdh/ogYvZAT+cIlkZdo4v326d0EkuYKcywDvho8UeET6sIYhuHPDW 
lRl1Ret6ylxpbEfxFNvMoEGNhYAP0C6QS2eWEP9LkV2lCuMQtWWzdedjk+efqBjR Gozaim0lr/5lx7bnVx0oRLAgbI5/9Ukbopansfr+Cp9CpFpbNPGZSmELzC3FPKXK 5tycj8WEqlywlha2/VRnCZfYefB3aAuQqQilLh+QHyhn6hzc26+n5B0l8QvrMkOX atKdznMLzJWGxS7UwmDKcsolcMAW+82BZ8nUCBPF3U5PkTLO540CAwEAAaOCARUw ggERMB0GA1UdDgQWBBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+TCB4QYDVR0jBIHZMIHW gBT7A/I+MZ1sFFJ9jikYkn51Q3wJ+aGBsqSBrzCBrDELMAkGA1UEBhMCWloxCzAJ BgNVBAgTAlFBMRAwDgYDVQQHEwdOb3doZXJlMREwDwYDVQQKEwhCdWlsZGJvdDEZ MBcGA1UECxMQRGV2ZWxvcG1lbnQgVGVhbTEUMBIGA1UEAxMLQnVpbGRib3QgQ0Ex EDAOBgNVBCkTB0Vhc3lSU0ExKDAmBgkqhkiG9w0BCQEWGWJ1aWxkYm90QGludGVn cmF0aW9uLnRlc3SCCQCxKi6wv5tcNzAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEB CwUAA4IBAQCJGJVMAmwZRK/mRqm9E0e3s4YGmYT2jwX5IX17XljEy+1cS4huuZW2 33CFpslkT1MN/r8IIZWilxT/lTujHyt4eERGjE1oRVKU8rlTH8WUjFzPIVu7nkte 09abqynAoec8aQukg79NRCY1l/E2/WzfnUt3yTgKPfZmzoiN0K+hH4gVlWtrizPA LaGwoslYYTA6jHNEeMm8OQLNf17OTmAa7EpeIgVpLRCieI9S3JIG4WYU8fVkeuiU cB439SdixU4cecVjNfFDpq6JM8N6+DQoYOSNRt9Dy0ioGyx5D4lWoIQ+BmXQENal gw+XLyejeNTNgLOxf9pbNYMJqxhkTkoE -----END CERTIFICATE----- buildbot-2.6.0/master/buildbot/test/integration/test_URLs.py000066400000000000000000000042731361162603000241660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import runtime from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment # and make sure the UrlForBuild renderable is working class UrlForBuildMaster(RunMasterBase): proto = "null" @defer.inlineCallbacks def test_url(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['results'], SUCCESS) if runtime.platformType == 'win32': command = "echo http://localhost:8080/#builders/1/builds/1" else: command = "echo 'http://localhost:8080/#builders/1/builds/1'" self.assertIn(command, build['steps'][1]['logs'][0]['contents']['content']) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() # do a bunch of transfer to exercise the protocol f.addStep(steps.ShellCommand(command=["echo", util.URLForBuild])) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] return c buildbot-2.6.0/master/buildbot/test/integration/test_configs.py000066400000000000000000000066671361162603000250020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.python import util from twisted.trial import unittest from buildbot import config from buildbot.scripts import runner from buildbot.test.util import dirs from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.worker_transition import DeprecatedWorkerAPIWarning class RealConfigs(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') self.basedir = os.path.abspath('basedir') self.filename = os.path.abspath("test.cfg") def tearDown(self): self.tearDownDirs() def test_sample_config(self): filename = util.sibpath(runner.__file__, 'sample.cfg') with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): config.FileLoader(self.basedir, filename).loadConfig() def test_0_9_0b5_api_renamed_config(self): with open(self.filename, "w") as f: f.write(sample_0_9_0b5_api_renamed) with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): config.FileLoader(self.basedir, self.filename).loadConfig() # sample.cfg from various versions, with comments stripped. Adjustments made # for compatibility are marked with comments # Template for master configuration just after worker renaming. sample_0_9_0b5_api_renamed = """\ from buildbot.plugins import * c = BuildmasterConfig = {} c['workers'] = [worker.Worker("example-worker", "pass")] c['protocols'] = {'pb': {'port': 9989}} c['change_source'] = [] c['change_source'].append(changes.GitPoller( 'https://github.com/buildbot/hello-world.git', workdir='gitpoller-workdir', branch='master', pollinterval=300)) c['schedulers'] = [] c['schedulers'].append(schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(branch='master'), treeStableTimer=None, builderNames=["runtests"])) c['schedulers'].append(schedulers.ForceScheduler( name="force", builderNames=["runtests"])) factory = util.BuildFactory() factory.addStep(steps.Git(repourl='https://github.com/buildbot/hello-world.git', mode='incremental')) factory.addStep(steps.ShellCommand(command=["trial", "hello"], env={"PYTHONPATH": "."})) c['builders'] = [] c['builders'].append( util.BuilderConfig(name="runtests", workernames=["example-worker"], factory=factory)) c['title'] = "Pyflakes" c['titleURL'] = "https://launchpad.net/pyflakes" c['buildbotURL'] = "http://localhost:8010/" c['www'] = dict(port=8010, plugins=dict(waterfall_view={}, console_view={})) c['db'] = { 'db_url' : "sqlite:///state.sqlite", } """ buildbot-2.6.0/master/buildbot/test/integration/test_custom_buildstep.py000066400000000000000000000271711361162603000267300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
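# ---------------------------------------------------------------------------
# Hypothetical sketch of the config-loading check performed by RealConfigs
# above: load a master.cfg with FileLoader and treat any
# DeprecatedWorkerAPIWarning as a failure. check_config() is a made-up
# helper name.
# ---------------------------------------------------------------------------
import os

from buildbot import config
from buildbot.test.util.warnings import assertNotProducesWarnings
from buildbot.worker_transition import DeprecatedWorkerAPIWarning


def check_config(basedir, filename):
    with assertNotProducesWarnings(DeprecatedWorkerAPIWarning):
        return config.FileLoader(os.path.abspath(basedir), filename).loadConfig()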
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import traceback import mock from twisted.internet import defer from twisted.internet import error from twisted.python import failure from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot import config from buildbot.process import builder from buildbot.process import buildrequest from buildbot.process import buildstep from buildbot.process import factory from buildbot.process import results from buildbot.process import workerforbuilder from buildbot.steps import shell from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.base import Worker class TestLogObserver(buildstep.LogObserver): def __init__(self): self.observed = [] def outReceived(self, txt): self.observed.append(txt) class OldStyleCustomBuildStep(buildstep.BuildStep): def __init__(self, reactor, arg1, arg2, doFail=False, **kwargs): super().__init__(**kwargs) self.reactor = reactor self.arg1 = arg1 self.arg2 = arg2 self.doFail = doFail def start(self): # don't complete immediately, or synchronously self.reactor.callLater(0, self.doStuff) def doStuff(self): try: self.addURL('bookmark', 'http://foo') self.addHTMLLog('compl.html', "A very short logfile\n") self.step_status.setText(['text']) self.step_status.setText2(['text2']) self.step_status.setText(['text3']) lo = TestLogObserver() self.addLogObserver('foo', lo) _log = self.addLog('foo') _log.addStdout('stdout\n') _log.addStdout('\N{SNOWMAN}\n'.encode('utf-8')) _log.addStderr('stderr\n') _log.finish() self.addCompleteLog('obs', 'Observer saw %r' % (lo.observed,)) if self.doFail: self.failed(failure.Failure(RuntimeError('oh noes'))) else: self.finished(results.SUCCESS) except Exception: traceback.print_exc() self.failed(failure.Failure()) class Latin1ProducingCustomBuildStep(buildstep.BuildStep): @defer.inlineCallbacks def run(self): _log = yield self.addLog('xx') output_str = '\N{CENT SIGN}' yield _log.addStdout(output_str) yield _log.finish() return results.SUCCESS class FailingCustomStep(buildstep.LoggingBuildStep): flunkOnFailure = True def __init__(self, exception=buildstep.BuildStepFailed, *args, **kwargs): super().__init__(*args, **kwargs) self.exception = exception @defer.inlineCallbacks def start(self): yield defer.succeed(None) raise self.exception() class OldBuildEPYDoc(shell.ShellCommand): command = ['epydoc'] def runCommand(self, cmd): # we don't have a real worker in this test harness, so fake it _log = cmd.logs['stdio'] _log.addStdout('some\noutput\n') return defer.succeed(None) def createSummary(self, log): for line in NativeStringIO(log.getText()): # what we do with the line isn't important to the test assert line in ('some\n', 'output\n') class OldPerlModuleTest(shell.Test): command = ['perl'] def runCommand(self, cmd): # we don't have a real worker in this test harness, so fake it _log = cmd.logs['stdio'] _log.addStdout('a\nb\nc\n') return defer.succeed(None) def evaluateCommand(self, cmd): # Get stdio, stripping pesky newlines etc. lines = [ line.replace('\r\n', '').replace('\r', '').replace('\n', '') for line in self.getLog('stdio').readlines() ] # .. 
the rest of this method isn't that interesting, as long as the # statement above worked assert lines == ['a', 'b', 'c'] return results.SUCCESS class RunSteps(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantMq=True, wantDb=True) self.master.db.insertTestData([ fakedb.Builder(id=80, name='test'), ]) self.builder = builder.Builder('test') self.builder._builderid = 80 self.builder.config_version = 0 self.builder.master = self.master self.builder.botmaster = mock.Mock() self.builder.botmaster.getLockFromLockAccesses = lambda l, c: [] yield self.builder.startService() self.factory = factory.BuildFactory() # will have steps added later new_config = config.MasterConfig() new_config.builders.append( config.BuilderConfig(name='test', workername='testworker', factory=self.factory)) yield self.builder.reconfigServiceWithBuildbotConfig(new_config) self.worker = Worker('worker', 'pass') self.worker.sendBuilderList = lambda: defer.succeed(None) self.worker.parent = mock.Mock() self.worker.master.botmaster = mock.Mock() self.worker.botmaster.maybeStartBuildsForWorker = lambda w: None self.worker.botmaster.getBuildersForWorker = lambda w: [] self.worker.parent = self.master self.worker.startService() self.conn = fakeprotocol.FakeConnection(self.master, self.worker) yield self.worker.attached(self.conn) wfb = self.workerforbuilder = workerforbuilder.WorkerForBuilder() wfb.setBuilder(self.builder) yield wfb.attached(self.worker, {}) # add the buildset/request self.bsid, brids = yield self.master.db.buildsets.addBuildset( sourcestamps=[{}], reason='x', properties={}, builderids=[80], waited_for=False) self.brdict = \ yield self.master.db.buildrequests.getBuildRequest(brids[80]) self.buildrequest = \ yield buildrequest.BuildRequest.fromBrdict(self.master, self.brdict) @defer.inlineCallbacks def tearDown(self): yield self.worker.stopService() yield self.builder.stopService() @defer.inlineCallbacks def do_test_step(self): # patch builder.buildFinished to signal us with a deferred bfd = defer.Deferred() old_buildFinished = self.builder.buildFinished def buildFinished(*args): old_buildFinished(*args) bfd.callback(None) self.builder.buildFinished = buildFinished # start the builder self.assertTrue((yield self.builder.maybeStartBuild( self.workerforbuilder, [self.buildrequest]))) # and wait for completion yield bfd # then get the BuildStatus and return it return self.master.status.lastBuilderStatus.lastBuildStatus def assertLogs(self, exp_logs): got_logs = {} for id, l in self.master.data.updates.logs.items(): self.assertTrue(l['finished']) got_logs[l['name']] = ''.join(l['content']) self.assertEqual(got_logs, exp_logs) @defer.inlineCallbacks def doOldStyleCustomBuildStep(self, slowDB=False): # patch out addLog to delay until we're ready newLogDeferreds = [] oldNewLog = self.master.data.updates.addLog def finishNewLog(self): for d in newLogDeferreds: self.reactor.callLater(0, d.callback, None) def delayedNewLog(*args, **kwargs): d = defer.Deferred() d.addCallback(lambda _: oldNewLog(*args, **kwargs)) newLogDeferreds.append(d) return d if slowDB: self.patch(self.master.data.updates, "addLog", delayedNewLog) self.patch(OldStyleCustomBuildStep, "_run_finished_hook", finishNewLog) self.factory.addStep(OldStyleCustomBuildStep(self.reactor, arg1=1, arg2=2)) yield self.do_test_step() self.assertLogs({ 'compl.html': 'A very short logfile\n', # this is one of the things that differs independently of # new/old 
style: encoding of logs and newlines 'foo': # 'stdout\n\xe2\x98\x83\nstderr\n', 'ostdout\no\N{SNOWMAN}\nestderr\n', 'obs': # if slowDB, the observer won't see anything before the end of this # instant step 'Observer saw []\n' if slowDB else # 'Observer saw [\'stdout\\n\', \'\\xe2\\x98\\x83\\n\']', 'Observer saw [' + repr('stdout\n') + ", " + repr("\u2603\n") + "]\n" }) def test_OldStyleCustomBuildStep(self): return self.doOldStyleCustomBuildStep(False) def test_OldStyleCustomBuildStepSlowDB(self): return self.doOldStyleCustomBuildStep(True) @defer.inlineCallbacks def test_OldStyleCustomBuildStep_failure(self): self.factory.addStep(OldStyleCustomBuildStep(self.reactor, arg1=1, arg2=2, doFail=1)) bs = yield self.do_test_step() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) self.assertEqual(bs.getResults(), results.EXCEPTION) @defer.inlineCallbacks def test_step_raising_buildstepfailed_in_start(self): self.factory.addStep(FailingCustomStep()) bs = yield self.do_test_step() self.assertEqual(bs.getResults(), results.FAILURE) @defer.inlineCallbacks def test_step_raising_exception_in_start(self): self.factory.addStep(FailingCustomStep(exception=ValueError)) bs = yield self.do_test_step() self.assertEqual(bs.getResults(), results.EXCEPTION) self.assertEqual(len(self.flushLoggedErrors(ValueError)), 1) @defer.inlineCallbacks def test_step_raising_connectionlost_in_start(self): self.factory.addStep(FailingCustomStep(exception=error.ConnectionLost)) bs = yield self.do_test_step() self.assertEqual(bs.getResults(), results.RETRY) @defer.inlineCallbacks def test_Latin1ProducingCustomBuildStep(self): self.factory.addStep( Latin1ProducingCustomBuildStep(logEncoding='latin-1')) yield self.do_test_step() self.assertLogs({ 'xx': 'o\N{CENT SIGN}\n', }) @defer.inlineCallbacks def test_OldBuildEPYDoc(self): # test old-style calls to log.getText, figuring readlines will be ok self.factory.addStep(OldBuildEPYDoc()) bs = yield self.do_test_step() self.assertEqual(bs.getResults(), results.FAILURE) @defer.inlineCallbacks def test_OldPerlModuleTest(self): # test old-style calls to self.getLog self.factory.addStep(OldPerlModuleTest()) bs = yield self.do_test_step() self.assertEqual(bs.getResults(), results.SUCCESS) buildbot-2.6.0/master/buildbot/test/integration/test_customservices.py000066400000000000000000000073761361162603000264260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
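# ---------------------------------------------------------------------------
# Hypothetical new-style step in the spirit of Latin1ProducingCustomBuildStep
# above: run() creates a log, writes to it, finishes it, and returns a result
# code. GreetingStep and the 'greeting' log name are invented for this sketch.
# ---------------------------------------------------------------------------
from twisted.internet import defer
from buildbot.process import buildstep
from buildbot.process import results


class GreetingStep(buildstep.BuildStep):
    @defer.inlineCallbacks
    def run(self):
        log = yield self.addLog('greeting')
        yield log.addStdout('hello from a custom step\n')
        yield log.finish()
        return results.SUCCESS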
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.decorators import flaky from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a custom step # The custom step is using a CustomService, in order to calculate its result # we make sure that we can reconfigure the master while build is running class CustomServiceMaster(RunMasterBase): @flaky(bugNumber=3340) @defer.inlineCallbacks def test_customService(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True) self.assertEqual(build['steps'][0]['state_string'], 'num reconfig: 1') myService = self.master.service_manager.namedServices['myService'] self.assertEqual(myService.num_reconfig, 1) self.assertTrue(myService.running) # We do several reconfig, and make sure the service # are reconfigured as expected yield self.master.reconfig() build = yield self.doForceBuild(wantSteps=True) self.assertEqual(myService.num_reconfig, 2) self.assertEqual(build['steps'][0]['state_string'], 'num reconfig: 2') yield self.master.reconfig() myService2 = self.master.service_manager.namedServices['myService2'] self.assertTrue(myService2.running) self.assertEqual(myService2.num_reconfig, 3) self.assertEqual(myService.num_reconfig, 3) yield self.master.reconfig() # second service removed self.assertNotIn( 'myService2', self.master.service_manager.namedServices) self.assertFalse(myService2.running) self.assertEqual(myService2.num_reconfig, 3) self.assertEqual(myService.num_reconfig, 4) # master configuration num_reconfig = 0 def masterConfig(): global num_reconfig num_reconfig += 1 c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.schedulers.forcesched import ForceScheduler from buildbot.steps.shell import ShellCommand from buildbot.util.service import BuildbotService class MyShellCommand(ShellCommand): def getResultSummary(self): service = self.master.service_manager.namedServices['myService'] return dict(step="num reconfig: %d" % (service.num_reconfig,)) class MyService(BuildbotService): name = "myService" def reconfigService(self, num_reconfig): self.num_reconfig = num_reconfig return defer.succeed(None) c['schedulers'] = [ ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(MyShellCommand(command='echo hei')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] c['services'] = [MyService(num_reconfig=num_reconfig)] if num_reconfig == 3: c['services'].append( MyService(name="myService2", num_reconfig=num_reconfig)) return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_force_with_patch.py000066400000000000000000000063711361162603000312550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
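# ---------------------------------------------------------------------------
# Hypothetical service sketch following MyService in the custom-services test
# above: a named BuildbotService whose reconfigService() receives the keyword
# arguments given in c['services']. CounterService is a placeholder name.
# ---------------------------------------------------------------------------
from twisted.internet import defer
from buildbot.util.service import BuildbotService


class CounterService(BuildbotService):
    name = "counterService"

    def reconfigService(self, num_reconfig=0):
        # Called on every master reconfig with the current config values.
        self.num_reconfig = num_reconfig
        return defer.succeed(None)


# In masterConfig(): c['services'] = [CounterService(num_reconfig=num_reconfig)]
# and a step can read the value back via
#   self.master.service_manager.namedServices['counterService'].num_reconfig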
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.source.base import Source from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.test.util.integration import RunMasterBase # a simple patch which adds a Makefile PATCH = """diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..8a5cf80 --- /dev/null +++ b/Makefile @@ -0,0 +1,2 @@ +all: +\techo OK """ class MySource(Source): """A source class which only applies the patch""" def startVC(self, branch, revision, patch): self.stdio_log = self.addLogForRemoteCommands("stdio") d = defer.succeed(SUCCESS) if patch: d.addCallback(self.patch, patch) d.addCallback(self.finished) d.addErrback(self.failed) return d class ShellMaster(RunMasterBase): @skipUnlessPlatformIs("posix") # make is not installed on windows @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True, forceParams={'foo_patch_body': PATCH}) self.assertEqual(build['buildid'], 1) # if makefile was not properly created, we would have a failure self.assertEqual(build['results'], SUCCESS) @defer.inlineCallbacks def test_shell_no_patch(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) # if no patch, the source step is happy, but the make step cannot find makefile self.assertEqual(build['steps'][1]['results'], SUCCESS) self.assertEqual(build['steps'][2]['results'], FAILURE) self.assertEqual(build['results'], FAILURE) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, util c['schedulers'] = [ schedulers.ForceScheduler( name="force", codebases=[util.CodebaseParameter( "foo", patch=util.PatchParameter())], builderNames=["testy"])] f = BuildFactory() f.addStep(MySource(codebase='foo')) # if the patch was applied correctly, then make will work! f.addStep(steps.ShellCommand(command=["make"])) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_mastershell.py000066400000000000000000000041001361162603000302540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
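# ---------------------------------------------------------------------------
# Hypothetical scheduler sketch matching the force-with-patch test above: a
# ForceScheduler exposing a patch parameter for codebase "foo", which a test
# can fill through forceParams={'foo_patch_body': PATCH}. The helper name
# patch_force_scheduler() is an assumption of this sketch.
# ---------------------------------------------------------------------------
from buildbot.plugins import schedulers, util


def patch_force_scheduler():
    return schedulers.ForceScheduler(
        name="force",
        codebases=[util.CodebaseParameter("foo", patch=util.PatchParameter())],
        builderNames=["testy"])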
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.MasterShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_scheduler_reconfigure.py000066400000000000000000000054161361162603000323120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
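# NOTE (editorial sketch): the test below reconfigures a running master by
# mutating the very dict that was handed to setupConfig() and then calling
# master.reconfig().  A hypothetical helper wrapping that pattern:
from twisted.internet import defer as _sketch_defer


@_sketch_defer.inlineCallbacks
def _swap_schedulers(master, cfg, new_schedulers):
    # replace the scheduler list in place, then ask the master to reload
    cfg['schedulers'] = new_schedulers
    yield master.reconfig()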
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.plugins import schedulers from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): cfg = masterConfig() yield self.setupConfig(cfg) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) # switch the configuration of the scheduler, and make sure the correct builder is run cfg['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched1", builderNames=["testy2"]), schedulers.ForceScheduler( name="sched2", builderNames=["testy1"]) ] yield self.master.reconfig() build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) builder = yield self.master.data.get(('builders', build['builderid'])) self.assertEqual(builder['name'], 'testy2') # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched1", builderNames=["testy1"]), schedulers.ForceScheduler( name="sched2", builderNames=["testy2"]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name=name, workernames=["local1"], factory=f) for name in ['testy1', 'testy2'] ] return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_secrets_with_vault.py000066400000000000000000000066141361162603000316630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
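# NOTE (editorial sketch): the vault test below follows a common shape for
# integration tests that need an external service: probe for the tool, raise
# SkipTest when it is unavailable, and register teardown with addCleanup().
# A hypothetical guard for an arbitrary command-line tool:
import subprocess as _sketch_subprocess
from unittest.case import SkipTest as _SketchSkipTest


def _require_cli(tool):
    try:
        rv = _sketch_subprocess.call([tool, '--version'])
    except FileNotFoundError:
        rv = 1
    if rv != 0:
        raise _SketchSkipTest("%s is required for this integration test" % tool)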
# # Copyright Buildbot Team Members import subprocess from unittest.case import SkipTest from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.secrets.providers.vault import HashiCorpVaultSecretProvider from buildbot.steps.shell import ShellCommand from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builders and a shellcommand step class SecretsConfig(RunMasterBase): def setUp(self): try: rv = subprocess.call(['docker', 'pull', 'vault']) if rv != 0: raise FileNotFoundError('docker') except FileNotFoundError: raise SkipTest( "Vault integration need docker environment to be setup") rv = subprocess.call(['docker', 'run', '-d', '-e', 'SKIP_SETCAP=yes', '-e', 'VAULT_DEV_ROOT_TOKEN_ID=my_vaulttoken', '-e', 'VAULT_TOKEN=my_vaulttoken', '--name=vault_for_buildbot', '-p', '8200:8200', 'vault']) self.assertEqual(rv, 0) self.addCleanup(self.remove_container) rv = subprocess.call(['docker', 'exec', '-e', 'VAULT_ADDR=http://127.0.0.1:8200/', 'vault_for_buildbot', 'vault', 'kv', 'put', 'secret/key', 'value=word']) self.assertEqual(rv, 0) def remove_container(self): subprocess.call(['docker', 'rm', '-f', 'vault_for_buildbot']) @defer.inlineCallbacks def test_secret(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "echo ") self.assertTrue(res) def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] # note that as of December 2018, the vault docker image default to kv # version 2 to be enabled by default c['secretsProviders'] = [HashiCorpVaultSecretProvider( vaultToken='my_vaulttoken', vaultServer="http://localhost:8200", apiVersion=2 )] f = BuildFactory() f.addStep(ShellCommand(command=[Interpolate('echo %(secret:key)s')])) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_template.py000066400000000000000000000045371361162603000275620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
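# NOTE (editorial sketch): the preceding vault test injects its secret with
# Interpolate('%(secret:key)s').  Interpolate accepts several placeholder
# kinds; the step below is a hypothetical illustration mixing a standard build
# property with a secret (the secret name is made up):
def _example_interpolated_step():
    from buildbot.plugins import steps, util
    return steps.ShellCommand(command=[
        'echo',
        util.Interpolate('build #%(prop:buildnumber)s'),
        util.Interpolate('%(secret:api_token)s'),
    ])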
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) # if you don't need change, you can just remove this change, and useChange parameter change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True, wantProperties=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['properties']['owners'], (['me@foo.com'], 'Build')) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]), schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_integration_with_secrets.py000066400000000000000000000050551361162603000304460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.process.properties import Interpolate from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.integration import RunMasterBase class SecretsConfig(RunMasterBase): @defer.inlineCallbacks def test_secret(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "") self.assertTrue(res) @defer.inlineCallbacks def test_withsecrets(self): yield self.setupConfig(masterConfig(use_with=True)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "") self.assertTrue(res) # master configuration def masterConfig(use_with=False): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers, steps c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] c['secretsProviders'] = [FakeSecretStorage( secretdict={"foo": "bar", "something": "more"})] f = BuildFactory() if use_with: secrets_list = [("pathA", Interpolate('%(secret:something)s'))] with f.withSecrets(secrets_list): f.addStep(steps.ShellCommand(command=Interpolate('echo %(secret:foo)s'))) else: f.addSteps([steps.ShellCommand(command=Interpolate('echo %(secret:foo)s'))], withSecrets=[("pathA", Interpolate('%(secret:something)s'))]) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_locks.py000066400000000000000000000427251361162603000244600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
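# NOTE (editorial sketch): the tests below exercise buildbot's lock access
# modes.  'exclusive' access admits a single holder at a time, while
# 'counting' access admits up to maxCount concurrent holders; locks can be
# attached to a whole builder or to individual steps.  Names are hypothetical:
def _example_lock_usage():
    from buildbot.plugins import util
    build_lock = util.MasterLock("example_build_lock", maxCount=1)
    step_lock = util.WorkerLock("example_step_lock", maxCount=2)
    builder_locks = [build_lock.access('exclusive')]  # held for the whole build
    step_locks = [step_lock.access('counting')]       # held only while the step runs
    return builder_locks, step_locks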
# # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import util from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.fake.step import BuildStepController from buildbot.test.util.integration import RunFakeMasterTestCase from buildbot.util.eventual import flushEventualQueue class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def create_single_worker_two_builder_lock_config(self, lock_cls, mode): stepcontrollers = [BuildStepController(), BuildStepController()] lock = lock_cls("lock1", maxCount=1) config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step]), locks=[lock.access(mode)]), BuilderConfig(name='builder2', workernames=['worker1'], factory=BuildFactory([stepcontrollers[1].step]), locks=[lock.access(mode)]), ], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('builder1')), (yield master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, master, builder_ids @defer.inlineCallbacks def create_single_worker_two_builder_step_lock_config(self, lock_cls, mode): lock = lock_cls("lock1", maxCount=1) stepcontrollers = [BuildStepController(locks=[lock.access(mode)]), BuildStepController(locks=[lock.access(mode)])] config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step])), BuilderConfig(name='builder2', workernames=['worker1'], factory=BuildFactory([stepcontrollers[1].step])), ], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('builder1')), (yield master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, master, builder_ids @defer.inlineCallbacks def create_two_worker_two_builder_lock_config(self, mode): stepcontrollers = [BuildStepController(), BuildStepController()] master_lock = util.MasterLock("lock1", maxCount=1) config_dict = { 'builders': [ BuilderConfig(name='builder1', workernames=['worker1'], factory=BuildFactory([stepcontrollers[0].step]), locks=[master_lock.access(mode)]), BuilderConfig(name='builder2', workernames=['worker2'], factory=BuildFactory([stepcontrollers[1].step]), locks=[master_lock.access(mode)]), ], 'workers': [ self.createLocalWorker('worker1'), self.createLocalWorker('worker2'), ], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('builder1')), (yield master.data.updates.findBuilderId('builder2')), ] return stepcontrollers, master, builder_ids @defer.inlineCallbacks def assert_two_builds_created_one_after_another(self, stepcontrollers, master, builder_ids): # start two builds and verify that a second build starts after the # first is finished yield self.createBuildrequest(master, [builder_ids[0]]) yield self.createBuildrequest(master, [builder_ids[1]]) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 1) self.assertEqual(builds[0]['results'], None) self.assertEqual(builds[0]['builderid'], builder_ids[0]) stepcontrollers[0].finish_step(SUCCESS) # execute Build.releaseLocks which is 
called eventually yield flushEventualQueue() builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], None) self.assertEqual(builds[1]['builderid'], builder_ids[1]) stepcontrollers[1].finish_step(SUCCESS) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], SUCCESS) @defer.inlineCallbacks def assert_two_steps_created_one_after_another(self, stepcontrollers, master, builder_ids): # start two builds and verify that a second build starts after the # first is finished yield self.createBuildrequest(master, [builder_ids[0]]) yield self.createBuildrequest(master, [builder_ids[1]]) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], None) self.assertEqual(builds[0]['builderid'], builder_ids[0]) self.assertEqual(builds[1]['results'], None) self.assertEqual(builds[1]['builderid'], builder_ids[1]) self.assertTrue(stepcontrollers[0].running) self.assertFalse(stepcontrollers[1].running) stepcontrollers[0].finish_step(SUCCESS) yield flushEventualQueue() self.assertFalse(stepcontrollers[0].running) self.assertTrue(stepcontrollers[1].running) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], None) stepcontrollers[1].finish_step(SUCCESS) yield flushEventualQueue() builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), 2) self.assertEqual(builds[0]['results'], SUCCESS) self.assertEqual(builds[1]['results'], SUCCESS) @parameterized.expand([ (util.MasterLock, 'counting'), (util.MasterLock, 'exclusive'), (util.WorkerLock, 'counting'), (util.WorkerLock, 'exclusive'), ]) @defer.inlineCallbacks def test_builder_lock_prevents_concurrent_builds(self, lock_cls, mode): ''' Tests whether a builder lock works at all in preventing a build when the lock is taken. ''' stepcontrollers, master, builder_ids = \ yield self.create_single_worker_two_builder_lock_config(lock_cls, mode) yield self.assert_two_builds_created_one_after_another( stepcontrollers, master, builder_ids) @parameterized.expand([ (util.MasterLock, 'counting'), (util.MasterLock, 'exclusive'), (util.WorkerLock, 'counting'), (util.WorkerLock, 'exclusive'), ]) @defer.inlineCallbacks def test_step_lock_prevents_concurrent_builds(self, lock_cls, mode): ''' Tests whether a builder lock works at all in preventing a build when the lock is taken. ''' stepcontrollers, master, builder_ids = \ yield self.create_single_worker_two_builder_step_lock_config( lock_cls, mode) yield self.assert_two_steps_created_one_after_another( stepcontrollers, master, builder_ids) @parameterized.expand(['counting', 'exclusive']) @defer.inlineCallbacks def test_builder_lock_release_wakes_builds_for_another_builder(self, mode): """ If a builder locks a master lock then the build request distributor must retry running any buildrequests that might have been not scheduled due to unavailability of that lock when the lock becomes available. 
""" stepcontrollers, master, builder_ids = \ yield self.create_two_worker_two_builder_lock_config(mode) yield self.assert_two_builds_created_one_after_another( stepcontrollers, master, builder_ids) class TestReconfig(RunFakeMasterTestCase): def create_stepcontrollers(self, count, lock, mode): stepcontrollers = [] for i in range(count): locks = [lock.access(mode)] if lock is not None else [] stepcontrollers.append(BuildStepController(locks=locks)) return stepcontrollers def update_builder_config(self, config_dict, stepcontrollers, lock, mode): config_dict['builders'] = [] for i, stepcontroller in enumerate(stepcontrollers): locks = [lock.access(mode)] if lock is not None else [] b = BuilderConfig(name='builder{}'.format(i), workernames=['worker1'], factory=BuildFactory([stepcontroller.step]), locks=locks) config_dict['builders'].append(b) @defer.inlineCallbacks def create_single_worker_n_builder_lock_config(self, builder_count, lock_cls, max_count, mode): stepcontrollers = self.create_stepcontrollers(builder_count, None, None) lock = lock_cls("lock1", maxCount=max_count) config_dict = { 'builders': [], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } self.update_builder_config(config_dict, stepcontrollers, lock, mode) master = yield self.getMaster(config_dict) builder_ids = [] for i in range(builder_count): builder_ids.append(( yield master.data.updates.findBuilderId('builder{}'.format(i)))) return stepcontrollers, master, config_dict, lock, builder_ids @defer.inlineCallbacks def create_single_worker_n_builder_step_lock_config(self, builder_count, lock_cls, max_count, mode): lock = lock_cls("lock1", maxCount=max_count) stepcontrollers = self.create_stepcontrollers(builder_count, lock, mode) config_dict = { 'builders': [], 'workers': [ self.createLocalWorker('worker1'), ], 'protocols': {'null': {}}, 'multiMaster': True, } self.update_builder_config(config_dict, stepcontrollers, None, None) master = yield self.getMaster(config_dict) builder_ids = [] for i in range(builder_count): builder_ids.append(( yield master.data.updates.findBuilderId('builder{}'.format(i)))) return stepcontrollers, master, config_dict, lock, builder_ids @parameterized.expand([ (3, util.MasterLock, 'counting', 1, 2, 1, 2), (3, util.WorkerLock, 'counting', 1, 2, 1, 2), (3, util.MasterLock, 'counting', 2, 1, 2, 2), (3, util.WorkerLock, 'counting', 2, 1, 2, 2), (2, util.MasterLock, 'exclusive', 1, 2, 1, 1), (2, util.WorkerLock, 'exclusive', 1, 2, 1, 1), (2, util.MasterLock, 'exclusive', 2, 1, 1, 1), (2, util.WorkerLock, 'exclusive', 2, 1, 1, 1), ]) @defer.inlineCallbacks def test_changing_max_lock_count_does_not_break_builder_locks( self, builder_count, lock_cls, mode, max_count_before, max_count_after, allowed_builds_before, allowed_builds_after): ''' Check that Buildbot does not allow extra claims on a claimed lock after a reconfig that changed the maxCount of that lock. Some Buildbot versions created a completely separate real lock after each maxCount change, which allowed to e.g. take an exclusive lock twice. 
''' stepcontrollers, master, config_dict, lock, builder_ids = \ yield self.create_single_worker_n_builder_lock_config( builder_count, lock_cls, max_count_before, mode) # create a number of builds and check that the expected number of them # start for i in range(builder_count): yield self.createBuildrequest(master, [builder_ids[i]]) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), allowed_builds_before) # update the config and reconfig the master lock = lock_cls(lock.name, maxCount=max_count_after) self.update_builder_config(config_dict, stepcontrollers, lock, mode) yield master.reconfig() yield flushEventualQueue() # check that the number of running builds matches expectation builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), allowed_builds_after) # finish the steps and check that builds finished as expected for stepcontroller in stepcontrollers: stepcontroller.finish_step(SUCCESS) yield flushEventualQueue() builds = yield master.data.get(("builds",)) for b in builds[allowed_builds_after:]: self.assertEqual(b['results'], SUCCESS) @parameterized.expand([ (3, util.MasterLock, 'counting', 1, 2, 1, 2), (3, util.WorkerLock, 'counting', 1, 2, 1, 2), (3, util.MasterLock, 'counting', 2, 1, 2, 2), (3, util.WorkerLock, 'counting', 2, 1, 2, 2), (2, util.MasterLock, 'exclusive', 1, 2, 1, 1), (2, util.WorkerLock, 'exclusive', 1, 2, 1, 1), (2, util.MasterLock, 'exclusive', 2, 1, 1, 1), (2, util.WorkerLock, 'exclusive', 2, 1, 1, 1), ]) @defer.inlineCallbacks def test_changing_max_lock_count_does_not_break_step_locks( self, builder_count, lock_cls, mode, max_count_before, max_count_after, allowed_steps_before, allowed_steps_after): ''' Check that Buildbot does not allow extra claims on a claimed lock after a reconfig that changed the maxCount of that lock. Some Buildbot versions created a completely separate real lock after each maxCount change, which allowed to e.g. take an exclusive lock twice. 
''' stepcontrollers, master, config_dict, lock, builder_ids = \ yield self.create_single_worker_n_builder_step_lock_config( builder_count, lock_cls, max_count_before, mode) # create a number of builds and check that the expected number of them # start their steps for i in range(builder_count): yield self.createBuildrequest(master, [builder_ids[i]]) builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) self.assertEqual(sum(sc.running for sc in stepcontrollers), allowed_steps_before) # update the config and reconfig the master lock = lock_cls(lock.name, maxCount=max_count_after) new_stepcontrollers = \ self.create_stepcontrollers(builder_count, lock, mode) self.update_builder_config(config_dict, new_stepcontrollers, lock, mode) yield master.reconfig() yield flushEventualQueue() # check that all builds are still running builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) # check that the expected number of steps has been started and that # none of the new steps has been started self.assertEqual(sum(sc.running for sc in stepcontrollers), allowed_steps_before) self.assertEqual(sum(sc.running for sc in new_stepcontrollers), 0) # finish the steps and check that builds finished as expected for stepcontroller in stepcontrollers: stepcontroller.finish_step(SUCCESS) yield flushEventualQueue() builds = yield master.data.get(("builds",)) self.assertEqual(len(builds), builder_count) for b in builds: self.assertEqual(b['results'], SUCCESS) self.assertEqual(sum(sc.running for sc in stepcontrollers), 0) self.assertEqual(sum(sc.running for sc in new_stepcontrollers), 0) buildbot-2.6.0/master/buildbot/test/integration/test_log_finish.py000066400000000000000000000111631361162603000254560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
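# NOTE (editorial sketch): the reconfig tests above re-declare a lock with the
# same name but a different maxCount; buildbot is expected to treat that as an
# update of the one underlying lock rather than as a brand-new lock (older
# versions got this wrong).  A hypothetical re-declaration:
def _example_lock_redeclaration():
    from buildbot.plugins import util
    before = util.MasterLock("shared_resource", maxCount=1)
    after = util.MasterLock("shared_resource", maxCount=2)  # same name, new count
    return before, after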
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.plugins import steps from buildbot.process.results import EXCEPTION from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase class TestLog(RunMasterBase): # master configuration def masterConfig(self, step): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import schedulers c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c @defer.inlineCallbacks def test_shellcommand(self): class MyStep(steps.ShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.ShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], SUCCESS) self.assertTrue(self.curr_log.finished) @defer.inlineCallbacks def test_mastershellcommand(self): class MyStep(steps.MasterShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.MasterShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertEqual(build['results'], SUCCESS) self.assertTrue(self.curr_log.finished) @defer.inlineCallbacks def test_mastershellcommand_issue(self): class MyStep(steps.MasterShellCommand): def _newLog(obj, name, type, logid, logEncoding): r = steps.MasterShellCommand._newLog(obj, name, type, logid, logEncoding) self.curr_log = r self.patch(r, "finish", lambda: defer.fail(RuntimeError('Could not finish'))) return self.curr_log step = MyStep(command='echo hello') yield self.setupConfig(self.masterConfig(step)) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) self.assertFalse(self.curr_log.finished) self.assertEqual(build['results'], EXCEPTION) errors = self.flushLoggedErrors() self.assertEqual(len(errors), 1) error = errors[0] self.assertEqual(error.getErrorMessage(), 'Could not finish') buildbot-2.6.0/master/buildbot/test/integration/test_master.py000066400000000000000000000062251361162603000246330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.internet.task import deferLater from buildbot.changes.filter import ChangeFilter from buildbot.changes.pb import PBChangeSource from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.schedulers.basic import AnyBranchScheduler from buildbot.schedulers.forcesched import ForceScheduler from buildbot.steps.shell import ShellCommand from buildbot.test.util import www from buildbot.test.util.integration import RunMasterBase from buildbot.worker import Worker class RunMaster(RunMasterBase, www.RequiresWwwMixin): proto = 'pb' @defer.inlineCallbacks def do_test_master(self): yield self.setupConfig(BuildmasterConfig, startWorker=False) # hang out for a fraction of a second, to let startup processes run yield deferLater(reactor, 0.01, lambda: None) # run this test twice, to make sure the first time shut everything down # correctly; if this second test fails, but the first succeeds, then # something is not cleaning up correctly in stopService. def test_master1(self): return self.do_test_master() def test_master2(self): return self.do_test_master() # master configuration # Note that the *same* configuration objects are used for both runs of the # master. This is a more strenuous test than strictly required, since a master # will generally re-execute master.cfg on startup. However, it's good form and # will help to flush out any bugs that may otherwise be difficult to find. c = BuildmasterConfig = {} c['workers'] = [Worker("local1", "localpw")] c['protocols'] = {'pb': {'port': 'tcp:0'}} c['change_source'] = [] c['change_source'] = PBChangeSource() c['schedulers'] = [] c['schedulers'].append(AnyBranchScheduler(name="all", change_filter=ChangeFilter( project_re='^testy/'), treeStableTimer=1 * 60, builderNames=['testy', ])) c['schedulers'].append(ForceScheduler( name="force", builderNames=["testy"])) f1 = BuildFactory() f1.addStep(ShellCommand(command='echo hi')) c['builders'] = [] c['builders'].append( BuilderConfig(name="testy", workernames=["local1"], factory=f1)) c['title'] = "test" c['titleURL'] = "test" c['buildbotURL'] = "http://localhost:8010/" buildbot-2.6.0/master/buildbot/test/integration/test_notifier.py000066400000000000000000000136131361162603000251560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
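# NOTE (editorial sketch): the notifier tests below synchronise on outgoing
# messages by patching the reporter's send method so that it fires a Deferred
# the test can wait on.  A hypothetical, generic capture helper:
from twisted.internet import defer as _sketch_defer


def _capture_one_call():
    # returns (deferred, fake_method); the deferred fires with the call's args
    d = _sketch_defer.Deferred()

    def fake(*args, **kwargs):
        if not d.called:
            d.callback((args, kwargs))
    return d, fake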
# # Copyright Buildbot Team Members import base64 from twisted.internet import defer from buildbot.reporters.mail import ESMTPSenderFactory from buildbot.reporters.mail import MailNotifier from buildbot.reporters.message import MessageFormatter from buildbot.reporters.message import MessageFormatterMissingWorker from buildbot.reporters.pushover import PushoverNotifier from buildbot.test.util.integration import RunMasterBase from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes # This integration test creates a master and worker environment, # with one builders and a shellcommand step, and a MailNotifier class NotifierMaster(RunMasterBase): if not ESMTPSenderFactory: skip = ("twisted-mail unavailable, " "see: https://twistedmatrix.com/trac/ticket/8770") @defer.inlineCallbacks def setUp(self): self.mailDeferred = defer.Deferred() # patch MailNotifier.sendmail to know when the mail has been sent def sendMail(_, mail, recipients): self.mailDeferred.callback((mail.as_string(), recipients)) self.patch(MailNotifier, "sendMail", sendMail) self.notification = defer.Deferred() def sendNotification(_, params): self.notification.callback(params) self.patch(PushoverNotifier, "sendNotification", sendNotification) yield self.setupConfig(masterConfig()) @defer.inlineCallbacks def doTest(self, what): change = dict(branch="master", files=["foo.c"], author="author@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual(build['buildid'], 1) mail, recipients = yield self.mailDeferred self.assertEqual(recipients, ["author@foo.com"]) self.assertIn("From: bot@foo.com", mail) self.assertIn("Subject: Buildbot success in Buildbot", mail) self.assertEncodedIn("The Buildbot has detected a passing build", mail) params = yield self.notification self.assertEqual(build['buildid'], 1) self.assertEqual(params, {'title': "Buildbot success in Buildbot on {}".format(what), 'message': "This is a message."}) def assertEncodedIn(self, text, mail): # python 2.6 default transfer in base64 for utf-8 if "base64" not in mail: self.assertIn(text, mail) else: # b64encode and remove '=' padding (hence [:-1]) encodedBytes = base64.b64encode(unicode2bytes(text)).rstrip(b"=") encodedText = bytes2unicode(encodedBytes) self.assertIn(encodedText, mail) @defer.inlineCallbacks def test_notifiy_for_build(self): yield self.doTest('testy') @defer.inlineCallbacks def test_notifiy_for_buildset(self): self.master.config.services = [ MailNotifier("bot@foo.com", mode="all", buildSetSummary=True), PushoverNotifier('1234', 'abcd', mode="all", buildSetSummary=True, messageFormatter=MessageFormatter(template='This is a message.'))] yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) yield self.doTest('whole buildset') @defer.inlineCallbacks def test_missing_worker(self): yield self.master.data.updates.workerMissing( workerid='local1', masterid=self.master.masterid, last_connection='long time ago', notify=['admin@worker.org'], ) mail, recipients = yield self.mailDeferred self.assertIn("From: bot@foo.com", mail) self.assertEqual(recipients, ['admin@worker.org']) self.assertIn("Subject: Buildbot worker local1 missing", mail) self.assertIn("disconnected at long time ago", mail) self.assertEncodedIn("worker named local1 went away", mail) params = yield self.notification self.assertEqual(params, {'title': "Buildbot worker local1 missing", 'message': b"No worker."}) # master configuration 
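# NOTE (editorial sketch): test_notifiy_for_buildset above swaps in notifiers
# created with buildSetSummary=True, which report once per buildset instead of
# once per build.  A hypothetical minimal reporters list showing both flavours
# (the address is made up):
def _example_reporters():
    from buildbot.plugins import reporters
    return [
        reporters.MailNotifier("bot@example.com", mode="all"),
        reporters.MailNotifier("bot@example.com", mode="all",
                               buildSetSummary=True),
    ]
# the actual master configuration used by the tests above follows: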
def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers, reporters c['schedulers'] = [ schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f) ] notifier = reporters.PushoverNotifier('1234', 'abcd', mode="all", watchedWorkers=['local1'], messageFormatter=MessageFormatter(template='This is a message.'), messageFormatterMissingWorker=MessageFormatterMissingWorker( template='No worker.')) c['services'] = [ reporters.MailNotifier("bot@foo.com", mode="all"), notifier ] return c buildbot-2.6.0/master/buildbot/test/integration/test_process_botmaster.py000066400000000000000000000044721361162603000271000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.process.workerforbuilder import PingException from buildbot.test.fake.worker import WorkerController from buildbot.test.util.integration import RunFakeMasterTestCase class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def do_terminates_ping_on_shutdown(self, quick_mode): """ During shutdown we want to terminate any outstanding pings. """ controller = WorkerController(self, 'local') config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=['local'], factory=BuildFactory()), ], 'workers': [controller.worker], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_id = yield master.data.updates.findBuilderId('testy') controller.connect_worker() controller.sever_connection() yield self.createBuildrequest(master, [builder_id]) # give time for any delayed actions to complete self.reactor.advance(1) yield master.botmaster.cleanShutdown(quickMode=quick_mode, stopReactor=False) self.flushLoggedErrors(PingException) def test_terminates_ping_on_shutdown_quick_mode(self): return self.do_terminates_ping_on_shutdown(quick_mode=True) def test_terminates_ping_on_shutdown_slow_mode(self): return self.do_terminates_ping_on_shutdown(quick_mode=False) buildbot-2.6.0/master/buildbot/test/integration/test_stop_trigger.py000066400000000000000000000133001361162603000260400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import textwrap from twisted.internet import defer from twisted.internet import reactor from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import CANCELLED from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with two builders and a trigger step linking them. the triggered build never ends # so that we can reliably stop it recursively # master configurations def setupTriggerConfiguration(triggeredFactory, nextBuild=None): c = {} c['schedulers'] = [ schedulers.Triggerable( name="trigsched", builderNames=["triggered"]), schedulers.AnyBranchScheduler( name="sched", builderNames=["main"])] f = BuildFactory() f.addStep(steps.Trigger(schedulerNames=['trigsched'], waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) mainBuilder = BuilderConfig(name="main", workernames=["local1"], factory=f) triggeredBuilderKwargs = {'name': "triggered", 'workernames': ["local1"], 'factory': triggeredFactory} if nextBuild is not None: triggeredBuilderKwargs['nextBuild'] = nextBuild triggeredBuilder = BuilderConfig(**triggeredBuilderKwargs) c['builders'] = [mainBuilder, triggeredBuilder] return c def triggerRunsForever(): f2 = BuildFactory() # Infinite sleep command. if sys.platform == 'win32': # Ping localhost infinitely. # There are other options, however they either don't work in # non-interactive mode (e.g. 'pause'), or doesn't available on all # Windows versions (e.g. 'timeout' and 'choice' are available # starting from Windows 7). 
cmd = 'ping -t 127.0.0.1'.split() else: cmd = textwrap.dedent("""\ while : do echo "sleeping"; sleep 1; done """) f2.addStep(steps.ShellCommand(command=cmd)) return setupTriggerConfiguration(f2) def triggeredBuildIsNotCreated(): f2 = BuildFactory() f2.addStep(steps.ShellCommand(command="echo 'hello'")) def nextBuild(*args, **kwargs): return defer.succeed(None) return setupTriggerConfiguration(f2, nextBuild=nextBuild) class TriggeringMaster(RunMasterBase): timeout = 120 change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none") def assertBuildIsCancelled(self, b): self.assertTrue(b['complete']) self.assertEqual(b['results'], CANCELLED, repr(b)) @defer.inlineCallbacks def runTest(self, newBuildCallback, flushErrors=False): newConsumer = yield self.master.mq.startConsuming( newBuildCallback, ('builds', None, 'new')) build = yield self.doForceBuild(wantSteps=True, useChange=self.change, wantLogs=True) self.assertBuildIsCancelled(build) newConsumer.stopConsuming() builds = yield self.master.data.get(("builds",)) for b in builds: self.assertBuildIsCancelled(b) if flushErrors: self.flushLoggedErrors() @defer.inlineCallbacks def testTriggerRunsForever(self): yield self.setupConfig(triggerRunsForever()) self.higherBuild = None def newCallback(_, data): if self.higherBuild is None: self.higherBuild = data['buildid'] else: self.master.data.control( "stop", {}, ("builds", self.higherBuild)) self.higherBuild = None yield self.runTest(newCallback, flushErrors=True) @defer.inlineCallbacks def testTriggerRunsForeverAfterCmdStarted(self): yield self.setupConfig(triggerRunsForever()) self.higherBuild = None def newCallback(_, data): if self.higherBuild is None: self.higherBuild = data['buildid'] else: def f(): self.master.data.control( "stop", {}, ("builds", self.higherBuild)) self.higherBuild = None reactor.callLater(5.0, f) yield self.runTest(newCallback, flushErrors=True) @defer.inlineCallbacks def testTriggeredBuildIsNotCreated(self): yield self.setupConfig(triggeredBuildIsNotCreated()) def newCallback(_, data): self.master.data.control("stop", {}, ("builds", data['buildid'])) yield self.runTest(newCallback) buildbot-2.6.0/master/buildbot/test/integration/test_telegram_bot.py000066400000000000000000000231171361162603000260030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
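# NOTE (editorial sketch): the test below configures TelegramBot with
# useWebhook=True, so the bot registers an HTTP webhook under the master's web
# server (the fake Telegram API expects a setWebhook call); without it the bot
# falls back to polling and deletes any existing webhook instead.  A
# hypothetical webhook-style configuration (token and chat id are made up):
def _example_telegram_reporter():
    from buildbot.reporters.telegram import TelegramBot
    return TelegramBot(bot_token='0000:not-a-real-token',
                       useWebhook=True,
                       chat_ids=[-100],
                       notify_events=['worker'])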
# # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from twisted.web import client from twisted.web.http_headers import Headers from twisted.web.iweb import IBodyProducer from zope.interface import implementer from buildbot.data import connector as dataconnector from buildbot.mq import connector as mqconnector from buildbot.reporters import telegram from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import db from buildbot.test.util import www from buildbot.test.util.decorators import flaky from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import authz from buildbot.www import service as wwwservice @implementer(IBodyProducer) class BytesProducer(object): def __init__(self, body): self.body = body self.length = len(body) def startProducing(self, consumer): consumer.write(self.body) return defer.succeed(None) def pauseProducing(self): pass def stopProducing(self): pass class TelegramBot(db.RealDatabaseWithConnectorMixin, www.RequiresWwwMixin, unittest.TestCase): master = None @defer.inlineCallbacks def get_http(self, bot_token): base_url = "https://api.telegram.org/telegram" + bot_token http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, base_url) # This is necessary as Telegram will make requests in the reconfig http.expect("post", "/getMe", content_json={'ok': 1, 'result': {'username': 'testbot'}}) if bot_token == 'poll': http.expect("post", "/deleteWebhook", content_json={'ok': 1}) else: http.expect("post", "/setWebhook", json={'url': bytes2unicode(self.bot_url)}, content_json={'ok': 1}) return http @defer.inlineCallbacks def setUp(self): table_names = [ 'objects', 'object_state', 'masters', 'workers', 'configured_workers', 'connected_workers', 'builder_masters', 'builders' ] master = fakemaster.make_master(self, wantRealReactor=True) yield self.setUpRealDatabaseWithConnector(master, table_names=table_names, sqlite_memory=False) master.data = dataconnector.DataConnector() yield master.data.setServiceParent(master) master.config.mq = dict(type='simple') master.mq = mqconnector.MQConnector() yield master.mq.setServiceParent(master) yield master.mq.setup() master.config.www = dict( port='tcp:0:interface=127.0.0.1', debug=True, auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=[], logfileName='http.log') master.www = wwwservice.WWWService() yield master.www.setServiceParent(master) yield master.www.startService() yield master.www.reconfigServiceWithBuildbotConfig(master.config) session = mock.Mock() session.uid = "0" master.www.site.sessionFactory = mock.Mock(return_value=session) # now that we have a port, construct the real URL and insert it into # the config. The second reconfig isn't really required, but doesn't # hurt. 
self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum() self.url = unicode2bytes(self.url) master.config.buildbotURL = self.url yield master.www.reconfigServiceWithBuildbotConfig(master.config) self.master = master self.agent = client.Agent(reactor) # create a telegram bot service tb = master.config.services['TelegramBot'] = telegram.TelegramBot( bot_token='12345:secret', useWebhook=True, chat_ids=[-123456], notify_events=['worker'] ) tb._get_http = self.get_http yield tb.setServiceParent(self.master) self.bot_url = self.url + b"telegram12345:secret" yield tb.startService() self.sent_messages = [] def send_message(chat, message, **kwargs): self.sent_messages.append((chat, message)) tb.bot.send_message = send_message @defer.inlineCallbacks def tearDown(self): if self.master: yield self.master.www.stopService() yield self.tearDownRealDatabaseWithConnector() @flaky(issueNumber=5120) @defer.inlineCallbacks def testWebhook(self): payload = unicode2bytes(json.dumps({ "update_id": 12345, "message": { "message_id": 123, "from": { "id": 123456789, "first_name": "Alice", }, "chat": { "id": -12345678, "title": "Wonderlands", "type": "group" }, "date": 1566688888, "text": "/getid", } })) pg = yield self.agent.request(b'POST', self.bot_url, Headers({'Content-Type': ['application/json']}), BytesProducer(payload)) self.assertEqual(pg.code, 202, "did not get 202 response for '{}'".format(bytes2unicode(self.bot_url))) self.assertIn('123456789', self.sent_messages[0][1]) self.assertIn('-12345678', self.sent_messages[1][1]) @flaky(issueNumber=5120) @defer.inlineCallbacks def testReconfig(self): tb = self.master.config.services['TelegramBot'] yield tb.reconfigService( bot_token='12345:secret', useWebhook=True, chat_ids=[-123456], notify_events=['problem'] ) @flaky(issueNumber=5120) @defer.inlineCallbacks def testLoadState(self): tboid = yield self.master.db.state.getObjectId('testbot', 'buildbot.reporters.telegram.TelegramWebhookBot') yield self.insertTestData([ fakedb.ObjectState(objectid=tboid, name='notify_events', value_json='[[123456789, ["started", "finished"]]]'), fakedb.ObjectState(objectid=tboid, name='missing_workers', value_json='[[123456789, [12]]]'), ]) tb = self.master.config.services['TelegramBot'] yield tb.bot.loadState() c = tb.bot.getContact({'id': 123456789}, {'id': 123456789}) self.assertEquals(c.channel.notify_events, {'started', 'finished'}) self.assertEquals(c.channel.missing_workers, {12}) @flaky(issueNumber=5120) @defer.inlineCallbacks def testSaveState(self): tb = self.master.config.services['TelegramBot'] tboid = yield self.master.db.state.getObjectId('testbot', 'buildbot.reporters.telegram.TelegramWebhookBot') notify_events = yield self.master.db.state.getState(tboid, 'notify_events', ()) missing_workers = yield self.master.db.state.getState(tboid, 'missing_workers', ()) self.assertNotIn([99, ['cancelled']], notify_events) self.assertNotIn([99, [13]], missing_workers) tb.bot.getChannel(98) # this channel should not be saved c = tb.bot.getChannel(99) self.assertIn(98, tb.bot.channels) self.assertIn(99, tb.bot.channels) c.notify_events = {'cancelled'} c.missing_workers = {13} yield tb.bot.saveNotifyEvents() yield tb.bot.saveMissingWorkers() notify_events = yield self.master.db.state.getState(tboid, 'notify_events', ()) missing_workers = yield self.master.db.state.getState(tboid, 'missing_workers', ()) self.assertNotIn(98, (c for c, _ in notify_events)) self.assertIn([99, ['cancelled']], notify_events) self.assertIn([99, [13]], missing_workers) @flaky(issueNumber=5120) 
@defer.inlineCallbacks def testMissingWorker(self): yield self.insertTestData([fakedb.Worker(id=1, name='local1')]) tb = self.master.config.services['TelegramBot'] channel = tb.bot.getChannel(-123456) self.assertEquals(channel.notify_events, {'worker'}) yield self.master.data.updates.workerMissing( workerid=1, masterid=self.master.masterid, last_connection='long time ago', notify=['admin@worker.org'], ) self.assertEquals(self.sent_messages[0][1], "Worker `local1` is missing. It was seen last on long time ago.") yield self.master.data.updates.workerConnected( workerid=1, masterid=self.master.masterid, workerinfo={}, ) self.assertEquals(self.sent_messages[1][1], "Worker `local1` is back online.") buildbot-2.6.0/master/buildbot/test/integration/test_trigger.py000066400000000000000000000075051361162603000250050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from io import StringIO from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with two builders and a trigger step linking them expectedOutputRegex = \ r"""\*\*\* BUILD 1 \*\*\* ==> build successful \(success\) \*\*\* STEP worker_preparation \*\*\* ==> worker ready \(success\) \*\*\* STEP shell \*\*\* ==> 'echo hello' \(success\) log:stdio \({loglines}\) \*\*\* STEP trigger \*\*\* ==> triggered trigsched \(success\) url:trigsched #2 \(http://localhost:8080/#buildrequests/2\) url:success: build #1 \(http://localhost:8080/#builders/(1|2)/builds/1\) \*\*\* STEP shell_1 \*\*\* ==> 'echo world' \(success\) log:stdio \({loglines}\) \*\*\* BUILD 2 \*\*\* ==> build successful \(success\) \*\*\* STEP worker_preparation \*\*\* ==> worker ready \(success\) \*\*\* STEP shell \*\*\* ==> 'echo ola' \(success\) log:stdio \({loglines}\) """ class TriggeringMaster(RunMasterBase): @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig(masterConfig()) change = dict(branch="master", files=["foo.c"], author="me@foo.com", committer="me@foo.com", comments="good stuff", revision="HEAD", project="none" ) build = yield self.doForceBuild(wantSteps=True, useChange=change, wantLogs=True) self.assertEqual( build['steps'][2]['state_string'], 'triggered trigsched') builds = yield self.master.data.get(("builds",)) self.assertEqual(len(builds), 2) dump = StringIO() for b in builds: yield self.printBuild(b, dump) # depending on the environment the number of lines is different between # test hosts loglines = builds[1]['steps'][1]['logs'][0]['num_lines'] self.assertRegex(dump.getvalue(), expectedOutputRegex.format(loglines=loglines)) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.Triggerable( name="trigsched", 
builderNames=["build"]), schedulers.AnyBranchScheduler( name="sched", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep(steps.Trigger(schedulerNames=['trigsched'], waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f), BuilderConfig(name="build", workernames=["local1"], factory=f2)] return c buildbot-2.6.0/master/buildbot/test/integration/test_try_client.py000066400000000000000000000204271361162603000255140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.python.filepath import FilePath from buildbot import util from buildbot.clients import tryclient from buildbot.schedulers import trysched from buildbot.test.util import www from buildbot.test.util.integration import RunMasterBase # wait for some asynchronous result @defer.inlineCallbacks def waitFor(fn): while True: res = yield fn() if res: return res yield util.asyncSleep(.01) class Schedulers(RunMasterBase, www.RequiresWwwMixin): def setUp(self): self.master = None self.sch = None def spawnProcess(pp, executable, args, environ): tmpfile = os.path.join(self.jobdir, 'tmp', 'testy') newfile = os.path.join(self.jobdir, 'new', 'testy') with open(tmpfile, "w") as f: f.write(pp.job) os.rename(tmpfile, newfile) log.msg("wrote jobfile %s" % newfile) # get the scheduler to poll this directory now d = self.sch.watcher.poll() d.addErrback(log.err, 'while polling') @d.addCallback def finished(_): st = mock.Mock() st.value.signal = None st.value.exitCode = 0 pp.processEnded(st) self.patch(reactor, 'spawnProcess', spawnProcess) def getSourceStamp(vctype, treetop, branch=None, repository=None): return defer.succeed( tryclient.SourceStamp(branch='br', revision='rr', patch=(0, '++--'))) self.patch(tryclient, 'getSourceStamp', getSourceStamp) self.output = [] # stub out printStatus, as it's timing-based and thus causes # occasional test failures. 
self.patch(tryclient.Try, 'printStatus', lambda _: None) def output(*msg): msg = ' '.join(map(str, msg)) log.msg("output: %s" % msg) self.output.append(msg) self.patch(tryclient, 'output', output) def setupJobdir(self): jobdir = FilePath(self.mktemp()) jobdir.createDirectory() self.jobdir = jobdir.path for sub in 'new', 'tmp', 'cur': jobdir.child(sub).createDirectory() return self.jobdir @defer.inlineCallbacks def startMaster(self, sch): extra_config = { 'schedulers': [sch], } self.sch = sch yield self.setupConfig(masterConfig(extra_config)) # wait until the scheduler is active yield waitFor(lambda: self.sch.active) # and, for Try_Userpass, until it's registered its port if isinstance(self.sch, trysched.Try_Userpass): def getSchedulerPort(): if not self.sch.registrations: return self.serverPort = self.sch.registrations[0].getPort() log.msg("Scheduler registered at port %d" % self.serverPort) return True yield waitFor(getSchedulerPort) def runClient(self, config): self.clt = tryclient.Try(config) return self.clt.run(_inTests=True) @defer.inlineCallbacks def test_userpass_no_wait(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:%s' % self.serverPort, 'username': 'u', 'passwd': b'p', }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Delivering job; comment= None', 'job has been delivered', 'not waiting for builds to finish' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_userpass_wait(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'master': '127.0.0.1:%s' % self.serverPort, 'username': 'u', 'passwd': b'p', 'wait': True, }) self.assertEqual(self.output, [ "using 'pb' connect method", 'job created', 'Delivering job; comment= None', 'job has been delivered', 'All Builds Complete', 'a: success (build successful)', ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_userpass_list_builders(self): yield self.startMaster( trysched.Try_Userpass('try', ['a'], 0, [('u', b'p')])) yield self.runClient({ 'connect': 'pb', 'get-builder-names': True, 'master': '127.0.0.1:%s' % self.serverPort, 'username': 'u', 'passwd': b'p', }) self.assertEqual(self.output, [ "using 'pb' connect method", 'The following builders are available for the try scheduler: ', 'a' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 0) @defer.inlineCallbacks def test_jobdir_no_wait(self): jobdir = self.setupJobdir() yield self.startMaster(trysched.Try_Jobdir('try', ['a'], jobdir)) yield self.runClient({ 'connect': 'ssh', 'master': '127.0.0.1', 'username': 'u', 'passwd': b'p', 'builders': 'a', # appears to be required for ssh }) self.assertEqual(self.output, [ "using 'ssh' connect method", 'job created', 'job has been delivered', 'not waiting for builds to finish' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) @defer.inlineCallbacks def test_jobdir_wait(self): jobdir = self.setupJobdir() yield self.startMaster(trysched.Try_Jobdir('try', ['a'], jobdir)) yield self.runClient({ 'connect': 'ssh', 'wait': True, 'host': '127.0.0.1', 'username': 'u', 'passwd': b'p', 'builders': 'a', # appears to be required for ssh }) self.assertEqual(self.output, [ "using 'ssh' connect method", 'job created', 
'job has been delivered', 'waiting for builds with ssh is not supported' ]) buildsets = yield self.master.db.buildsets.getBuildsets() self.assertEqual(len(buildsets), 1) def masterConfig(extra_config): c = {} from buildbot.config import BuilderConfig from buildbot.process.buildstep import BuildStep from buildbot.process.factory import BuildFactory from buildbot.process import results class MyBuildStep(BuildStep): def start(self): self.finished(results.SUCCESS) c['change_source'] = [] c['schedulers'] = [] # filled in above f1 = BuildFactory() f1.addStep(MyBuildStep(name='one')) f1.addStep(MyBuildStep(name='two')) c['builders'] = [ BuilderConfig(name="a", workernames=["local1"], factory=f1), ] c['title'] = "test" c['titleURL'] = "test" c['buildbotURL'] = "http://localhost:8010/" c['mq'] = {'debug': True} # test wants to influence the config, but we still return a new config # each time c.update(extra_config) return c buildbot-2.6.0/master/buildbot/test/integration/test_try_client_e2e.py000066400000000000000000000042141361162603000262430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from buildbot.test.util.integration import RunMasterBase # This integration test tests that the try command line works end2end class TryClientE2E(RunMasterBase): timeout = 15 @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) def trigger_callback(): def thd(): os.system("buildbot try --connect=pb --master=127.0.0.1:8031 -b testy " "--property=foo:bar --username=alice --passwd=pw1 --vc=none") reactor.callInThread(thd) build = yield self.doForceBuild(wantSteps=True, triggerCallback=trigger_callback, wantLogs=True, wantProperties=True) self.assertEqual(build['buildid'], 1) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.Try_Userpass(name="try", builderNames=["testy"], port=8031, userpass=[("alice", "pw1")]) ] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_upgrade.py000066400000000000000000000232431361162603000247660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import locale import os import shutil import sqlite3 import tarfile import migrate import migrate.versioning.api from sqlalchemy.engine import reflection from sqlalchemy.exc import DatabaseError from twisted.internet import defer from twisted.python import util from twisted.trial import unittest from buildbot.db import connector from buildbot.db.model import EightUpgradeError from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import querylog from buildbot.test.util.misc import TestReactorMixin class UpgradeTestMixin(db.RealDatabaseMixin, TestReactorMixin): """Supporting code to test upgrading from older versions by untarring a basedir tarball and then checking that the results are as expected.""" # class variables to set in subclasses # filename of the tarball (sibling to this file) source_tarball = None # set to true in subclasses to set up and use a real DB use_real_db = False # db URL to use, if not using a real db db_url = "sqlite:///state.sqlite" # these tests take a long time on platforms where sqlite is slow # (e.g., lion, see #2256) timeout = 1200 @defer.inlineCallbacks def setUpUpgradeTest(self): # set up the "real" db if desired if self.use_real_db: # note this changes self.db_url yield self.setUpRealDatabase(sqlite_memory=False) self.basedir = None if self.source_tarball: tarball = util.sibpath(__file__, self.source_tarball) if not os.path.exists(tarball): raise unittest.SkipTest( "'%s' not found (normal when not building from Git)" % tarball) tf = tarfile.open(tarball) prefixes = set() for inf in tf: tf.extract(inf) prefixes.add(inf.name.split('/', 1)[0]) tf.close() # (note that tf.extractall isn't available in py2.4) # get the top-level dir from the tarball assert len(prefixes) == 1, "tarball has multiple top-level dirs!" 
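            # the tarball's single top-level directory becomes the basedir
            # that the upgrade test below operates on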
self.basedir = prefixes.pop() else: if not os.path.exists("basedir"): os.makedirs("basedir") self.basedir = os.path.abspath("basedir") self.master = master = fakemaster.make_master(self) master.config.db['db_url'] = self.db_url self.db = connector.DBConnector(self.basedir) yield self.db.setServiceParent(master) yield self.db.setup(check_version=False) self._sql_log_handler = querylog.start_log_queries() @defer.inlineCallbacks def tearDownUpgradeTest(self): querylog.stop_log_queries(self._sql_log_handler) if self.use_real_db: yield self.tearDownRealDatabase() if self.basedir: shutil.rmtree(self.basedir) # save subclasses the trouble of calling our setUp and tearDown methods def setUp(self): self.setUpTestReactor() return self.setUpUpgradeTest() def tearDown(self): return self.tearDownUpgradeTest() def assertModelMatches(self): def comp(engine): # use compare_model_to_db, which gets everything but foreign # keys and indexes diff = migrate.versioning.api.compare_model_to_db( engine, self.db.model.repo_path, self.db.model.metadata) if diff: return diff # check indexes manually insp = reflection.Inspector.from_engine(engine) # unique, name, column_names diff = [] for tbl in self.db.model.metadata.sorted_tables: exp = sorted([ dict(name=idx.name, unique=idx.unique and 1 or 0, column_names=sorted([c.name for c in idx.columns])) for idx in tbl.indexes], key=lambda x: x['name']) # include implied indexes on postgres and mysql if engine.dialect.name == 'mysql': implied = [idx for (tname, idx) in self.db.model.implied_indexes if tname == tbl.name] exp = sorted(exp + implied, key=lambda k: k["name"]) got = sorted(insp.get_indexes(tbl.name), key=lambda x: x['name']) if exp != got: got_names = {idx['name'] for idx in got} exp_names = {idx['name'] for idx in exp} got_info = dict((idx['name'], idx) for idx in got) exp_info = dict((idx['name'], idx) for idx in exp) for name in got_names - exp_names: diff.append("got unexpected index %s on table %s: %r" % (name, tbl.name, got_info[name])) for name in exp_names - got_names: diff.append("missing index %s on table %s" % (name, tbl.name)) for name in got_names & exp_names: gi = dict(name=name, unique=got_info[name]['unique'] and 1 or 0, column_names=sorted(got_info[name]['column_names'])) ei = exp_info[name] if gi != ei: diff.append( "index %s on table %s differs: got %s; exp %s" % (name, tbl.name, gi, ei)) if diff: return "\n".join(diff) d = self.db.pool.do_with_engine(comp) # older sqlites cause failures in reflection, which manifest as a # TypeError. Reflection is only used for tests, so we can just skip # this test on such platforms. We still get the advantage of trying # the upgrade, at any rate. 
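        # errback first: schema-reflection bugs become a SkipTest; the
        # callback then fails the test if comp() reported any difference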
@d.addErrback def catch_TypeError(f): f.trap(TypeError) raise unittest.SkipTest("model comparison skipped: bugs in schema " "reflection on this sqlite version") @d.addCallback def check(diff): if diff: self.fail("\n" + str(diff)) return d def gotError(self, e): e.trap(sqlite3.DatabaseError, DatabaseError) if "file is encrypted or is not a database" in str(e): self.flushLoggedErrors(sqlite3.DatabaseError) self.flushLoggedErrors(DatabaseError) raise unittest.SkipTest( "sqlite dump not readable on this machine %s" % str(e)) return e def do_test_upgrade(self, pre_callbacks=None): if pre_callbacks is None: pre_callbacks = [] d = defer.succeed(None) for cb in pre_callbacks: d.addCallback(cb) d.addCallback(lambda _: self.db.model.upgrade()) d.addErrback(self.gotError) d.addCallback(lambda _: self.db.pool.do(self.verify_thd)) d.addCallback(lambda _: self.assertModelMatches()) return d class UpgradeTestEmpty(UpgradeTestMixin, unittest.TestCase): use_real_db = True def test_emptydb_modelmatches(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. raise(unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding))) d = self.db.model.upgrade() d.addCallback(lambda r: self.assertModelMatches()) return d class UpgradeTestV090b4(UpgradeTestMixin, unittest.TestCase): source_tarball = "v090b4.tgz" def test_upgrade(self): return self.do_test_upgrade() def verify_thd(self, conn): pass def test_gotError(self): def upgrade(): return defer.fail(sqlite3.DatabaseError('file is encrypted or is not a database')) self.db.model.upgrade = upgrade self.failureResultOf(self.do_test_upgrade(), unittest.SkipTest) def test_gotError2(self): def upgrade(): return defer.fail(DatabaseError('file is encrypted or is not a database', None, None)) self.db.model.upgrade = upgrade self.failureResultOf(self.do_test_upgrade(), unittest.SkipTest) class UpgradeTestV087p1(UpgradeTestMixin, unittest.TestCase): source_tarball = "v087p1.tgz" def gotError(self, e): self.flushLoggedErrors(EightUpgradeError) def verify_thd(self, conn): "partially verify the contents of the db - run in a thread" r = conn.execute("select version from migrate_version limit 1") version = r.scalar() self.assertEqual(version, 22) def assertModelMatches(self): pass def test_upgrade(self): return self.do_test_upgrade() buildbot-2.6.0/master/buildbot/test/integration/test_usePty.py000066400000000000000000000060561361162603000246330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from distutils.version import LooseVersion from twisted import __version__ as twistedVersion from twisted.internet import defer from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step, which use usePTY class ShellMaster(RunMasterBase): @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_usePTY(self): yield self.setupConfig(masterConfig(usePTY=True)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "in a terminal", onlyStdout=True) self.assertTrue(res) # Twisted versions less than 17.1.0 would issue a warning: # "Argument strings and environment keys/values passed to reactor.spawnProcess # "should be str, not unicode." # This warning was unnecessary. Even in the old versions of Twisted, the # unicode arguments were encoded. This warning was removed in Twisted here: # # https://github.com/twisted/twisted/commit/23fa3cc05549251ea4118e4e03354d58df87eaaa if LooseVersion(twistedVersion) < LooseVersion("17.1.0"): self.flushWarnings() @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_NOusePTY(self): yield self.setupConfig(masterConfig(usePTY=False)) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) res = yield self.checkBuildStepLogExist(build, "not a terminal", onlyStdout=True) self.assertTrue(res) # master configuration def masterConfig(usePTY): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand( command='if [ -t 1 ] ; then echo in a terminal; else echo "not a terminal"; fi', usePTY=usePTY)) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_virtual_builder.py000066400000000000000000000045601361162603000265340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.test.util.integration import RunMasterBase # This integration test creates a master and worker environment, # with one builder and a shellcommand step # meant to be a template for integration steps class ShellMaster(RunMasterBase): @defer.inlineCallbacks def test_shell(self): yield self.setupConfig(masterConfig()) build = yield self.doForceBuild(wantSteps=True, wantLogs=True) self.assertEqual(build['buildid'], 1) builders = yield self.master.data.get(("builders",)) self.assertEqual(len(builders), 2) self.assertEqual(builders[1], { 'masterids': [], 'tags': ['virtual', '_virtual_'], 'description': 'I am a virtual builder', 'name': 'virtual_testy', 'builderid': 2}) self.assertEqual(build['builderid'], builders[1]['builderid']) # master configuration def masterConfig(): c = {} from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory from buildbot.plugins import steps, schedulers c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) c['builders'] = [ BuilderConfig(name="testy", workernames=["local1"], properties={ 'virtual_builder_name': 'virtual_testy', 'virtual_builder_description': 'I am a virtual builder', 'virtual_builder_tags': ['virtual'], }, factory=f)] return c buildbot-2.6.0/master/buildbot/test/integration/test_worker.py000066400000000000000000000206341361162603000246510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from zope.interface import implementer from buildbot.config import BuilderConfig from buildbot.interfaces import IBuildStepFactory from buildbot.machine.base import Machine from buildbot.process.buildstep import BuildStep from buildbot.process.factory import BuildFactory from buildbot.process.results import CANCELLED from buildbot.test.fake.latent import LatentController from buildbot.test.util.integration import RunFakeMasterTestCase try: from buildbot_worker.bot import LocalWorker as RemoteWorker except ImportError: RemoteWorker = None @implementer(IBuildStepFactory) class StepController: def __init__(self, **kwargs): self.kwargs = kwargs self.steps = [] def buildStep(self): step_deferred = defer.Deferred() step = ControllableStep(step_deferred, **self.kwargs) self.steps.append((step, step_deferred)) return step class ControllableStep(BuildStep): def run(self): return self._step_deferred def __init__(self, step_deferred, **kwargs): super().__init__(**kwargs) self._step_deferred = step_deferred def interrupt(self, reason): self._step_deferred.callback(CANCELLED) class Tests(RunFakeMasterTestCase): @defer.inlineCallbacks def test_latent_max_builds(self): """ If max_builds is set, only one build is started on a latent worker at a time. 
""" controller = LatentController(self, 'local', max_builds=1) step_controller = StepController() config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory([step_controller]), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory([step_controller]), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('testy-1')), (yield master.data.updates.findBuilderId('testy-2')), ] started_builds = [] yield master.mq.startConsuming( lambda key, build: started_builds.append(build), ('builds', None, 'new')) # Trigger a buildrequest bsid, brids = yield master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], ) # The worker fails to substantiate. controller.start_instance(True) controller.connect_worker() self.assertEqual(len(started_builds), 1) yield controller.auto_stop(True) @defer.inlineCallbacks def test_local_worker_max_builds(self): """ If max_builds is set, only one build is started on a worker at a time. """ step_controller = StepController() config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory([step_controller]), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory([step_controller]), ), ], 'workers': [self.createLocalWorker('local', max_builds=1)], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('testy-1')), (yield master.data.updates.findBuilderId('testy-2')), ] started_builds = [] yield master.mq.startConsuming( lambda key, build: started_builds.append(build), ('builds', None, 'new')) # Trigger a buildrequest bsid, brids = yield master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], ) self.assertEqual(len(started_builds), 1) @defer.inlineCallbacks def test_worker_registered_to_machine(self): worker = self.createLocalWorker('worker1', machine_name='machine1') machine = Machine('machine1') config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory(), ), ], 'workers': [worker], 'machines': [machine], 'protocols': {'null': {}}, 'multiMaster': True, } yield self.getMaster(config_dict) self.assertIs(worker.machine, machine) @defer.inlineCallbacks def test_worker_reconfigure_with_new_builder(self): """ Checks if we can successfully reconfigure if we add new builders to worker. """ config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=['local1'], factory=BuildFactory()), ], 'workers': [self.createLocalWorker('local1', max_builds=1)], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } yield self.getMaster(config_dict) config_dict['builders'] += [ BuilderConfig(name="builder2", workernames=['local1'], factory=BuildFactory()), ] config_dict['workers'] = [self.createLocalWorker('local1', max_builds=2)] # reconfig should succeed yield self.reconfigMaster(config_dict) @defer.inlineCallbacks def test_worker_os_release_info_roundtrip(self): """ Checks if we can successfully get information about the platform the worker is running on. 
This is very similar to test_worker_comm.TestWorkerComm.test_worker_info, except that we check details such as whether the information is passed in correct encoding. """ worker = self.createLocalWorker('local1') config_dict = { 'builders': [ BuilderConfig(name="builder1", workernames=['local1'], factory=BuildFactory()), ], 'workers': [worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } yield self.getMaster(config_dict) props = worker.worker_status.info from buildbot_worker.base import BotBase expected_props_dict = {} BotBase._read_os_release(BotBase.os_release_file, expected_props_dict) for key, value in expected_props_dict.items(): self.assertTrue(isinstance(key, str)) self.assertTrue(isinstance(value, str)) self.assertEqual(props.getProperty(key), value) if RemoteWorker is None: skip = "buildbot-worker package is not installed" buildbot-2.6.0/master/buildbot/test/integration/test_worker_comm.py000066400000000000000000000334561361162603000256720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.cred import credentials from twisted.internet import defer from twisted.internet import reactor from twisted.internet.endpoints import clientFromString from twisted.python import log from twisted.python import util from twisted.spread import pb from twisted.trial import unittest import buildbot from buildbot import config from buildbot import pbmanager from buildbot import worker from buildbot.process import botmaster from buildbot.process import builder from buildbot.process import factory from buildbot.status import master from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util.eventual import eventually from buildbot.worker import manager as workermanager PKI_DIR = util.sibpath(__file__, 'pki') class FakeWorkerForBuilder(pb.Referenceable): """ Fake worker-side WorkerForBuilder object """ class FakeWorkerWorker(pb.Referenceable): """ Fake worker-side Worker object @ivar master_persp: remote perspective on the master """ def __init__(self, callWhenBuilderListSet): self.callWhenBuilderListSet = callWhenBuilderListSet self.master_persp = None self._detach_deferreds = [] self._detached = False def waitForDetach(self): if self._detached: return defer.succeed(None) d = defer.Deferred() self._detach_deferreds.append(d) return d def setMasterPerspective(self, persp): self.master_persp = persp # clear out master_persp on disconnect def clear_persp(): self.master_persp = None persp.broker.notifyOnDisconnect(clear_persp) def fire_deferreds(): self._detached = True self._detach_deferreds, deferreds = None, self._detach_deferreds for d in deferreds: d.callback(None) persp.broker.notifyOnDisconnect(fire_deferreds) def remote_print(self, message): log.msg("WORKER-SIDE: remote_print(%r)" % (message,)) def 
remote_getWorkerInfo(self): return { 'info': 'here', 'worker_commands': { 'x': 1, }, 'numcpus': 1, 'none': None, 'os_release': b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(), b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82\xb9\xe3' b'\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(): b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode(), } def remote_getVersion(self): return buildbot.version def remote_getCommands(self): return {'x': 1} def remote_setBuilderList(self, builder_info): builder_names = [n for n, dir in builder_info] slbuilders = [FakeWorkerForBuilder() for n in builder_names] eventually(self.callWhenBuilderListSet) return dict(zip(builder_names, slbuilders)) class FakeBuilder(builder.Builder): def __init__(self, name): super().__init__(name) self.builder_status = mock.Mock() def attached(self, worker, commands): return defer.succeed(None) def detached(self, worker): pass def getOldestRequestTime(self): return 0 def maybeStartBuild(self): return defer.succeed(None) class MyWorker(worker.Worker): def attached(self, conn): self.detach_d = defer.Deferred() return super().attached(conn) def detached(self): super().detached() self.detach_d, d = None, self.detach_d d.callback(None) class TestWorkerComm(unittest.TestCase, TestReactorMixin): """ Test handling of connections from workers as integrated with - Twisted Spread - real TCP connections. - PBManager @ivar master: fake build master @ivar pbamanger: L{PBManager} instance @ivar botmaster: L{BotMaster} instance @ivar worker: master-side L{Worker} instance @ivar workerworker: worker-side L{FakeWorkerWorker} instance @ivar port: TCP port to connect to @ivar server_connection_string: description string for the server endpoint @ivar client_connection_string_tpl: description string template for the client endpoint (expects to passed 'port') @ivar endpoint: endpoint controlling the outbound connection from worker to master """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) # set the worker port to a loopback address with unspecified # port self.pbmanager = self.master.pbmanager = pbmanager.PBManager() yield self.pbmanager.setServiceParent(self.master) # remove the fakeServiceParent from fake service hierarchy, and replace # by a real one yield self.master.workers.disownServiceParent() self.workers = self.master.workers = workermanager.WorkerManager( self.master) yield self.workers.setServiceParent(self.master) self.botmaster = botmaster.BotMaster() yield self.botmaster.setServiceParent(self.master) self.master.status = master.Status() yield self.master.status.setServiceParent(self.master) self.master.botmaster = self.botmaster self.master.data.updates.workerConfigured = lambda *a, **k: None yield self.master.startService() self.buildworker = None self.port = None self.workerworker = None self.endpoint = None self.broker = None self._detach_deferreds = [] # patch in our FakeBuilder for the regular Builder class self.patch(botmaster, 'Builder', FakeBuilder) self.server_connection_string = "tcp:0:interface=127.0.0.1" self.client_connection_string_tpl = "tcp:host=127.0.0.1:port={port}" def tearDown(self): if self.broker: del self.broker if self.endpoint: del self.endpoint deferreds = self._detach_deferreds + [ self.pbmanager.stopService(), self.botmaster.stopService(), self.workers.stopService(), ] # if the worker is still attached, wait for it to detach, too if self.buildworker and self.buildworker.detach_d: deferreds.append(self.buildworker.detach_d) 
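        # tear-down completes only once every service has stopped and, if a
        # worker is still attached, once its detach has finished as well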
return defer.gatherResults(deferreds) @defer.inlineCallbacks def addWorker(self, **kwargs): """ Create a master-side worker instance and add it to the BotMaster @param **kwargs: arguments to pass to the L{Worker} constructor. """ self.buildworker = MyWorker("testworker", "pw", **kwargs) # reconfig the master to get it set up new_config = self.master.config new_config.protocols = {"pb": {"port": self.server_connection_string}} new_config.workers = [self.buildworker] new_config.builders = [config.BuilderConfig( name='bldr', workername='testworker', factory=factory.BuildFactory())] yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) yield self.workers.reconfigServiceWithBuildbotConfig(new_config) # as part of the reconfig, the worker registered with the pbmanager, so # get the port it was assigned self.port = self.buildworker.registration.getPBPort() def connectWorker(self, waitForBuilderList=True): """ Connect a worker the master via PB @param waitForBuilderList: don't return until the setBuilderList has been called @returns: L{FakeWorkerWorker} and a Deferred that will fire when it is detached; via deferred """ factory = pb.PBClientFactory() creds = credentials.UsernamePassword(b"testworker", b"pw") setBuilderList_d = defer.Deferred() workerworker = FakeWorkerWorker( lambda: setBuilderList_d.callback(None)) login_d = factory.login(creds, workerworker) @login_d.addCallback def logged_in(persp): workerworker.setMasterPerspective(persp) # set up to hear when the worker side disconnects workerworker.detach_d = defer.Deferred() persp.broker.notifyOnDisconnect( lambda: workerworker.detach_d.callback(None)) self._detach_deferreds.append(workerworker.detach_d) return workerworker self.endpoint = clientFromString( reactor, self.client_connection_string_tpl.format(port=self.port)) connected_d = self.endpoint.connect(factory) dlist = [connected_d, login_d] if waitForBuilderList: dlist.append(setBuilderList_d) d = defer.DeferredList(dlist, consumeErrors=True, fireOnOneErrback=True) d.addCallback(lambda _: workerworker) return d def workerSideDisconnect(self, worker): """Disconnect from the worker side""" worker.master_persp.broker.transport.loseConnection() @defer.inlineCallbacks def test_connect_disconnect(self): """Test a single worker connecting and disconnecting.""" yield self.addWorker() # connect worker = yield self.connectWorker() # disconnect self.workerSideDisconnect(worker) # wait for the resulting detach yield worker.waitForDetach() @defer.inlineCallbacks def test_tls_connect_disconnect(self): """Test with TLS or SSL endpoint. According to the deprecation note for the SSL client endpoint, the TLS endpoint is supported from Twistd 16.0. TODO add certificate verification (also will require some conditionals on various versions, including PyOpenSSL, service_identity. 
The CA used to generate the testing cert is in ``PKI_DIR/ca`` """ def escape_colon(path): # on windows we can't have \ as it serves as the escape character for : return path.replace('\\', '/').replace(':', '\\:') self.server_connection_string = ( "ssl:port=0:certKey={pub}:privateKey={priv}:" + "interface=127.0.0.1").format( pub=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.crt')), priv=escape_colon(os.path.join(PKI_DIR, '127.0.0.1.key'))) self.client_connection_string_tpl = "ssl:host=127.0.0.1:port={port}" yield self.addWorker() # connect worker = yield self.connectWorker() # disconnect self.workerSideDisconnect(worker) # wait for the resulting detach yield worker.waitForDetach() @defer.inlineCallbacks def test_worker_info(self): yield self.addWorker() worker = yield self.connectWorker() props = self.buildworker.worker_status.info # check worker info passing self.assertEqual(props.getProperty("info"), "here") # check worker info passing with UTF-8 self.assertEqual(props.getProperty("os_release"), b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()) self.assertEqual(props.getProperty(b'\xe3\x83\xaa\xe3\x83\xaa\xe3\x83\xbc\xe3\x82' b'\xb9\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()), b'\xe3\x83\x86\xe3\x82\xb9\xe3\x83\x88'.decode()) self.assertEqual(props.getProperty("none"), None) self.assertEqual(props.getProperty("numcpus"), 1) self.workerSideDisconnect(worker) yield worker.waitForDetach() @defer.inlineCallbacks def _test_duplicate_worker(self): yield self.addWorker() # connect first worker worker1 = yield self.connectWorker() # connect second worker; this should fail try: yield self.connectWorker(waitForBuilderList=False) connect_failed = False except Exception: connect_failed = True self.assertTrue(connect_failed) # disconnect both and wait for that to percolate self.workerSideDisconnect(worker1) yield worker1.waitForDetach() # flush the exception logged for this on the master self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def _test_duplicate_worker_old_dead(self): yield self.addWorker() # connect first worker worker1 = yield self.connectWorker() # monkeypatch that worker to fail with PBConnectionLost when its # remote_print method is called def remote_print(message): worker1.master_persp.broker.transport.loseConnection() raise pb.PBConnectionLost("fake!") worker1.remote_print = remote_print # connect second worker; this should succeed, and the old worker # should be disconnected. worker2 = yield self.connectWorker() # disconnect both and wait for that to percolate self.workerSideDisconnect(worker2) yield worker1.waitForDetach() # flush the exception logged for this on the worker self.assertEqual(len(self.flushLoggedErrors(pb.PBConnectionLost)), 1) buildbot-2.6.0/master/buildbot/test/integration/test_worker_kubernetes.py000066400000000000000000000115121361162603000270730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from unittest.case import SkipTest from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase from buildbot.util import kubeclientservice from buildbot.worker import kubernetes # This integration test creates a master and kubernetes worker environment, # It requires a kubernetes cluster up and running. It tries to get the config # like loading "~/.kube/config" files or environment variable. # You can use minikube to create a kubernetes environment for development: # # See https://github.com/kubernetes/minikube for full documentation # minikube start # [--vm-driver=kvm] # # export masterFQDN=$(ip route get $(minikube ip)| awk '{ print $5 }') # export KUBE_NAMESPACE=`kubectl config get-contexts \`kubectl config current-context\` |tail -n1 |awk '{print $5}'` # useful commands: # - 'minikube dashboard' to display WebUI of the kubernetes cluster # - 'minikube ip' to display the IP of the kube-apimaster # - 'minikube ssh' to get a shell into the minikube VM # following environment variable can be used to stress concurrent worker startup NUM_CONCURRENT = int(os.environ.get("KUBE_TEST_NUM_CONCURRENT_BUILD", 1)) class KubernetesMaster(RunMasterBase): timeout = 200 def setUp(self): if "TEST_KUBERNETES" not in os.environ: raise SkipTest( "kubernetes integration tests only run when environment " "variable TEST_KUBERNETES is set") if 'masterFQDN' not in os.environ: raise SkipTest( "you need to export masterFQDN. You have example in the test file. 
" "Make sure that you're spawned worker can callback this IP") @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig( masterConfig(num_concurrent=NUM_CONCURRENT), startWorker=False) yield self.doForceBuild() builds = yield self.master.data.get(("builds", )) # if there are some retry, there will be more builds self.assertEqual(len(builds), 1 + NUM_CONCURRENT) for b in builds: self.assertEqual(b['results'], SUCCESS) class KubernetesMasterTReq(KubernetesMaster): def setup(self): super().setUp() self.patch(kubernetes.KubeClientService, 'PREFER_TREQ', True) # master configuration def masterConfig(num_concurrent, extra_steps=None): if extra_steps is None: extra_steps = [] c = {} c['schedulers'] = [ schedulers.ForceScheduler(name="force", builderNames=["testy"]) ] triggereables = [] for i in range(num_concurrent): c['schedulers'].append( schedulers.Triggerable( name="trigsched" + str(i), builderNames=["build"])) triggereables.append("trigsched" + str(i)) f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep( steps.Trigger( schedulerNames=triggereables, waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) for step in extra_steps: f2.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["kubernetes0"], factory=f), BuilderConfig( name="build", workernames=["kubernetes" + str(i) for i in range(num_concurrent)], factory=f2) ] masterFQDN = os.environ.get('masterFQDN') c['workers'] = [ kubernetes.KubeLatentWorker( 'kubernetes' + str(i), 'buildbot/buildbot-worker', kube_config=kubeclientservice.KubeCtlProxyConfigLoader( namespace=os.getenv("KUBE_NAMESPACE", "default")), masterFQDN=masterFQDN) for i in range(num_concurrent) ] # un comment for debugging what happens if things looks locked. # c['www'] = {'port': 8080} c['protocols'] = {"pb": {"port": "tcp:9989"}} return c buildbot-2.6.0/master/buildbot/test/integration/test_worker_latent.py000066400000000000000000001626341361162603000262270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python.failure import Failure from twisted.spread import pb from buildbot.config import BuilderConfig from buildbot.interfaces import LatentWorkerCannotSubstantiate from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.machine.latent import States as MachineStates from buildbot.process.factory import BuildFactory from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.test.fake.latent import LatentController from buildbot.test.fake.machine import LatentMachineController from buildbot.test.fake.step import BuildStepController from buildbot.test.util.integration import RunFakeMasterTestCase from buildbot.test.util.misc import TimeoutableTestCase from buildbot.test.util.patch_delay import patchForDelay from buildbot.worker.latent import States class TestException(Exception): """ An exception thrown in tests. """ class Latent(TimeoutableTestCase, RunFakeMasterTestCase): def tearDown(self): # Flush the errors logged by the master stop cancelling the builds. self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) super().tearDown() @defer.inlineCallbacks def create_single_worker_config(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local"], factory=BuildFactory(), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_id = yield master.data.updates.findBuilderId('testy') return controller, master, builder_id @defer.inlineCallbacks def create_single_worker_config_with_step(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) stepcontroller = BuildStepController() config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local"], factory=BuildFactory([stepcontroller.step]), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_id = yield master.data.updates.findBuilderId('testy') return controller, stepcontroller, master, builder_id @defer.inlineCallbacks def create_single_worker_two_builder_config(self, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} controller = LatentController(self, 'local', **controller_kwargs) config_dict = { 'builders': [ BuilderConfig(name="testy-1", workernames=["local"], factory=BuildFactory(), ), BuilderConfig(name="testy-2", workernames=["local"], factory=BuildFactory(), ), ], 'workers': [controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } master = yield self.getMaster(config_dict) builder_ids = [ (yield master.data.updates.findBuilderId('testy-1')), (yield master.data.updates.findBuilderId('testy-2')), ] return controller, master, builder_ids @defer.inlineCallbacks def test_latent_workers_start_in_parallel(self): """ If there are two latent workers configured, and two build requests for them, both workers will start substantiating concurrently. """ controllers = [ LatentController(self, 'local1'), LatentController(self, 'local2'), ] config_dict = { 'builders': [ BuilderConfig(name="testy", workernames=["local1", "local2"], factory=BuildFactory()), ], 'workers': [controller.worker for controller in controllers], 'protocols': {'null': {}}, 'multiMaster': True, } master = yield self.getMaster(config_dict) builder_id = yield master.data.updates.findBuilderId('testy') # Request two builds. for i in range(2): yield self.createBuildrequest(master, [builder_id]) # Check that both workers were requested to start. self.assertEqual(controllers[0].starting, True) self.assertEqual(controllers[1].starting, True) for controller in controllers: controller.start_instance(True) yield controller.auto_stop(True) @defer.inlineCallbacks def test_refused_substantiations_get_requeued(self): """ If a latent worker refuses to substantiate, the build request becomes unclaimed. """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # Indicate that the worker can't start an instance. controller.start_instance(False) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield self.assertBuildResults(1, RETRY) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_failed_substantiations_get_requeued(self): """ If a latent worker fails to substantiate, the build request becomes unclaimed. """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # The worker fails to substantiate. controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield self.assertBuildResults(1, RETRY) yield controller.auto_stop(True) @defer.inlineCallbacks def test_failed_substantiations_get_exception(self): """ If a latent worker fails to substantiate, the result is an exception. """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest yield self.createBuildrequest(master, [builder_id]) # The worker fails to substantiate. controller.start_instance( Failure(LatentWorkerCannotSubstantiate("substantiation failed"))) # Flush the errors logged by the failure. 
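        # (unlike the generic failure in the previous test,
        # LatentWorkerCannotSubstantiate is expected to end the build in
        # EXCEPTION rather than RETRY)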
self.flushLoggedErrors(LatentWorkerCannotSubstantiate) # When the substantiation fails, the result is an exception. yield self.assertBuildResults(1, EXCEPTION) yield controller.auto_stop(True) @defer.inlineCallbacks def test_worker_accepts_builds_after_failure(self): """ If a latent worker fails to substantiate, the worker is still able to accept jobs. """ controller, master, builder_id = \ yield self.create_single_worker_config() yield controller.auto_stop(True) # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # The worker fails to substantiate. controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # The retry logic should only trigger after a exponential backoff self.assertEqual(controller.starting, False) # advance the time to the point where we should retry master.reactor.advance(controller.worker.quarantine_initial_timeout) # If the worker started again after the failure, then the retry logic will have # already kicked in to start a new build on this (the only) worker. We check that # a new instance was requested, which indicates that the worker # accepted the build. self.assertEqual(controller.starting, True) # The worker fails to substantiate(again). controller.start_instance( Failure(TestException("substantiation failed"))) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) yield self.assertBuildResults(1, RETRY) # advance the time to the point where we should not retry master.reactor.advance(controller.worker.quarantine_initial_timeout) self.assertEqual(controller.starting, False) # advance the time to the point where we should retry master.reactor.advance(controller.worker.quarantine_initial_timeout) self.assertEqual(controller.starting, True) controller.auto_start(True) controller.auto_stop(True) @defer.inlineCallbacks def test_worker_multiple_substantiations_succeed(self): """ If multiple builders trigger try to substantiate a worker at the same time, if the substantiation succeeds then all of the builds proceed. """ controller, master, builder_ids = \ yield self.create_single_worker_two_builder_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, builder_ids) # The worker succeeds to substantiate. controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) yield controller.auto_stop(True) @defer.inlineCallbacks def test_very_late_detached_after_substantiation(self): ''' A latent worker may detach at any time after stop_instance() call. Make sure it works at the most late detachment point, i.e. when we're substantiating again. 
''' controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.createBuildrequest(master, [builder_id]) self.assertTrue(controller.starting) controller.auto_disconnect_worker = False yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) # stop the instance, but don't disconnect the worker up to until just # before we complete start_instance() self.assertTrue(controller.stopping) yield controller.stop_instance(True) self.assertTrue(controller.stopped) yield self.createBuildrequest(master, [builder_id]) self.assertTrue(controller.starting) yield controller.disconnect_worker() yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) yield controller.disconnect_worker() @defer.inlineCallbacks def test_substantiation_during_stop_instance(self): ''' If a latent worker detaches before stop_instance() completes and we start a build then it should start successfully without causing an erroneous cancellation of the substantiation request. ''' controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) # Trigger a single buildrequest yield self.createBuildrequest(master, [builder_id]) self.assertEqual(True, controller.starting) # start instance controller.auto_disconnect_worker = False controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) self.assertTrue(controller.stopping) yield controller.disconnect_worker() # now create a buildrequest that will substantiate the build. It should # either not start at all until the instance finished substantiating, # or the substantiation request needs to be recorded and start # immediately after stop_instance completes. yield self.createBuildrequest(master, [builder_id]) yield controller.stop_instance(True) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) yield controller.disconnect_worker() @defer.inlineCallbacks def test_substantiation_during_stop_instance_canStartBuild_race(self): ''' If build attempts substantiation after the latent worker detaches, but stop_instance() is not completed yet, then we should successfully complete substantiation without causing an erroneous cancellation. The above sequence of events was possible even if canStartBuild checked for a in-progress insubstantiation, as if the build is scheduled before insubstantiation, its start could be delayed until when stop_instance() is in progress. 
''' controller, master, builder_ids = \ yield self.create_single_worker_two_builder_config( controller_kwargs=dict(build_wait_timeout=1)) # Trigger a single buildrequest yield self.createBuildrequest(master, [builder_ids[0]]) self.assertEqual(True, controller.starting) # start instance controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) with patchForDelay('buildbot.process.builder.Builder.maybeStartBuild') as delay: # create a build request which will result in a build, but it won't # attempt to substantiate until after stop_instance() is in progress yield self.createBuildrequest(master, [builder_ids[1]]) self.assertEqual(len(delay), 1) self.reactor.advance(1) self.assertTrue(controller.stopping) delay.fire() yield controller.stop_instance(True) self.assertTrue(controller.starting) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) @defer.inlineCallbacks def test_insubstantiation_during_substantiation_refuses_substantiation(self): """ If a latent worker gets insubstantiation() during substantiation, then it should refuse to substantiate. """ controller, master, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) # insubstantiate during start_instance(). Note that failed substantiation is notified only # after the latent workers completes start-stop cycle. yield self.createBuildrequest(master, [builder_id]) d = controller.worker.insubstantiate() controller.start_instance(False) controller.stop_instance(True) yield d yield self.assertBuildResults(1, RETRY) @defer.inlineCallbacks def test_substantiation_cancelled_by_insubstantiation_when_waiting_for_insubstantiation(self): """ We should cancel substantiation if we insubstantiate when that substantiation is waiting on current insubstantiation to finish """ controller, master, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.createBuildrequest(master, [builder_id]) # put the worker into insubstantiation phase controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) self.assertTrue(controller.stopping) # build should wait on the insubstantiation yield self.createBuildrequest(master, [builder_id]) self.assertEqual(controller.worker.state, States.INSUBSTANTIATING_SUBSTANTIATING) # build should be requeued if we insubstantiate. d = controller.worker.insubstantiate() controller.stop_instance(True) yield d yield self.assertBuildResults(2, RETRY) @defer.inlineCallbacks def test_stalled_substantiation_then_timeout_get_requeued(self): """ If a latent worker substantiate, but not connect, and then be unsubstantiated, the build request becomes unclaimed. """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # We never start the worker, rather timeout it. master.reactor.advance(controller.worker.missing_timeout) # Flush the errors logged by the failure. self.flushLoggedErrors(defer.TimeoutError) # When the substantiation fails, the buildrequest becomes unclaimed. 
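        # (nothing ever connected here; advancing past missing_timeout above
        # is what triggers the requeue asserted below)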
self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield controller.start_instance(False) yield controller.auto_stop(True) @defer.inlineCallbacks def test_sever_connection_before_ping_then_timeout_get_requeued(self): """ If a latent worker connects, but its connection is severed without notification in the TCP layer, we successfully wait until TCP times out and requeue the build. """ controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) bsid, brids = yield self.createBuildrequest(master, [builder_id]) # sever connection just before ping() with patchForDelay('buildbot.process.workerforbuilder.AbstractWorkerForBuilder.ping') as delay: yield controller.start_instance(True) controller.sever_connection() delay.fire() # lose connection after TCP times out self.reactor.advance(100) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # the worker will be put into quarantine self.reactor.advance(controller.worker.quarantine_initial_timeout) yield controller.stop_instance(True) yield controller.start_instance(True) yield self.assertBuildResults(2, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_failed_sendBuilderList_get_requeued(self): """ sendBuilderList can fail due to missing permissions on the workdir, the build request becomes unclaimed """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) logs = [] yield master.mq.startConsuming( lambda key, log: logs.append(log), ('logs', None, 'new')) # The worker succeed to substantiate def remote_setBuilderList(self, dirs): raise TestException("can't create dir") controller.patchBot(self, 'remote_setBuilderList', remote_setBuilderList) controller.start_instance(True) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. 
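# the failed buildrequest should show up as unclaimed with the same ids we submitted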
self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) # we should get 2 logs (html and txt) with the proper information in them self.assertEqual(len(logs), 2) logs_by_name = {} for _log in logs: fulllog = yield master.data.get(("logs", str(_log['logid']), "raw")) logs_by_name[fulllog['filename']] = fulllog['raw'] for i in ["err_text", "err_html"]: self.assertIn("can't create dir", logs_by_name[i]) # make sure stacktrace is present in html self.assertIn("buildbot.test.integration.test_worker_latent.TestException", logs_by_name[i]) yield controller.auto_stop(True) @defer.inlineCallbacks def test_failed_ping_get_requeued(self): """ If the ping to the worker fails after a successful substantiation, the build request becomes unclaimed """ controller, master, builder_id = \ yield self.create_single_worker_config() # Trigger a buildrequest bsid, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) logs = [] yield master.mq.startConsuming( lambda key, log: logs.append(log), ('logs', None, 'new')) # The worker succeeds in substantiating, but the subsequent ping fails def remote_print(self, msg): if msg == "ping": raise TestException("can't ping") controller.patchBot(self, 'remote_print', remote_print) controller.start_instance(True) # Flush the errors logged by the failure. self.flushLoggedErrors(TestException) # When the substantiation fails, the buildrequest becomes unclaimed. self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) # we should get 2 logs (html and txt) with the proper information in them self.assertEqual(len(logs), 2) logs_by_name = {} for _log in logs: fulllog = yield master.data.get(("logs", str(_log['logid']), "raw")) logs_by_name[fulllog['filename']] = fulllog['raw'] for i in ["err_text", "err_html"]: self.assertIn("can't ping", logs_by_name[i]) # make sure stacktrace is present in html self.assertIn("buildbot.test.integration.test_worker_latent.TestException", logs_by_name[i]) yield controller.auto_stop(True) @defer.inlineCallbacks def test_worker_close_connection_while_building(self): """ If the worker closes the connection in the middle of the build, the next build can start correctly """ controller, stepcontroller, master, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=0) ) # Request a build and disconnect midway controller.auto_disconnect_worker = False yield self.createBuildrequest(master, [builder_id]) yield controller.auto_stop(True) self.assertTrue(controller.starting) controller.start_instance(True) yield self.assertBuildResults(1, None) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # Now check that the build is requeued and finishes with success controller.start_instance(True) yield self.assertBuildResults(2, None) stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(2, SUCCESS) yield controller.disconnect_worker() @defer.inlineCallbacks def test_negative_build_timeout_reattach_substantiated(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side. We should still support accidental disconnections from the worker side due to, e.g., network problems.
""" controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1) ) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Now disconnect and reconnect worker and check whether we can still # build. This should not change the worker state from our side. yield controller.disconnect_worker() self.assertTrue(controller.started) yield controller.connect_worker() self.assertTrue(controller.started) yield self.createBuildrequest(master, [builder_id]) yield self.assertBuildResults(1, SUCCESS) # The only way to stop worker with negative build timeout is to # insubstantiate explicitly yield controller.auto_stop(True) yield controller.worker.insubstantiate() yield controller.disconnect_worker() @defer.inlineCallbacks def test_sever_connection_while_building(self): """ If the connection to worker is severed without TCP notification in the middle of the build, the build is re-queued and successfully restarted. """ controller, stepcontroller, master, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=0) ) # Request a build and disconnect midway yield self.createBuildrequest(master, [builder_id]) yield controller.auto_stop(True) self.assertTrue(controller.starting) controller.start_instance(True) yield self.assertBuildResults(1, None) # sever connection and lose it after TCP times out controller.sever_connection() self.reactor.advance(100) yield controller.disconnect_worker() yield self.assertBuildResults(1, RETRY) # Request one build. yield self.createBuildrequest(master, [builder_id]) controller.start_instance(True) yield self.assertBuildResults(2, None) stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_sever_connection_during_insubstantiation(self): """ If latent worker connection is severed without notification in the TCP layer, we successfully wait until TCP times out, insubstantiate and can substantiate after that. """ controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.createBuildrequest(master, [builder_id]) controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) # sever connection just before insubstantiation and lose it after TCP # times out with patchForDelay('buildbot.worker.base.AbstractWorker.disconnect') as delay: self.reactor.advance(1) self.assertTrue(controller.stopping) controller.sever_connection() delay.fire() yield controller.stop_instance(True) self.reactor.advance(100) yield controller.disconnect_worker() # create new build request and verify it works yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_sever_connection_during_insubstantiation_and_buildrequest(self): """ If latent worker connection is severed without notification in the TCP layer, we successfully wait until TCP times out, insubstantiate and can substantiate after that. 
In this the subsequent build request is created during insubstantiation """ controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) yield self.createBuildrequest(master, [builder_id]) controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) # sever connection just before insubstantiation and lose it after TCP # times out with patchForDelay('buildbot.worker.base.AbstractWorker.disconnect') as delay: self.reactor.advance(1) self.assertTrue(controller.stopping) yield self.createBuildrequest(master, [builder_id]) controller.sever_connection() delay.fire() yield controller.stop_instance(True) self.reactor.advance(100) yield controller.disconnect_worker() # verify the previously created build successfully completes yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.reactor.advance(1) yield controller.stop_instance(True) self.flushLoggedErrors(pb.PBConnectionLost) @defer.inlineCallbacks def test_negative_build_timeout_reattach_insubstantiating(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side, but it can disconnect and reattach from worker side due to, e.g. network problems. """ controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1) ) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Now start insubstantiation and disconnect and reconnect the worker. # It should not change worker state from master side. d = controller.worker.insubstantiate() self.assertTrue(controller.stopping) yield controller.disconnect_worker() self.assertTrue(controller.stopping) yield controller.connect_worker() self.assertTrue(controller.stopping) yield controller.stop_instance(True) yield d self.assertTrue(controller.stopped) yield controller.disconnect_worker() # Now substantiate the worker and verify build succeeds yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) controller.auto_disconnect_worker = True yield controller.auto_stop(True) @defer.inlineCallbacks def test_negative_build_timeout_no_disconnect_insubstantiating(self): """ When build_wait_timeout is negative, we don't disconnect the worker from our side, so it should be possible to insubstantiate and substantiate it without problems if the worker does not disconnect either. 
""" controller, master, builder_id = \ yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1) ) controller.auto_disconnect_worker = False controller.auto_connect_worker = False # Substantiate worker via a build yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield controller.connect_worker() yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Insubstantiate worker without disconnecting it d = controller.worker.insubstantiate() self.assertTrue(controller.stopping) yield controller.stop_instance(True) yield d self.assertTrue(controller.stopped) # Now substantiate the worker without connecting it yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) controller.auto_disconnect_worker = True yield controller.auto_stop(True) @defer.inlineCallbacks def test_negative_build_timeout_insubstantiates_on_master_shutdown(self): """ When build_wait_timeout is negative, we should still insubstantiate when master shuts down. """ controller, master, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=-1)) # Substantiate worker via a build yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) yield self.assertBuildResults(1, SUCCESS) self.assertTrue(controller.started) # Shutdown master d = master.stopService() yield controller.stop_instance(True) yield d @defer.inlineCallbacks def test_stop_instance_synchronous_exception(self): """ Throwing a synchronous exception from stop_instance should allow subsequent build to start. """ controller, master, builder_id = yield self.create_single_worker_config( controller_kwargs=dict(build_wait_timeout=1)) controller.auto_stop(True) # patch stop_instance() to raise exception synchronously def raise_stop_instance(fast): raise TestException() real_stop_instance = controller.worker.stop_instance controller.worker.stop_instance = raise_stop_instance # create a build and wait for stop yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) self.reactor.advance(1) yield self.assertBuildResults(1, SUCCESS) self.flushLoggedErrors(TestException) # unpatch stop_instance() and call it to cleanup state of fake worker controller controller.worker.stop_instance = real_stop_instance yield controller.worker.stop_instance(False) self.reactor.advance(1) # subsequent build should succeed yield self.createBuildrequest(master, [builder_id]) yield controller.start_instance(True) self.reactor.advance(1) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_build_stop_with_cancelled_during_substantiation(self): """ If a build is stopping during latent worker substantiating, the build becomes cancelled """ controller, master, builder_id = \ yield self.create_single_worker_config() builder = master.botmaster.builders['testy'] # Trigger a buildrequest yield self.createBuildrequest(master, [builder_id]) # Stop the build build = builder.getBuild(0) build.stopBuild('no reason', results=CANCELLED) # Indicate that the worker can't start an instance. controller.start_instance(False) yield self.assertBuildResults(1, CANCELLED) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_build_stop_with_retry_during_substantiation(self): """ If master is shutting down during latent worker substantiating, the build becomes retry. 
""" controller, master, builder_id = \ yield self.create_single_worker_config() builder = master.botmaster.builders['testy'] # Trigger a buildrequest _, brids = yield self.createBuildrequest(master, [builder_id]) unclaimed_build_requests = [] yield master.mq.startConsuming( lambda key, request: unclaimed_build_requests.append(request), ('buildrequests', None, 'unclaimed')) # Stop the build build = builder.getBuild(0) build.stopBuild('no reason', results=RETRY) # Indicate that the worker can't start an instance. controller.start_instance(False) yield self.assertBuildResults(1, RETRY) self.assertEqual( set(brids), {req['buildrequestid'] for req in unclaimed_build_requests} ) yield controller.auto_stop(True) self.flushLoggedErrors(LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_rejects_build_on_instance_with_different_type_timeout_zero(self): """ If latent worker supports getting its instance type from properties that are rendered from build then the buildrequestdistributor must not schedule any builds on workers that are running different instance type than what these builds will require. """ controller, stepcontroller, master, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict( kind=Interpolate('%(prop:worker_kind)s'), build_wait_timeout=0 ) ) # create build request yield self.createBuildrequest(master, [builder_id], properties=Properties(worker_kind='a')) # start the build and verify the kind of the worker. Note that the # buildmaster needs to restart the worker in order to change the worker # kind, so we allow it both to auto start and stop self.assertEqual(True, controller.starting) controller.auto_start(True) yield controller.auto_stop(True) self.assertEqual((yield controller.get_started_kind()), 'a') # before the other build finished, create another build request yield self.createBuildrequest(master, [builder_id], properties=Properties(worker_kind='b')) stepcontroller.finish_step(SUCCESS) # give the botmaster chance to insubstantiate the worker and # maybe substantiate it for the pending build the builds on worker self.reactor.advance(0.1) # verify that the second build restarted with the expected instance # kind self.assertEqual((yield controller.get_started_kind()), 'b') stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_rejects_build_on_instance_with_different_type_timeout_nonzero(self): """ If latent worker supports getting its instance type from properties that are rendered from build then the buildrequestdistributor must not schedule any builds on workers that are running different instance type than what these builds will require. """ controller, stepcontroller, master, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict( kind=Interpolate('%(prop:worker_kind)s'), build_wait_timeout=5 ) ) # create build request yield self.createBuildrequest(master, [builder_id], properties=Properties(worker_kind='a')) # start the build and verify the kind of the worker. 
Note that the # buildmaster needs to restart the worker in order to change the worker # kind, so we allow it both to auto start and stop self.assertEqual(True, controller.starting) controller.auto_start(True) yield controller.auto_stop(True) self.assertEqual((yield controller.get_started_kind()), 'a') # before the other build finished, create another build request yield self.createBuildrequest(master, [builder_id], properties=Properties(worker_kind='b')) stepcontroller.finish_step(SUCCESS) # give the botmaster chance to insubstantiate the worker and # maybe substantiate it for the pending build the builds on worker self.reactor.advance(0.1) # verify build has not started, even though the worker is waiting # for one self.assertIsNone((yield master.db.builds.getBuild(2))) self.assertTrue(controller.started) # wait until the latent worker times out, is insubstantiated, # is substantiated because of pending buildrequest and starts the build self.reactor.advance(6) self.assertIsNotNone((yield master.db.builds.getBuild(2))) # verify that the second build restarted with the expected instance # kind self.assertEqual((yield controller.get_started_kind()), 'b') stepcontroller.finish_step(SUCCESS) yield self.assertBuildResults(1, SUCCESS) yield self.assertBuildResults(2, SUCCESS) @defer.inlineCallbacks def test_supports_no_build_for_substantiation(self): """ Abstract latent worker should support being substantiated without a build and then insubstantiated. """ controller, _, _ = \ yield self.create_single_worker_config() controller.worker.substantiate(None, None) controller.start_instance(True) self.assertTrue(controller.started) d = controller.worker.insubstantiate() controller.stop_instance(True) yield d @defer.inlineCallbacks def test_supports_no_build_for_substantiation_accepts_build_later(self): """ Abstract latent worker should support being substantiated without a build and then accept a build request. """ controller, stepcontroller, master, builder_id = \ yield self.create_single_worker_config_with_step( controller_kwargs=dict(build_wait_timeout=1)) controller.worker.substantiate(None, None) controller.start_instance(True) self.assertTrue(controller.started) self.createBuildrequest(master, [builder_id]) stepcontroller.finish_step(SUCCESS) self.reactor.advance(1) controller.stop_instance(True) class LatentWithLatentMachine(TimeoutableTestCase, RunFakeMasterTestCase): def tearDown(self): # Flush the errors logged by the master stop cancelling the builds. self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) super().tearDown() @defer.inlineCallbacks def create_single_worker_config(self, build_wait_timeout=0): machine_controller = LatentMachineController( name='machine1', build_wait_timeout=build_wait_timeout) worker_controller = LatentController(self, 'worker1', machine_name='machine1') step_controller = BuildStepController() config_dict = { 'machines': [machine_controller.machine], 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory([step_controller.step]), ), ], 'workers': [worker_controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 
'multiMaster': True, } master = yield self.getMaster(config_dict) builder_id = yield master.data.updates.findBuilderId('builder1') return (machine_controller, worker_controller, step_controller, master, builder_id) @defer.inlineCallbacks def create_two_worker_config(self, build_wait_timeout=0, controller_kwargs=None): if not controller_kwargs: controller_kwargs = {} machine_controller = LatentMachineController( name='machine1', build_wait_timeout=build_wait_timeout) worker1_controller = LatentController(self, 'worker1', machine_name='machine1', **controller_kwargs) worker2_controller = LatentController(self, 'worker2', machine_name='machine1', **controller_kwargs) step1_controller = BuildStepController() step2_controller = BuildStepController() config_dict = { 'machines': [machine_controller.machine], 'builders': [ BuilderConfig(name="builder1", workernames=["worker1"], factory=BuildFactory([step1_controller.step]), ), BuilderConfig(name="builder2", workernames=["worker2"], factory=BuildFactory([step2_controller.step]), ), ], 'workers': [worker1_controller.worker, worker2_controller.worker], 'protocols': {'null': {}}, # Disable checks about missing scheduler. 'multiMaster': True, } master = yield self.getMaster(config_dict) builder1_id = yield master.data.updates.findBuilderId('builder1') builder2_id = yield master.data.updates.findBuilderId('builder2') return (machine_controller, [worker1_controller, worker2_controller], [step1_controller, step2_controller], master, [builder1_id, builder2_id]) @defer.inlineCallbacks def test_1worker_starts_and_stops_after_single_build_success(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(True) self.assertTrue(worker_controller.started) step_controller.finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_starts_and_stops_after_single_build_failure(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(True) self.assertTrue(worker_controller.started) step_controller.finish_step(FAILURE) self.reactor.advance(0) # force deferred stop call to be executed machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_stops_machine_after_timeout(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config( build_wait_timeout=5) worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(True) self.reactor.advance(10.0) step_controller.finish_step(SUCCESS) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # put clock 5s after step finish, machine should start suspending self.reactor.advance(0.1) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPING) 
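# complete the stop sequence that the timeout initiated; the machine should then report STOPPED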
machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_does_not_stop_machine_machine_after_timeout_during_build(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config( build_wait_timeout=5) worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(True) self.reactor.advance(10.0) step_controller.finish_step(SUCCESS) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # create build request while machine is still awake. It should not # suspend regardless of how much time passes self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) yield self.createBuildrequest(master, [builder_id]) self.reactor.advance(5.1) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) step_controller.finish_step(SUCCESS) self.reactor.advance(4.9) self.assertEqual(machine_controller.machine.state, MachineStates.STARTED) # put clock 5s after step finish, machine should start suspending self.reactor.advance(0.1) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPING) machine_controller.stop_machine() self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_1worker_insubstantiated_after_start_failure(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config() worker_controller.auto_connect_worker = False worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(False) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.assertEqual(worker_controller.started, False) @defer.inlineCallbacks def test_1worker_eats_exception_from_start_machine(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config() worker_controller.auto_connect_worker = False worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) class FakeError(Exception): pass machine_controller.start_machine(FakeError('start error')) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.assertEqual(worker_controller.started, False) self.flushLoggedErrors(FakeError) @defer.inlineCallbacks def test_1worker_eats_exception_from_stop_machine(self): machine_controller, worker_controller, step_controller, \ master, builder_id = yield self.create_single_worker_config() worker_controller.auto_start(True) worker_controller.auto_stop(True) yield self.createBuildrequest(master, [builder_id]) machine_controller.start_machine(True) step_controller.finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed class FakeError(Exception): pass machine_controller.stop_machine(FakeError('stop error')) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) self.flushLoggedErrors(FakeError) @defer.inlineCallbacks def test_2workers_build_substantiates_insubstantiates_both_workers(self): machine_controller, worker_controllers, step_controllers, \ master, builder_ids = yield self.create_two_worker_config( controller_kwargs=dict(starts_without_substantiate=True)) for wc in worker_controllers: 
wc.auto_start(True) wc.auto_stop(True) yield self.createBuildrequest(master, [builder_ids[0]]) machine_controller.start_machine(True) for wc in worker_controllers: self.assertTrue(wc.started) step_controllers[0].finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() for wc in worker_controllers: self.assertFalse(wc.started) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_2workers_two_builds_start_machine_concurrently(self): machine_controller, worker_controllers, step_controllers, \ master, builder_ids = yield self.create_two_worker_config() for wc in worker_controllers: wc.auto_start(True) wc.auto_stop(True) yield self.createBuildrequest(master, [builder_ids[0]]) self.assertEqual(machine_controller.machine.state, MachineStates.STARTING) yield self.createBuildrequest(master, [builder_ids[1]]) machine_controller.start_machine(True) for wc in worker_controllers: self.assertTrue(wc.started) step_controllers[0].finish_step(SUCCESS) step_controllers[1].finish_step(SUCCESS) self.reactor.advance(0) # force deferred suspend call to be executed machine_controller.stop_machine() for wc in worker_controllers: self.assertFalse(wc.started) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) @defer.inlineCallbacks def test_2workers_insubstantiated_after_one_start_failure(self): machine_controller, worker_controllers, step_controllers, \ master, builder_ids = yield self.create_two_worker_config() for wc in worker_controllers: wc.auto_connect_worker = False wc.auto_start(True) wc.auto_stop(True) yield self.createBuildrequest(master, [builder_ids[0]]) machine_controller.start_machine(False) self.assertEqual(machine_controller.machine.state, MachineStates.STOPPED) for wc in worker_controllers: self.assertEqual(wc.started, False) buildbot-2.6.0/master/buildbot/test/integration/test_worker_marathon.py000066400000000000000000000113361361162603000265410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from unittest.case import SkipTest from twisted.internet import defer from buildbot.config import BuilderConfig from buildbot.plugins import schedulers from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.process.results import SUCCESS from buildbot.test.util.integration import RunMasterBase from buildbot.worker.marathon import MarathonLatentWorker # This integration test creates a master and marathon worker environment, # It requires environment variable set to your marathon hosting. 
# you can use the mesos-compose to create a marathon environment for development: # git clone https://github.com/bobrik/mesos-compose.git # cd mesos-compose # make run # then set the environment variable to run the test: # export BBTEST_MARATHON_URL=http://localhost:8080 # following environment variable can be used to stress concurrent worker startup NUM_CONCURRENT = int(os.environ.get("MARATHON_TEST_NUM_CONCURRENT_BUILD", 1)) # if you run the stress test against a real mesos deployment, you want to also use https and basic credentials # export BBTEST_MARATHON_CREDS=login:passwd class MarathonMaster(RunMasterBase): def setUp(self): if "BBTEST_MARATHON_URL" not in os.environ: raise SkipTest( "marathon integration tests only run when environment variable BBTEST_MARATHON_URL" " is with url to Marathon api ") @defer.inlineCallbacks def test_trigger(self): yield self.setupConfig(masterConfig(num_concurrent=NUM_CONCURRENT), startWorker=False) yield self.doForceBuild() builds = yield self.master.data.get(("builds",)) # if there are some retry, there will be more builds self.assertEqual(len(builds), 1 + NUM_CONCURRENT) for b in builds: self.assertEqual(b['results'], SUCCESS) # master configuration def masterConfig(num_concurrent, extra_steps=None): if extra_steps is None: extra_steps = [] c = {} c['schedulers'] = [ schedulers.ForceScheduler( name="force", builderNames=["testy"])] triggereables = [] for i in range(num_concurrent): c['schedulers'].append( schedulers.Triggerable( name="trigsched" + str(i), builderNames=["build"])) triggereables.append("trigsched" + str(i)) f = BuildFactory() f.addStep(steps.ShellCommand(command='echo hello')) f.addStep(steps.Trigger(schedulerNames=triggereables, waitForFinish=True, updateSourceStamp=True)) f.addStep(steps.ShellCommand(command='echo world')) f2 = BuildFactory() f2.addStep(steps.ShellCommand(command='echo ola')) for step in extra_steps: f2.addStep(step) c['builders'] = [ BuilderConfig(name="testy", workernames=["marathon0"], factory=f), BuilderConfig(name="build", workernames=["marathon" + str(i) for i in range(num_concurrent)], factory=f2)] url = os.environ.get('BBTEST_MARATHON_URL') creds = os.environ.get('BBTEST_MARATHON_CREDS') if creds is not None: user, password = creds.split(":") else: user = password = None masterFQDN = os.environ.get('masterFQDN') marathon_extra_config = { } c['workers'] = [ MarathonLatentWorker('marathon' + str(i), url, user, password, 'buildbot/buildbot-worker:master', marathon_extra_config=marathon_extra_config, masterFQDN=masterFQDN) for i in range(num_concurrent) ] # un comment for debugging what happens if things looks locked. # c['www'] = {'port': 8080} # if the masterFQDN is forced (proxy case), then we use 9989 default port # else, we try to find a free port if masterFQDN is not None: c['protocols'] = {"pb": {"port": "tcp:9989"}} else: c['protocols'] = {"pb": {"port": "tcp:0"}} return c buildbot-2.6.0/master/buildbot/test/integration/test_worker_workerside.py000066400000000000000000000267231361162603000271140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil import tempfile import time import mock from twisted.cred.error import UnauthorizedLogin from twisted.internet import defer from twisted.internet import reactor from twisted.python import util from twisted.trial import unittest import buildbot_worker.bot from buildbot import config from buildbot import pbmanager from buildbot import worker from buildbot.process import botmaster from buildbot.process import builder from buildbot.process import factory from buildbot.status import master from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import manager as workermanager PKI_DIR = util.sibpath(__file__, 'pki') # listening on port 0 says to the kernel to choose any free port (race-free) # the environment variable is handy for repetitive test launching with # introspecting tools (tcpdump, wireshark...) DEFAULT_PORT = os.environ.get("BUILDBOT_TEST_DEFAULT_PORT", "0") class FakeBuilder(builder.Builder): def __init__(self, name): super().__init__(name) self.builder_status = mock.Mock() def attached(self, worker, commands): return defer.succeed(None) def detached(self, worker): pass def getOldestRequestTime(self): return 0 def maybeStartBuild(self): return defer.succeed(None) class TestingWorker(buildbot_worker.bot.Worker): """Add more introspection and scheduling hooks to the real Worker class. @ivar tests_connected: a ``Deferred`` that's called back once the PB connection is operational (``gotPerspective``). Callbacks receive the ``Perspective`` object. @ivar tests_disconnected: a ``Deferred`` that's called back upon disconnections. yielding these in an inlineCallbacks has the effect to wait on the corresponding conditions, actually allowing the services to fulfill them. """ def __init__(self, *args, **kwargs): super(TestingWorker, self).__init__(*args, **kwargs) self.tests_disconnected = defer.Deferred() self.tests_connected = defer.Deferred() self.tests_login_failed = defer.Deferred() self.master_perspective = None orig_got_persp = self.bf.gotPerspective orig_failed_get_persp = self.bf.failedToGetPerspective def gotPerspective(persp): orig_got_persp(persp) self.master_perspective = persp self.tests_connected.callback(persp) persp.broker.notifyOnDisconnect( lambda: self.tests_disconnected.callback(None)) def failedToGetPerspective(why, broker): orig_failed_get_persp(why, broker) self.tests_login_failed.callback((why, broker)) self.bf.gotPerspective = gotPerspective self.bf.failedToGetPerspective = failedToGetPerspective class TestWorkerConnection(unittest.TestCase, TestReactorMixin): """ Test handling of connections from real worker code This is meant primarily to test the worker itself. 
@ivar master: fake build master @ivar pbmanager: L{PBManager} instance @ivar botmaster: L{BotMaster} instance @ivar buildworker: L{worker.Worker} instance @ivar port: actual TCP port of the master PB service (fixed after call to ``addMasterSideWorker``) """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) # set the worker port to a loopback address with unspecified # port self.pbmanager = self.master.pbmanager = pbmanager.PBManager() yield self.pbmanager.setServiceParent(self.master) # remove the fakeServiceParent from fake service hierarchy, and replace # by a real one yield self.master.workers.disownServiceParent() self.workers = self.master.workers = workermanager.WorkerManager( self.master) yield self.workers.setServiceParent(self.master) self.botmaster = botmaster.BotMaster() yield self.botmaster.setServiceParent(self.master) self.master.status = master.Status() yield self.master.status.setServiceParent(self.master) self.master.botmaster = self.botmaster self.master.data.updates.workerConfigured = lambda *a, **k: None yield self.master.startService() self.buildworker = None self.port = None self.workerworker = None # patch in our FakeBuilder for the regular Builder class self.patch(botmaster, 'Builder', FakeBuilder) self.client_connection_string_tpl = r"tcp:host=127.0.0.1:port={port}" self.tmpdirs = set() @defer.inlineCallbacks def tearDown(self): for tmp in self.tmpdirs: if os.path.exists(tmp): shutil.rmtree(tmp) yield self.pbmanager.stopService() yield self.botmaster.stopService() yield self.workers.stopService() # if the worker is still attached, wait for it to detach, too if self.buildworker: yield self.buildworker.waitForCompleteShutdown() @defer.inlineCallbacks def addMasterSideWorker(self, connection_string=r"tcp:{port}:interface=127.0.0.1".format( port=DEFAULT_PORT), name="testworker", password="pw", update_port=True, **kwargs): """ Create a master-side worker instance and add it to the BotMaster @param **kwargs: arguments to pass to the L{Worker} constructor. """ self.buildworker = worker.Worker(name, password, **kwargs) # reconfig the master to get it set up new_config = self.master.config new_config.protocols = {"pb": {"port": connection_string}} new_config.workers = [self.buildworker] new_config.builders = [config.BuilderConfig( name='bldr', workername='testworker', factory=factory.BuildFactory())] yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) yield self.workers.reconfigServiceWithBuildbotConfig(new_config) if update_port: # as part of the reconfig, the worker registered with the # pbmanager, so get the port it was assigned self.port = self.buildworker.registration.getPBPort() def workerSideDisconnect(self, worker): """Disconnect from the worker side This seems a good way to simulate a broken connection. 
Returns a Deferred """ return worker.bf.disconnect() def addWorker(self, connection_string_tpl=r"tcp:host=127.0.0.1:port={port}", password="pw", name="testworker", keepalive=None): """Add a true Worker object to the services.""" wdir = tempfile.mkdtemp() self.tmpdirs.add(wdir) return TestingWorker(None, None, name, password, wdir, keepalive, connection_string=connection_string_tpl.format(port=self.port)) @defer.inlineCallbacks def test_connect_disconnect(self): yield self.addMasterSideWorker() def could_not_connect(): self.fail("Worker never got connected to master") timeout = reactor.callLater(10, could_not_connect) worker = self.addWorker() yield worker.startService() yield worker.tests_connected timeout.cancel() self.assertTrue('bldr' in worker.bot.builders) yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_reconnect_network(self): yield self.addMasterSideWorker() def could_not_connect(): self.fail("Worker did not reconnect in time to master") worker = self.addWorker(r"tcp:host=127.0.0.1:port={port}") yield worker.startService() yield worker.tests_connected self.assertTrue('bldr' in worker.bot.builders) timeout = reactor.callLater(10, could_not_connect) yield self.workerSideDisconnect(worker) yield worker.tests_connected timeout.cancel() yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_applicative_reconnection(self): """Test reconnection on PB errors. The worker starts with a password that the master does not accept at first, and then the master gets reconfigured to accept it. """ yield self.addMasterSideWorker() worker = self.addWorker(password="pw2") yield worker.startService() why, broker = yield worker.tests_login_failed self.assertEqual(1, len(self.flushLoggedErrors(UnauthorizedLogin))) def could_not_connect(): self.fail("Worker did not reconnect in time to master") # we have two reasons to call that again: # - we really need to instantiate a new one master-side worker, # just changing its password has it simply ignored # - we need to fix the port yield self.addMasterSideWorker( password='pw2', update_port=False, # don't know why, but it'd fail connection_string=r"tcp:{port}:interface=127.0.0.1".format(port=self.port)) timeout = reactor.callLater(10, could_not_connect) yield worker.tests_connected timeout.cancel() self.assertTrue('bldr' in worker.bot.builders) yield worker.stopService() yield worker.tests_disconnected @defer.inlineCallbacks def test_pb_keepalive(self): """Test applicative (PB) keepalives. This works by patching the master to callback a deferred on which the test waits. 
""" def perspective_keepalive(Connection_self): waiter = worker.keepalive_waiter if waiter is not None: waiter.callback(time.time()) worker.keepalive_waiter = None from buildbot.worker.protocols.pb import Connection self.patch(Connection, 'perspective_keepalive', perspective_keepalive) yield self.addMasterSideWorker() # short keepalive to make the test bearable to run worker = self.addWorker(keepalive=0.1) waiter = worker.keepalive_waiter = defer.Deferred() yield worker.startService() yield worker.tests_connected first = yield waiter yield worker.bf.currentKeepaliveWaiter waiter = worker.keepalive_waiter = defer.Deferred() second = yield waiter yield worker.bf.currentKeepaliveWaiter self.assertGreater(second, first) self.assertLess(second, first + 1) # seems safe enough yield worker.stopService() yield worker.tests_disconnected buildbot-2.6.0/master/buildbot/test/integration/test_www.py000066400000000000000000000143551361162603000241670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.trial import unittest from twisted.web import client from buildbot.data import connector as dataconnector from buildbot.db import connector as dbconnector from buildbot.mq import connector as mqconnector from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import www from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import authz from buildbot.www import service as wwwservice SOMETIME = 1348971992 OTHERTIME = 1008971992 class BodyReader(protocol.Protocol): # an IProtocol that reads the entire HTTP body and then calls back # with it def __init__(self, finishedDeferred): self.body = [] self.finishedDeferred = finishedDeferred def dataReceived(self, bytes): self.body.append(bytes) def connectionLost(self, reason): if reason.check(client.ResponseDone): self.finishedDeferred.callback(b''.join(self.body)) else: self.finishedDeferred.errback(reason) class Www(db.RealDatabaseMixin, www.RequiresWwwMixin, unittest.TestCase): master = None @defer.inlineCallbacks def setUp(self): # set up a full master serving HTTP yield self.setUpRealDatabase(table_names=['masters', 'objects', 'object_state'], sqlite_memory=False) master = fakemaster.FakeMaster(reactor) master.config.db = dict(db_url=self.db_url) master.db = dbconnector.DBConnector('basedir') yield master.db.setServiceParent(master) yield master.db.setup(check_version=False) master.config.mq = dict(type='simple') master.mq = mqconnector.MQConnector() yield master.mq.setServiceParent(master) yield master.mq.setup() master.data = dataconnector.DataConnector() yield 
master.data.setServiceParent(master) master.config.www = dict( port='tcp:0:interface=127.0.0.1', debug=True, auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=[], logfileName='http.log') master.www = wwwservice.WWWService() yield master.www.setServiceParent(master) yield master.www.startService() yield master.www.reconfigServiceWithBuildbotConfig(master.config) session = mock.Mock() session.uid = "0" master.www.site.sessionFactory = mock.Mock(return_value=session) # now that we have a port, construct the real URL and insert it into # the config. The second reconfig isn't really required, but doesn't # hurt. self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum() self.url = unicode2bytes(self.url) master.config.buildbotURL = self.url yield master.www.reconfigServiceWithBuildbotConfig(master.config) self.master = master # build an HTTP agent, using an explicit connection pool if Twisted # supports it (Twisted 13.0.0 and up) if hasattr(client, 'HTTPConnectionPool'): self.pool = client.HTTPConnectionPool(reactor) self.agent = client.Agent(reactor, pool=self.pool) else: self.pool = None self.agent = client.Agent(reactor) @defer.inlineCallbacks def tearDown(self): if self.pool: yield self.pool.closeCachedConnections() if self.master: yield self.master.www.stopService() yield self.tearDownRealDatabase() @defer.inlineCallbacks def apiGet(self, url, expect200=True): pg = yield self.agent.request(b'GET', url) # this is kind of obscene, but protocols are like that d = defer.Deferred() bodyReader = BodyReader(d) pg.deliverBody(bodyReader) body = yield d # check this *after* reading the body, otherwise Trial will # complain that the response is half-read if expect200 and pg.code != 200: self.fail("did not get 200 response for '%s'" % (url,)) return json.loads(bytes2unicode(body)) def link(self, suffix): return self.url + b'api/v2/' + suffix # tests # There's no need to be exhaustive here. The intent is to test that data # can get all the way from the DB to a real HTTP client, and a few # resources will be sufficient to demonstrate that. @defer.inlineCallbacks def test_masters(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), fakedb.Master(id=8, name='other:master', active=1, last_active=OTHERTIME), ]) res = yield self.apiGet(self.link(b'masters')) self.assertEqual(res, { 'masters': [ {'active': False, 'masterid': 7, 'name': 'some:master', 'last_active': SOMETIME}, {'active': True, 'masterid': 8, 'name': 'other:master', 'last_active': OTHERTIME}, ], 'meta': { 'total': 2, }}) res = yield self.apiGet(self.link(b'masters/7')) self.assertEqual(res, { 'masters': [ {'active': False, 'masterid': 7, 'name': 'some:master', 'last_active': SOMETIME}, ], 'meta': { }}) buildbot-2.6.0/master/buildbot/test/integration/v087p1-README.txt000066400000000000000000000002641361162603000243650ustar00rootroot00000000000000-- Basic v0.8.7p1 tarball -- This tarball is the result of a couple of runs from a single incarnation of a master that was running Buildbot-0.8.7p1. Both builds were successful. buildbot-2.6.0/master/buildbot/test/integration/v087p1.tgz000066400000000000000000000410311361162603000234140ustar00rootroot00000000000000jHQ]\Sk_Ѡ8J0c( 6E)))[T DLl^ϗ}~s~Xmch- mF]}qG~F:zm}mo0b  r~?Xvb"D:tVkk7_Ol_oHӷu uXgAyx1d !8 T#2iV,;^*4 K•1\ c Te睖ϠrR91T. 
o%tE1sEq1@Է#`/r*E+r)[m==9ZRd=4LO gM {4 rX*83 ſY>MCDP:WD5X`ֈ*lb v|o}ؼ#kc8z6Jӑ 5>pnw=zIr箓 \6Px"rlqp!JL_1SMGBe4WoAXv9Eڊ(YEG{o ʜ޺|QELLA9ؿ3|<1lDK٧9 Hhh}0U>A$c "rc}06#5:g6RyPN'WMOeؤXP8_\o`Y؏m̢?K(f7h]N4r/"%FsE\ϡj2X (wLi mۉ-HfQ??+]=bP^騵pPH!n QΆa9Ї)C#5cG,'oS%Ash.NM#YϪ`.ઙezkEt?YC_J:t!+S|t_(r@Co5Bb&ØI?ibXnY\ȷ5wN%նZT)É+?\.kP+|PYo9r5TV>+{ u#`4a M=ˏ4:4hI~oDט$3fRҾ,o4.#Zk)ok 7%Ϥ\(R d`z.9mccVo={P&xlY~^0@ (7o*%J~QGKH_&0%'y`0=tK4@4?3>,CJXmԢl#;f_hbUw5~?ش#p8BkɍN[W.sNx qE/Dˏ* [okt&.r"/Vv Xnɾ*;;hpo&6+jGsc>Vhcd;ЛVȓ„+T{.frrsYE֮1/:ۙGS鈉'JO>Bx 3uN=;̎zV1Ql,meeo&쏴`r Mp{Y<gUp..0m}ߎ.\}j7\Y,8؉:i~_D2uBzY\Q'F % |nƭfi71PW "gjnB;K d"TY}@ёh|Q@r[.E yĕɏ7V7;}6)Қv w/2X:lُF mka!8 *'~l/i_bg7?5k-ңnE\j&Tr~**-rjvf dX7쨳ᙸ0h!cv !3|V' ;[C1o𧴿}3~$#KYIE_ǯqQ-iU'9-P#O.Ǽ` wڊ1,҂?S&cxne;?I{tjs;{} eNXޝ_, $N#vpMTvyAyس^xy?p./FTcNtE*DYGt |'gkj?1[gzn,PT]҉ `esΊ<ϓ̈́}E5a3BJQCwIWeޑuVMp$W gi(GS _ys\W=<"7?ŀy OyO, c d'%G䘔娨: {i{${Y؊:AtU 3ResX)ɩSY)yhJP13r ᜫk8>, %'b=dž;O'^G@fǜl,Lav8W~sEg>!"b/)Ai իѤ՝wg*1`U`jJ ƨ0L"/dxL& /'Y1!/ dR_5]ȃa`rħPpglkaԅ $P5,"]1"#򨰮7kDO:0s Y\kl5ڒ=QX+[87*l5oh:OVa7^#Sծb;Lqf|L~OFӍh&?aԓ<"ܧy7ukBԞ~몬M~cf Ԗ1h1Za9jl(2f/GZWQ*ڏ;Gb?{Θ(;KAe !oMEfA2Me9`Jyt[tAB4aO0"~p ȑ4}fKBiwP'}US e^?3\R8}z m ꝁYd(_7~l/Agq`§uEqOCKƓҼ=uT|lֱw0880= e//gHs۞g^jg|X ,z o #Ѓ`)=eqJu'9 Wܟ/(=mY~sAg9<%\Wþbӛ]v} q7`Xӣt0YgqUO<I#9.}Z͗6Eǩ {{;%b䣴oSS5 ~/?bsh?6WiCѬ2YB7))mYwfaHӯC-,;!aB6wdmBK[`L,LI1@:5v-k3{lSY;~ToB6~6Ծ~GM8BN,vmͫysY?LP"+Opoݳ{>[={*LdjGdiDgO$37&ҷ2q#7'qVNvxJXXÌ ` (u5Cf[#7\Sx \;ĠsYd@9`'0hTA@ `P Q@F? ݫŰp @տ#%̀VFDwo&$[ۊfĽ9ƍ_732u`,l4H,,],H)0!@,R1 @r=4NucB_+GI@wd`odr7@z3@s;<$!]lPnkgCS[/ݯhoYu28\5f'+[QYRS"AiNY?wk~%Eě q ^~GGܲ? &<vTFU\& ?* >X@nԄw&e-A] w$ĸpn_T A-?Gr/Vƣ11>x3|B&Ļ?vgo^|_wPSokQڰ`4P/*xe2ES|*44IO|ꁽm.:J׷Rdň8p]>?}r uUj8}\5Erwנwo߶/߶/߷^^^^^^^0buildbot-2.6.0/master/buildbot/test/regressions/000077500000000000000000000000001361162603000217425ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/regressions/__init__.py000066400000000000000000000000001361162603000240410ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/regressions/test_bad_change_properties_rows.py000066400000000000000000000050701361162603000307360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import changes from buildbot.test.fake import fakedb from buildbot.test.util import connector_component class TestBadRows(connector_component.ConnectorComponentMixin, unittest.TestCase): # See bug #1952 for details. This checks that users who used a development # version between 0.8.3 and 0.8.4 get reasonable behavior even though some # rows in the change_properties database do not contain a proper [value, # source] tuple. 
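    # For illustration only (these literals are examples, not rows inserted by
    # the tests below): a well-formed change_properties row stores its value as
    # a JSON-encoded [value, source] pair, whereas the bogus rows exercised
    # here hold a bare JSON value, e.g.
    #
    #   property_value='["no", "Change"]'  # proper [value, source] pair
    #   property_value='"no source"'       # bogus: bare string, no source
    #   property_value='[1, 2]'            # bogus: bare JSON list
    #
    # In both bogus cases getChange() is expected to fall back to reporting the
    # value with a 'Change' source, as the assertions below verify.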
@defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['patches', 'sourcestamps', 'changes', 'change_properties', 'change_files']) self.db.changes = changes.ChangesConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def test_bogus_row_no_source(self): yield self.insertTestData([ fakedb.SourceStamp(id=10), fakedb.ChangeProperty(changeid=13, property_name='devel', property_value='"no source"'), fakedb.Change(changeid=13, sourcestampid=10), ]) c = yield self.db.changes.getChange(13) self.assertEqual(c['properties'], dict(devel=('no source', 'Change'))) @defer.inlineCallbacks def test_bogus_row_jsoned_list(self): yield self.insertTestData([ fakedb.SourceStamp(id=10), fakedb.ChangeProperty(changeid=13, property_name='devel', property_value='[1, 2]'), fakedb.Change(changeid=13, sourcestampid=10), ]) c = yield self.db.changes.getChange(13) self.assertEqual(c['properties'], dict(devel=([1, 2], 'Change'))) buildbot-2.6.0/master/buildbot/test/regressions/test_oldpaths.py000066400000000000000000000113201361162603000251660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest def deprecatedImport(fn): def wrapper(self): fn(self) warnings = self.flushWarnings() # on older Pythons, this warning appears twice, so use collapse it if len(warnings) == 2 and warnings[0] == warnings[1]: del warnings[1] self.assertEqual(len(warnings), 1, "got: %r" % (warnings,)) self.assertEqual(warnings[0]['category'], DeprecationWarning) return wrapper class OldImportPaths(unittest.TestCase): """ Test that old, deprecated import paths still work. 
""" def test_scheduler_Scheduler(self): from buildbot.scheduler import Scheduler assert Scheduler def test_schedulers_basic_Scheduler(self): # renamed to basic.SingleBranchScheduler from buildbot.schedulers.basic import Scheduler assert Scheduler def test_scheduler_AnyBranchScheduler(self): from buildbot.scheduler import AnyBranchScheduler assert AnyBranchScheduler def test_scheduler_basic_Dependent(self): from buildbot.schedulers.basic import Dependent assert Dependent def test_scheduler_Dependent(self): from buildbot.scheduler import Dependent assert Dependent def test_scheduler_Periodic(self): from buildbot.scheduler import Periodic assert Periodic def test_scheduler_Nightly(self): from buildbot.scheduler import Nightly assert Nightly def test_scheduler_Triggerable(self): from buildbot.scheduler import Triggerable assert Triggerable def test_scheduler_Try_Jobdir(self): from buildbot.scheduler import Try_Jobdir assert Try_Jobdir def test_scheduler_Try_Userpass(self): from buildbot.scheduler import Try_Userpass assert Try_Userpass def test_schedulers_filter_ChangeFilter(self): # this was the location of ChangeFilter until 0.8.4 from buildbot.schedulers.filter import ChangeFilter assert ChangeFilter def test_process_base_Build(self): from buildbot.process.base import Build assert Build def test_buildrequest_BuildRequest(self): from buildbot.buildrequest import BuildRequest assert BuildRequest def test_process_subunitlogobserver_SubunitShellCommand(self): from buildbot.process.subunitlogobserver import SubunitShellCommand assert SubunitShellCommand def test_status_builder_results(self): # these symbols are now in buildbot.process.results, but lots of user # code references them here: from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED from buildbot.status.builder import EXCEPTION, RETRY, Results from buildbot.status.builder import worst_status # reference the symbols to avoid failure from pyflakes (SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, Results, worst_status) def test_status_builder_BuildSetStatus(self): from buildbot.status.builder import BuildSetStatus assert BuildSetStatus def test_status_builder_Status(self): from buildbot.status.builder import Status assert Status def test_status_builder_Event(self): from buildbot.status.builder import Event assert Event def test_status_builder_BuildStatus(self): from buildbot.status.builder import BuildStatus assert BuildStatus def test_steps_source_Source(self): from buildbot.steps.source import Source assert Source def test_buildstep_remotecommand(self): from buildbot.process.buildstep import RemoteCommand, \ LoggedRemoteCommand, RemoteShellCommand assert RemoteCommand assert LoggedRemoteCommand assert RemoteShellCommand def test_buildstep_logobserver(self): from buildbot.process.buildstep import LogObserver, \ LogLineObserver, OutputProgressObserver assert LogObserver assert LogLineObserver assert OutputProgressObserver buildbot-2.6.0/master/buildbot/test/regressions/test_steps_shell_WarningCountingShellCommand.py000066400000000000000000000036141361162603000333670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.trial import unittest from buildbot.steps.shell import WarningCountingShellCommand class TestWarningCountingShellCommand(unittest.TestCase): # Makes sure that it is possible to suppress warnings even if the # warning extractor does not provide line information def testSuppressingLinelessWarningsPossible(self): # Use a warningExtractor that does not provide line # information w = WarningCountingShellCommand( warningExtractor=WarningCountingShellCommand.warnExtractWholeLine, command="echo") # Add suppression manually instead of using suppressionFile fileRe = None warnRe = ".*SUPPRESS.*" start = None end = None suppression = (fileRe, warnRe, start, end) w.addSuppression([suppression]) # Now call maybeAddWarning warnings = [] line = "this warning should be SUPPRESSed" match = re.match(".*warning.*", line) w.maybeAddWarning(warnings, line, match) # Finally make the suppressed warning was *not* added to the # list of warnings expectedWarnings = 0 self.assertEqual(len(warnings), expectedWarnings) buildbot-2.6.0/master/buildbot/test/test_extra_coverage.py000066400000000000000000000042511361162603000240100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this file imports a number of source files that are not # included in the coverage because none of the tests import # them; this results in a more accurate total coverage percent. 
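# A minimal sketch of the pattern this file relies on (hypothetical, shortened
# example; the real import list follows below): importing a module and then
# referencing it in a throwaway list makes coverage count it while keeping
# pyflakes quiet about an otherwise "unused" import.
#
#   from buildbot.util import eventual
#   modules = []
#   modules.extend([eventual])  # referenced only so pyflakes does not warn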
from buildbot import worker from buildbot.changes import p4poller from buildbot.changes import svnpoller from buildbot.clients import base from buildbot.clients import sendchange from buildbot.clients import tryclient from buildbot.process import subunitlogobserver from buildbot.scripts import checkconfig from buildbot.scripts import logwatcher from buildbot.scripts import reconfig from buildbot.scripts import runner from buildbot.status import client from buildbot.steps import master from buildbot.steps import maxq from buildbot.steps import python from buildbot.steps import python_twisted from buildbot.steps import subunit from buildbot.steps import trigger from buildbot.steps import vstudio from buildbot.steps.package.rpm import rpmbuild from buildbot.steps.package.rpm import rpmlint from buildbot.steps.package.rpm import rpmspec from buildbot.util import eventual modules = [] # for the benefit of pyflakes modules.extend([worker]) modules.extend([p4poller, svnpoller]) modules.extend([base, sendchange, tryclient]) modules.extend([subunitlogobserver]) modules.extend([checkconfig, logwatcher, reconfig, runner]) modules.extend([client]) modules.extend([master, maxq, python, python_twisted, subunit]) modules.extend([trigger, vstudio]) modules.extend([rpmbuild, rpmlint, rpmspec]) modules.extend([eventual]) buildbot-2.6.0/master/buildbot/test/unit/000077500000000000000000000000001361162603000203565ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/unit/__init__.py000066400000000000000000000000001361162603000224550ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/unit/test_buildbot_net_usage_data.py000066400000000000000000000134661361162603000266300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import platform from unittest.case import SkipTest from urllib import request as urllib_request from twisted.internet import reactor from twisted.python.filepath import FilePath from twisted.trial import unittest import buildbot.buildbot_net_usage_data from buildbot import config from buildbot.buildbot_net_usage_data import _sendBuildbotNetUsageData from buildbot.buildbot_net_usage_data import computeUsageData from buildbot.buildbot_net_usage_data import linux_distribution from buildbot.config import BuilderConfig from buildbot.config import ConfigWarning from buildbot.master import BuildMaster from buildbot.plugins import steps from buildbot.process.factory import BuildFactory from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test.util.integration import DictLoader from buildbot.test.util.warnings import assertProducesWarning from buildbot.worker.base import Worker class Tests(unittest.TestCase): def getMaster(self, config_dict): """ Create a started ``BuildMaster`` with the given configuration. 
""" basedir = FilePath(self.mktemp()) basedir.createDirectory() master = BuildMaster( basedir.path, reactor=reactor, config_loader=DictLoader(config_dict)) master.config = master.config_loader.loadConfig() return master def getBaseConfig(self): return { 'builders': [ BuilderConfig(name="testy", workernames=["local1", "local2"], factory=BuildFactory([steps.ShellCommand(command='echo hello')])), ], 'workers': [Worker('local' + str(i), 'pass') for i in range(3)], 'schedulers': [ ForceScheduler( name="force", builderNames=["testy"]) ], 'protocols': {'null': {}}, 'multiMaster': True, } def test_basic(self): self.patch(config, "_in_unit_tests", False) with assertProducesWarning( ConfigWarning, message_pattern=r"`buildbotNetUsageData` is not configured and defaults to basic."): master = self.getMaster(self.getBaseConfig()) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['versions', 'db', 'platform', 'installid', 'mq', 'plugins', 'www_plugins'])) self.assertEqual(data['plugins']['buildbot/worker/base/Worker'], 3) self.assertEqual(sorted(data['plugins'].keys()), sorted( ['buildbot/schedulers/forcesched/ForceScheduler', 'buildbot/worker/base/Worker', 'buildbot/steps/shell/ShellCommand', 'buildbot/config/BuilderConfig'])) def test_full(self): c = self.getBaseConfig() c['buildbotNetUsageData'] = 'full' master = self.getMaster(c) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['versions', 'db', 'installid', 'platform', 'mq', 'plugins', 'builders', 'www_plugins'])) def test_custom(self): c = self.getBaseConfig() def myCompute(data): return dict(db=data['db']) c['buildbotNetUsageData'] = myCompute master = self.getMaster(c) data = computeUsageData(master) self.assertEqual(sorted(data.keys()), sorted(['db'])) def test_urllib(self): self.patch(buildbot.buildbot_net_usage_data, '_sendWithRequests', lambda _, __: None) class FakeRequest: def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs open_url = [] class urlopen: def __init__(self, r): self.request = r open_url.append(self) def read(self): return "ok" def close(self): pass self.patch(urllib_request, "Request", FakeRequest) self.patch(urllib_request, "urlopen", urlopen) _sendBuildbotNetUsageData({'foo': 'bar'}) self.assertEqual(len(open_url), 1) self.assertEqual(open_url[0].request.args, ('https://events.buildbot.net/events/phone_home', b'{"foo": "bar"}', {'Content-Length': 14, 'Content-Type': 'application/json'})) def test_real(self): if "TEST_BUILDBOTNET_USAGEDATA" not in os.environ: raise SkipTest( "_sendBuildbotNetUsageData real test only run when environment variable" " TEST_BUILDBOTNET_USAGEDATA is set") _sendBuildbotNetUsageData({'foo': 'bar'}) def test_linux_distro(self): system = platform.system() if system != "Linux": raise SkipTest("test is only for linux") distro = linux_distribution() self.assertEqual(len(distro), 2) self.assertNotIn("unknown", distro[0]) # Rolling distributions like Arch Linux (arch) does not have VERSION_ID if distro[0] != "arch": self.assertNotIn("unknown", distro[1]) buildbot-2.6.0/master/buildbot/test/unit/test_changes_base.py000066400000000000000000000210271361162603000243730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import base from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin class TestChangeSource(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): timeout = 120 class Subclass(base.ChangeSource): pass @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() @defer.inlineCallbacks def test_activation(self): cs = self.Subclass(name="DummyCS") cs.activate = mock.Mock(return_value=defer.succeed(None)) cs.deactivate = mock.Mock(return_value=defer.succeed(None)) # set the changesourceid, and claim the changesource on another master self.attachChangeSource(cs) self.setChangeSourceToMaster(self.OTHER_MASTER_ID) yield cs.startService() cs.clock.advance(cs.POLL_INTERVAL_SEC / 2) cs.clock.advance(cs.POLL_INTERVAL_SEC / 5) cs.clock.advance(cs.POLL_INTERVAL_SEC / 5) self.assertFalse(cs.activate.called) self.assertFalse(cs.deactivate.called) self.assertFalse(cs.active) self.assertEqual(cs.serviceid, self.DUMMY_CHANGESOURCE_ID) # clear that masterid yield cs.stopService() self.setChangeSourceToMaster(None) yield cs.startService() cs.clock.advance(cs.POLL_INTERVAL_SEC) self.assertTrue(cs.activate.called) self.assertFalse(cs.deactivate.called) self.assertTrue(cs.active) # stop the service and see that deactivate is called yield cs.stopService() self.assertTrue(cs.activate.called) self.assertTrue(cs.deactivate.called) self.assertFalse(cs.active) class TestPollingChangeSource(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): timeout = 120 class Subclass(base.PollingChangeSource): pass @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() self.attachChangeSource(self.Subclass(name="DummyCS")) def tearDown(self): return self.tearDownChangeSource() @defer.inlineCallbacks def runClockFor(self, _, secs): yield self.reactor.pump([1.0] * secs) def test_loop_loops(self): # track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) self.changesource.pollInterval = 5 self.startChangeSource() d = defer.Deferred() d.addCallback(self.runClockFor, 12) @d.addCallback def check(_): # note that it does *not* poll at time 0 self.assertEqual(loops, [5.0, 10.0]) self.reactor.callWhenRunning(d.callback, None) return d def test_loop_exception(self): # track when poll() gets called loops = [] def poll(): loops.append(self.reactor.seconds()) raise RuntimeError("oh noes") self.changesource.poll = poll self.changesource.pollInterval = 5 self.startChangeSource() d = defer.Deferred() d.addCallback(self.runClockFor, 12) @d.addCallback def check(_): # note that it keeps looping after error self.assertEqual(loops, [5.0, 10.0]) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) self.reactor.callWhenRunning(d.callback, None) return d def test_poll_only_if_activated(self): """The polling 
logic only applies if the source actually starts!""" self.setChangeSourceToMaster(self.OTHER_MASTER_ID) loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) self.changesource.pollInterval = 5 self.startChangeSource() d = defer.Deferred() d.addCallback(self.runClockFor, 12) @d.addCallback def check(_): # it doesn't do anything because it was already claimed self.assertEqual(loops, []) self.reactor.callWhenRunning(d.callback, None) return d def test_pollAtLaunch(self): # track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) self.changesource.pollInterval = 5 self.changesource.pollAtLaunch = True self.startChangeSource() d = defer.Deferred() d.addCallback(self.runClockFor, 12) @d.addCallback def check(_): # note that it *does* poll at time 0 self.assertEqual(loops, [0.0, 5.0, 10.0]) self.reactor.callWhenRunning(d.callback, None) return d class TestReconfigurablePollingChangeSource(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): class Subclass(base.ReconfigurablePollingChangeSource): pass @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() self.attachChangeSource(self.Subclass(name="DummyCS")) def tearDown(self): return self.tearDownChangeSource() @defer.inlineCallbacks def runClockFor(self, secs): yield self.reactor.pump([1.0] * secs) @defer.inlineCallbacks def test_loop_loops(self): # track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # note that it does *not* poll at time 0 self.assertEqual(loops, [5.0, 10.0]) @defer.inlineCallbacks def test_loop_exception(self): # track when poll() gets called loops = [] def poll(): loops.append(self.reactor.seconds()) raise RuntimeError("oh noes") self.changesource.poll = poll yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # note that it keeps looping after error self.assertEqual(loops, [5.0, 10.0]) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) @defer.inlineCallbacks def test_poll_only_if_activated(self): """The polling logic only applies if the source actually starts!""" self.setChangeSourceToMaster(self.OTHER_MASTER_ID) loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=False)) yield self.runClockFor(12) # it doesn't do anything because it was already claimed self.assertEqual(loops, []) @defer.inlineCallbacks def test_pollAtLaunch(self): # track when poll() gets called loops = [] self.changesource.poll = \ lambda: loops.append(self.reactor.seconds()) yield self.startChangeSource() yield self.changesource.reconfigServiceWithSibling(self.Subclass( name="DummyCS", pollInterval=5, pollAtLaunch=True)) yield self.runClockFor(12) # note that it *does* poll at time 0 self.assertEqual(loops, [0.0, 5.0, 10.0]) buildbot-2.6.0/master/buildbot/test/unit/test_changes_bitbucket.py000066400000000000000000000436611361162603000254450ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from datetime import datetime from twisted.internet import defer from twisted.trial import unittest from twisted.web import client from twisted.web.error import Error from buildbot.changes.bitbucket import BitbucketPullrequestPoller from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin class SourceRest(): """https://bitbucket.org/!api/2.0/repositories/{owner}/{slug}""" template = """\ { "hash": "%(hash)s", "links": { "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s/commits/%(short_hash)s" } }, "repository": { "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s" } } }, "date": "%(date)s" } """ repo_template = """\ { "links": { "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s" } } } """ def __init__(self, owner, slug, hash, date): self.owner = owner self.slug = slug self.hash = hash self.date = date def request(self): return self.template % { "owner": self.owner, "slug": self.slug, "hash": self.hash, "short_hash": self.hash[0:12], "date": self.date, } def repo_request(self): return self.repo_template % { "owner": self.owner, "slug": self.slug, } class PullRequestRest(): """https://bitbucket.org/!api/2.0/repositories/{owner}/{slug}/pullrequests/{pull_request_id}""" template = """\ { "description": "%(description)s", "title": "%(title)s", "source": { "commit": { "hash": "%(hash)s", "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s/commit/%(hash)s" } } } }, "state": "OPEN", "author": { "display_name": "%(display_name)s" }, "created_on": "%(created_on)s", "participants": [ ], "updated_on": "%(updated_on)s", "merge_commit": null, "id": %(id)d } """ def __init__(self, nr, title, description, display_name, source, created_on, updated_on=None): self.nr = nr self.title = title self.description = description self.display_name = display_name self.source = source self.created_on = created_on if updated_on: self.updated_on = updated_on else: self.updated_on = self.created_on def request(self): return self.template % { "description": self.description, "title": self.title, "hash": self.source.hash, "short_hash": self.source.hash[0:12], "owner": self.source.owner, "slug": self.source.slug, "display_name": self.display_name, "created_on": self.created_on, "updated_on": self.updated_on, "id": self.nr, } class PullRequestListRest(): """https://bitbucket.org/api/2.0/repositories/{owner}/{slug}/pullrequests""" template = """\ { "description": "%(description)s", "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(owner)s/%(slug)s/pullrequests/%(id)d" }, "html": { "href": "https://bitbucket.org/%(owner)s/%(slug)s/pull-request/%(id)d" } }, "author": { "display_name": "%(display_name)s" }, "title": "%(title)s", "source": { "commit": { "hash": "%(short_hash)s", "links": { "self": 
{ "href": "https://bitbucket.org/!api/2.0/repositories/%(src_owner)s/%(src_slug)s/commit/%(short_hash)s" } } }, "repository": { "links": { "self": { "href": "https://bitbucket.org/!api/2.0/repositories/%(src_owner)s/%(src_slug)s" } } }, "branch": { "name": "default" } }, "state": "OPEN", "created_on": "%(created_on)s", "updated_on": "%(updated_on)s", "merge_commit": null, "id": %(id)s } """ def __init__(self, owner, slug, prs): self.owner = owner self.slug = slug self.prs = prs self.pr_by_id = {} self.src_by_url = {} for pr in prs: self.pr_by_id[pr.nr] = pr self.src_by_url["%s/%s" % (pr.source.owner, pr.source.slug)] = pr.source def request(self): s = "" for pr in self.prs: s += self.template % { "description": pr.description, "owner": self.owner, "slug": self.slug, "display_name": pr.display_name, "title": pr.title, "hash": pr.source.hash, "short_hash": pr.source.hash[0:12], "src_owner": pr.source.owner, "src_slug": pr.source.slug, "created_on": pr.created_on, "updated_on": pr.updated_on, "id": pr.nr, } return """\ { "pagelen": 10, "values": [%s ], "page": 1 } """ % s def getPage(self, url, timeout=None): list_url_re = re.compile( r"https://bitbucket.org/api/2.0/repositories/%s/%s/pullrequests" % (self.owner, self.slug)) pr_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/%s/%s/pullrequests/(?P\d+)" % (self.owner, self.slug)) source_commit_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/(?P.*)/(?P.*)/commit/(?P\d+)") source_url_re = re.compile( r"https://bitbucket.org/!api/2.0/repositories/(?P.*)/(?P.*)") if list_url_re.match(url): return defer.succeed(self.request()) m = pr_url_re.match(url) if m: return self.pr_by_id[int(m.group("id"))].request() m = source_commit_url_re.match(url) if m: return self.src_by_url["%s/%s" % (m.group("src_owner"), m.group("src_slug"))].request() m = source_url_re.match(url) if m: return self.src_by_url["%s/%s" % (m.group("src_owner"), m.group("src_slug"))].repo_request() raise Error(code=404) class TestBitbucketPullrequestPoller(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # create pull requests self.date = "2013-10-15T20:38:20.001797+00:00" self.date_epoch = datetime.strptime(self.date.split('.')[0], '%Y-%m-%dT%H:%M:%S') src = SourceRest( owner="contributor", slug="slug", hash="1111111111111111111111111111111111111111", date=self.date, ) pr = PullRequestRest( nr=1, title="title", description="description", display_name="contributor", source=src, created_on=self.date, ) self.pr_list = PullRequestListRest( owner="owner", slug="slug", prs=[pr], ) # update src = SourceRest( owner="contributor", slug="slug", hash="2222222222222222222222222222222222222222", date=self.date, ) pr = PullRequestRest( nr=1, title="title", description="description", display_name="contributor", source=src, created_on=self.date, ) self.pr_list2 = PullRequestListRest( owner="owner", slug="slug", prs=[pr], ) return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def _fakeGetPage(self, result): # Install a fake getPage that puts the requested URL in self.getPage_got_url # and return result self.getPage_got_url = None def fake(url, timeout=None): self.getPage_got_url = url return defer.succeed(result) self.patch(client, "getPage", fake) def _fakeGetPage404(self): def fail(url, timeout=None): raise Error(code=404) self.patch(client, "getPage", fail) def attachDefaultChangeSource(self): return self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', 
slug='slug')) # tests @defer.inlineCallbacks def test_describe(self): yield self.attachDefaultChangeSource() assert re.search(r'owner/slug', self.changesource.describe()) @defer.inlineCallbacks def test_poll_unknown_repo(self): yield self.attachDefaultChangeSource() # Polling a non-existent repository should result in a 404 self._fakeGetPage404() try: yield self.changesource.poll() self.fail( 'Polling a non-existent repository should result in a 404.') except Exception as e: self.assertEqual(str(e), '404 Not Found') @defer.inlineCallbacks def test_poll_no_pull_requests(self): yield self.attachDefaultChangeSource() rest = PullRequestListRest(owner="owner", slug="slug", prs=[]) self._fakeGetPage(rest.request()) yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_new_pull_requests(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) @defer.inlineCallbacks def test_poll_no_updated_pull_request(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) # repoll yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) @defer.inlineCallbacks def test_poll_updated_pull_request(self): yield self.attachDefaultChangeSource() # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) self.patch(client, "getPage", self.pr_list2.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [ { 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 
'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }, { 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '2222222222222222222222222222222222222222', 'revlink': 'https://bitbucket.org/contributor/slug/commits/222222222222', 'src': 'bitbucket', 'when_timestamp': 1381869500, } ]) @defer.inlineCallbacks def test_poll_pull_request_filter_False(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', pullrequest_filter=lambda x: False )) # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_pull_request_filter_True(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', pullrequest_filter=lambda x: True )) # patch client.getPage() self.patch(client, "getPage", self.pr_list.getPage) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1381869500, }]) @defer.inlineCallbacks def test_poll_pull_request_not_useTimestamps(self): yield self.attachChangeSource(BitbucketPullrequestPoller( owner='owner', slug='slug', useTimestamps=False, )) self.patch(client, "getPage", self.pr_list.getPage) self.reactor.advance(1396825656) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'contributor', 'committer': None, 'branch': 'default', 'category': None, 'codebase': None, 'comments': 'pull-request #1: title\nhttps://bitbucket.org/owner/slug/pull-request/1', 'files': None, 'project': '', 'properties': {}, 'repository': 'https://bitbucket.org/contributor/slug', 'revision': '1111111111111111111111111111111111111111', 'revlink': 'https://bitbucket.org/contributor/slug/commits/111111111111', 'src': 'bitbucket', 'when_timestamp': 1396825656, }]) buildbot-2.6.0/master/buildbot/test/unit/test_changes_changes.py000066400000000000000000000162701361162603000250750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import pprint import re import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import changes from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class Change(unittest.TestCase, TestReactorMixin): change23_rows = [ fakedb.Change(changeid=23, author="dustin", committer="dustin", comments="fix whitespace", branch="warnerdb", revision="deadbeef", when_timestamp=266738404, revlink='http://warner/0e92a098b', category='devel', repository='git://warner', codebase='mainapp', project='Buildbot'), fakedb.ChangeFile(changeid=23, filename='master/README.txt'), fakedb.ChangeFile(changeid=23, filename='worker/README.txt'), fakedb.ChangeProperty(changeid=23, property_name='notest', property_value='["no","Change"]'), fakedb.ChangeUser(changeid=23, uid=27), ] def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.change23 = changes.Change(**dict( # using **dict(..) forces kwargs category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738404, comments='fix whitespace', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098b', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change23.number = 23 self.change24 = changes.Change(**dict( category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738405, comments='fix whitespace again', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098c', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change24.number = 24 self.change25 = changes.Change(**dict( category='devel', repository='git://warner', codebase='mainapp', who='dustin', committer='dustin', when=266738406, comments='fix whitespace again', project='Buildbot', branch='warnerdb', revlink='http://warner/0e92a098d', properties={'notest': "no"}, files=['master/README.txt', 'worker/README.txt'], revision='deadbeef')) self.change25.number = 25 @defer.inlineCallbacks def test_fromChdict(self): # get a real honest-to-goodness chdict from the fake db yield self.master.db.insertTestData(self.change23_rows) chdict = yield self.master.db.changes.getChange(23) exp = self.change23 got = yield changes.Change.fromChdict(self.master, chdict) # compare ok = True ok = ok and got.number == exp.number ok = ok and got.who == exp.who ok = ok and got.committer == exp.committer ok = ok and sorted(got.files) == sorted(exp.files) ok = ok and got.comments == exp.comments ok = ok and got.revision == exp.revision ok = ok and got.when == exp.when ok = ok and got.branch == exp.branch ok = ok and got.category == exp.category ok = ok and got.revlink == exp.revlink ok = ok and got.properties == exp.properties ok = ok and got.repository == exp.repository ok = ok and got.codebase == exp.codebase ok = ok and got.project == exp.project if not ok: def printable(c): return pprint.pformat(c.__dict__) self.fail("changes do not match; expected\n%s\ngot\n%s" % (printable(exp), printable(got))) def test_str(self): string = str(self.change23) self.assertTrue(re.match(r"Change\(.*\)", string), string) def test_asText(self): text = self.change23.asText() self.assertTrue(re.match(textwrap.dedent('''\ Files: master/README.txt worker/README.txt On: git://warner For: Buildbot At: .* Changed By: dustin 
Committed By: dustin Comments: fix whitespaceProperties:. notest: no '''), text), text) def test_asDict(self): dict = self.change23.asDict() self.assertIn('1978', dict['at']) # timezone-sensitive del dict['at'] self.assertEqual(dict, { 'branch': 'warnerdb', 'category': 'devel', 'codebase': 'mainapp', 'comments': 'fix whitespace', 'files': [{'name': 'master/README.txt'}, {'name': 'worker/README.txt'}], 'number': 23, 'project': 'Buildbot', 'properties': [('notest', 'no', 'Change')], 'repository': 'git://warner', 'rev': 'deadbeef', 'revision': 'deadbeef', 'revlink': 'http://warner/0e92a098b', 'when': 266738404, 'who': 'dustin', 'committer': 'dustin'}) def test_getShortAuthor(self): self.assertEqual(self.change23.getShortAuthor(), 'dustin') def test_getTime(self): # careful, or timezones will hurt here self.assertIn('Jun 1978', self.change23.getTime()) def test_getTimes(self): self.assertEqual(self.change23.getTimes(), (266738404, None)) def test_getText(self): self.change23.who = 'nasty < nasty' # test the html escaping (ugh!) self.assertEqual(self.change23.getText(), ['nasty < nasty']) def test_getLogs(self): self.assertEqual(self.change23.getLogs(), {}) def test_compare(self): self.assertEqual(self.change23, self.change23) self.assertNotEqual(self.change24, self.change23) self.assertGreater(self.change24, self.change23) self.assertGreaterEqual(self.change24, self.change23) self.assertGreaterEqual(self.change24, self.change24) self.assertLessEqual(self.change24, self.change24) self.assertLessEqual(self.change23, self.change24) self.assertLess(self.change23, self.change25) buildbot-2.6.0/master/buildbot/test/unit/test_changes_filter.py000066400000000000000000000136341361162603000247530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re from twisted.trial import unittest from buildbot.changes import filter from buildbot.test.fake.change import Change class ChangeFilter(unittest.TestCase): def setUp(self): self.results = [] # (got, expected, msg) self.filt = None def tearDown(self): if self.results: raise RuntimeError("test forgot to call check()") def setfilter(self, **kwargs): self.filt = filter.ChangeFilter(**kwargs) def yes(self, change, msg): self.results.append((self.filt.filter_change(change), True, msg)) def no(self, change, msg): self.results.append((self.filt.filter_change(change), False, msg)) def check(self): errs = [] for r in self.results: if (r[0] or r[1]) and not (r[0] and r[1]): errs.append(r[2]) self.results = [] if errs: self.fail("; ".join(errs)) def test_filter_change_filter_fn(self): self.setfilter(filter_fn=lambda ch: ch.x > 3) self.no(Change(x=2), "filter_fn returns False") self.yes(Change(x=4), "filter_fn returns True") self.check() def test_filter_change_filt_str(self): self.setfilter(project="myproj") self.no(Change(project="yourproj"), "non-matching PROJECT returns False") self.yes(Change(project="myproj"), "matching PROJECT returns True") self.check() def test_filter_change_filt_list(self): self.setfilter(repository=["vc://a", "vc://b"]) self.yes(Change(repository="vc://a"), "matching REPOSITORY vc://a returns True") self.yes(Change(repository="vc://b"), "matching REPOSITORY vc://b returns True") self.no(Change(repository="vc://c"), "non-matching REPOSITORY returns False") self.no(Change(repository=None), "None for REPOSITORY returns False") self.check() def test_filter_change_filt_list_None(self): self.setfilter(branch=["mybr", None]) self.yes(Change(branch="mybr"), "matching BRANCH mybr returns True") self.yes(Change(branch=None), "matching BRANCH None returns True") self.no(Change(branch="misc"), "non-matching BRANCH returns False") self.check() def test_filter_change_filt_re(self): self.setfilter(category_re="^a.*") self.yes(Change(category="albert"), "matching CATEGORY returns True") self.no( Change(category="boris"), "non-matching CATEGORY returns False") self.check() def test_filter_change_branch_re(self): # regression - see #927 self.setfilter(branch_re="^t.*") self.yes(Change(branch="trunk"), "matching BRANCH returns True") self.no(Change(branch="development"), "non-matching BRANCH returns False") self.no(Change(branch=None), "branch=None returns False") self.check() def test_filter_change_filt_re_compiled(self): self.setfilter(category_re=re.compile("^b.*", re.I)) self.no(Change(category="albert"), "non-matching CATEGORY returns False") self.yes(Change(category="boris"), "matching CATEGORY returns True") self.yes( Change(category="Bruce"), "matching CATEGORY returns True, using re.I") self.check() def test_filter_change_combination(self): self.setfilter(project='p', repository='r', branch='b', category='c', codebase='cb') self.no(Change(project='x', repository='x', branch='x', category='x'), "none match -> False") self.no(Change(project='p', repository='r', branch='b', category='x'), "three match -> False") self.no(Change(project='p', repository='r', branch='b', category='c', codebase='x'), "four match -> False") self.yes(Change(project='p', repository='r', branch='b', category='c', codebase='cb'), "all match -> True") self.check() def test_filter_change_combination_filter_fn(self): self.setfilter(project='p', repository='r', branch='b', category='c', filter_fn=lambda c: c.ff) self.no(Change(project='x', repository='x', branch='x', 
category='x', ff=False), "none match and fn returns False -> False") self.no(Change(project='p', repository='r', branch='b', category='c', ff=False), "all match and fn returns False -> False") self.no(Change(project='x', repository='x', branch='x', category='x', ff=True), "none match and fn returns True -> False") self.yes(Change(project='p', repository='r', branch='b', category='c', ff=True), "all match and fn returns True -> False") self.check() def test_filter_props(self): self.setfilter() self.filt.checks.update( self.filt.createChecks( ("ref-updated", None, None, "prop:event.type"), )) self.yes( Change(properties={'event.type': 'ref-updated'}), "matching property") self.no( Change(properties={'event.type': 'patch-uploaded'}), "non matching property") self.no(Change(properties={}), "no property") self.check() buildbot-2.6.0/master/buildbot/test/unit/test_changes_gerritchangesource.py000066400000000000000000000515021361162603000273450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc[''], 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import types from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.internet import utils from twisted.python import failure from twisted.trial import unittest from buildbot.changes import gerritchangesource from buildbot.test.fake import fakedb from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.change import Change from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin class TestGerritHelpers(unittest.TestCase): def test_proper_json(self): self.assertEqual("Justin Case ", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "name": "Justin Case", "email": "justin.case@example.com" })) def test_missing_username(self): self.assertEqual("Justin Case ", gerritchangesource._gerrit_user_to_author({ "name": "Justin Case", "email": "justin.case@example.com" })) def test_missing_name(self): self.assertEqual("unknown ", gerritchangesource._gerrit_user_to_author({ "email": "justin.case@example.com" })) self.assertEqual("gerrit ", gerritchangesource._gerrit_user_to_author({ "email": "justin.case@example.com" }, "gerrit")) self.assertEqual("justincase ", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "email": "justin.case@example.com" }, "gerrit")) def test_missing_email(self): self.assertEqual("Justin Case", gerritchangesource._gerrit_user_to_author({ "username": "justincase", "name": "Justin Case" })) self.assertEqual("Justin Case", gerritchangesource._gerrit_user_to_author({ "name": "Justin Case" })) self.assertEqual("justincase", gerritchangesource._gerrit_user_to_author({ "username": "justincase" })) self.assertEqual("unknown", gerritchangesource._gerrit_user_to_author({ })) self.assertEqual("gerrit", 
gerritchangesource._gerrit_user_to_author({ }, "gerrit")) class TestGerritChangeSource(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def newChangeSource(self, host, user, *args, **kwargs): s = gerritchangesource.GerritChangeSource( host, user, *args, **kwargs) self.attachChangeSource(s) s.configureService() return s # tests def test_describe(self): s = self.newChangeSource('somehost', 'someuser') self.assertSubstring("GerritChangeSource", s.describe()) def test_name(self): s = self.newChangeSource('somehost', 'someuser') self.assertEqual("GerritChangeSource:someuser@somehost:29418", s.name) s = self.newChangeSource('somehost', 'someuser', name="MyName") self.assertEqual("MyName", s.name) # TODO: test the backoff algorithm # this variable is reused in test_steps_source_repo # to ensure correct integration between change source and repo step expected_change = {'category': 'patchset-created', 'files': ['unknown'], 'repository': 'ssh://someuser@somehost:29418/pr', 'author': 'Dustin ', 'committer': None, 'comments': 'fix 1234', 'project': 'pr', 'branch': 'br/4321', 'revlink': 'http://buildbot.net', 'codebase': None, 'revision': 'abcdef', 'src': None, 'when_timestamp': None, 'properties': {'event.change.owner.email': 'dustin@mozilla.com', 'event.change.subject': 'fix 1234', 'event.change.project': 'pr', 'event.change.owner.name': 'Dustin', 'event.change.number': '4321', 'event.change.url': 'http://buildbot.net', 'event.change.branch': 'br', 'event.type': 'patchset-created', 'event.patchSet.revision': 'abcdef', 'event.patchSet.number': '12', 'event.source': 'GerritChangeSource'}} @defer.inlineCallbacks def test_lineReceived_patchset_created(self): s = self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps(dict( type="patchset-created", change=dict( branch="br", project="pr", number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), patchSet=dict(revision="abcdef", number="12") ))) self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] for k, v in c.items(): self.assertEqual(self.expected_change[k], v) @defer.inlineCallbacks def test_duplicate_events_ignored(self): s = self.newChangeSource('somehost', 'someuser') yield s.lineReceived(json.dumps(dict( type="patchset-created", change=dict( branch="br", project="pr", number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), patchSet=dict(revision="abcdef", number="12") ))) self.assertEqual(len(self.master.data.updates.changesAdded), 1) yield s.lineReceived(json.dumps(dict( type="patchset-created", change=dict( branch="br", # Note that this time "project" is a dictionary project=dict(name="pr"), number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), patchSet=dict(revision="abcdef", number="12") ))) self.assertEqual(len(self.master.data.updates.changesAdded), 1) @defer.inlineCallbacks def test_malformed_events_ignored(self): s = self.newChangeSource('somehost', 'someuser') # "change" not in event yield s.lineReceived(json.dumps(dict( type="patchset-created", patchSet=dict(revision="abcdef", number="12") ))) self.assertEqual(len(self.master.data.updates.changesAdded), 0) # "patchSet" not in event yield s.lineReceived(json.dumps(dict( 
type="patchset-created", change=dict( branch="br", # Note that this time "project" is a dictionary project=dict(name="pr"), number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), ))) self.assertEqual(len(self.master.data.updates.changesAdded), 0) change_merged_event = { "type": "change-merged", "change": { "branch": "br", "project": "pr", "number": "4321", "owner": {"name": "Chuck", "email": "chuck@norris.com"}, "url": "http://buildbot.net", "subject": "fix 1234"}, "patchSet": {"revision": "abcdefj", "number": "13"} } @defer.inlineCallbacks def test_handled_events_filter_true(self): s = self.newChangeSource( 'somehost', 'some_choosy_user', handled_events=["change-merged"]) yield s.lineReceived(json.dumps(self.change_merged_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] self.assertEqual(c["category"], "change-merged") self.assertEqual(c["branch"], "br") @defer.inlineCallbacks def test_handled_events_filter_false(self): s = self.newChangeSource('somehost', 'some_choosy_user') yield s.lineReceived(json.dumps(self.change_merged_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_custom_handler(self): s = self.newChangeSource( 'somehost', 'some_choosy_user', handled_events=["change-merged"]) def custom_handler(self, properties, event): event['change']['project'] = "world" return self.addChangeFromEvent(properties, event) # Patches class to not bother with the inheritance s.eventReceived_change_merged = types.MethodType(custom_handler, s) yield s.lineReceived(json.dumps(self.change_merged_event)) self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] self.assertEqual(c['project'], "world") def test_startStreamProcess_bytes_output(self): s = self.newChangeSource( 'somehost', 'some_choosy_user', debug=True) exp_argv = ['ssh', 'some_choosy_user@somehost', '-p', '29418'] exp_argv += ['gerrit', 'stream-events'] def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], [exp_argv[0], exp_argv]) pp.errReceived(b'test stderr\n') pp.outReceived(b'{"type":"dropped-output"}\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) s.startStreamProcess() # ------------------------------------------------------------------------- # Test data for getFiles() # ------------------------------------------------------------------------- query_files_success_line1 = { "patchSets": [ { "number": 1, "files": [ {"file": "/COMMIT_MSG", "type": "ADDED", "insertions": 13, "deletions": 0}, ], }, { "number": 13, "files": [ {"file": "/COMMIT_MSG", "type": "ADDED", "insertions": 13, "deletions": 0}, {"file": "file1", "type": "MODIFIED", "insertions": 7, "deletions": 0}, {"file": "file2", "type": "MODIFIED", "insertions": 2, "deletions": -2}, ], } ] } query_files_success_line2 = { "type": "stats", "rowCount": 1 } query_files_success = '\n'.join([ json.dumps(query_files_success_line1), json.dumps(query_files_success_line2) ]).encode('utf8') query_files_failure = b'{"type":"stats","rowCount":0}' @defer.inlineCallbacks def test_getFiles(self): s = self.newChangeSource('host', 'user', gerritport=2222) exp_argv = [ 'ssh', 'user@host', '-p', '2222', 'gerrit', 'query', '1000', '--format', 'JSON', '--files', '--patch-sets' ] def getoutput_success(cmd, argv, env): self.assertEqual([cmd, argv], [exp_argv[0], exp_argv[1:]]) return 
self.query_files_success def getoutput_failure(cmd, argv, env): return self.query_files_failure self.patch(utils, 'getProcessOutput', getoutput_success) res = yield s.getFiles(1000, 13) self.assertEqual(set(res), {'/COMMIT_MSG', 'file1', 'file2'}) self.patch(utils, 'getProcessOutput', getoutput_failure) res = yield s.getFiles(1000, 13) self.assertEqual(res, ['unknown']) @defer.inlineCallbacks def test_getFilesFromEvent(self): s = self.newChangeSource('host', 'user', get_files=True, handled_events=["change-merged"]) def getoutput(cmd, argv, env): return self.query_files_success self.patch(utils, 'getProcessOutput', getoutput) yield s.lineReceived(json.dumps(self.change_merged_event)) c = self.master.data.updates.changesAdded[0] self.assertEqual(set(c['files']), {'/COMMIT_MSG', 'file1', 'file2'}) class TestGerritEventLogPoller(changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): NOW_TIMESTAMP = 1479302598 EVENT_TIMESTAMP = 1479302599 NOW_FORMATTED = '2016-11-16 13:23:18' EVENT_FORMATTED = '2016-11-16 13:23:19' OBJECTID = 1234 @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def newChangeSource(self, **kwargs): auth = kwargs.pop('auth', ('log', 'pass')) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'gerrit', auth=auth) self.changesource = gerritchangesource.GerritEventLogPoller( 'gerrit', auth=auth, gitBaseURL="ssh://someuser@somehost:29418", pollAtLaunch=False, **kwargs) @defer.inlineCallbacks def startChangeSource(self): yield self.changesource.setServiceParent(self.master) yield self.attachChangeSource(self.changesource) # tests @defer.inlineCallbacks def test_now(self): yield self.newChangeSource() self.changesource.now() @defer.inlineCallbacks def test_describe(self): # describe is not used yet in buildbot nine, but it can still be useful in the future, so lets # implement and test it yield self.newChangeSource() self.assertSubstring('GerritEventLogPoller', self.changesource.describe()) @defer.inlineCallbacks def test_name(self): yield self.newChangeSource() self.assertEqual('GerritEventLogPoller:gerrit', self.changesource.name) @defer.inlineCallbacks def test_lineReceived_patchset_created(self): self.master.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='GerritEventLogPoller:gerrit', class_name='GerritEventLogPoller')]) yield self.newChangeSource(get_files=True) self.changesource.now = lambda: datetime.datetime.utcfromtimestamp( self.NOW_TIMESTAMP) thirty_days_ago = ( datetime.datetime.utcfromtimestamp(self.NOW_TIMESTAMP) - datetime.timedelta(days=30)) self._http.expect(method='get', ep='/plugins/events-log/events/', params={'t1': thirty_days_ago.strftime("%Y-%m-%d %H:%M:%S")}, content_json=dict( type="patchset-created", change=dict( branch="br", project="pr", number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), eventCreatedOn=self.EVENT_TIMESTAMP, patchSet=dict(revision="abcdef", number="12"))) self._http.expect( method='get', ep='/changes/4321/revisions/12/files/', content=self.change_revision_resp, ) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) c = self.master.data.updates.changesAdded[0] expected_change = dict(TestGerritChangeSource.expected_change) 
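# --- illustrative sketch, not part of the original test file ---
# The expectation above encodes the poller's first-poll behaviour: with no
# stored last_event_ts it asks the events-log plugin for everything since a
# thirty-day look-back window, formatted as "%Y-%m-%d %H:%M:%S".  A minimal,
# self-contained reproduction of that 't1' value (the helper name
# initial_t1_param is hypothetical, not part of buildbot):
import datetime


def initial_t1_param(now_timestamp, lookback_days=30):
    """Return the 't1' query parameter used on an initial poll (sketch)."""
    now = datetime.datetime.utcfromtimestamp(now_timestamp)
    return (now - datetime.timedelta(days=lookback_days)).strftime("%Y-%m-%d %H:%M:%S")


# With NOW_TIMESTAMP = 1479302598 (2016-11-16 13:23:18 UTC) this yields
# '2016-10-17 13:23:18', matching the params={'t1': ...} expectation above.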
expected_change['properties'] = dict(expected_change['properties']) expected_change['properties']['event.source'] = 'GerritEventLogPoller' for k, v in c.items(): if k == 'files': continue self.assertEqual(expected_change[k], v) self.master.db.state.assertState( self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP) self.assertEqual(set(c['files']), {'/COMMIT_MSG', 'file1'}) # do a second poll, it should ask for the next events self._http.expect(method='get', ep='/plugins/events-log/events/', params={'t1': self.EVENT_FORMATTED}, content_json=dict( type="patchset-created", change=dict( branch="br", project="pr", number="4321", owner=dict(name="Dustin", email="dustin@mozilla.com"), url="http://buildbot.net", subject="fix 1234" ), eventCreatedOn=self.EVENT_TIMESTAMP + 1, patchSet=dict(revision="abcdef", number="12"))) self._http.expect( method='get', ep='/changes/4321/revisions/12/files/', content=self.change_revision_resp, ) yield self.changesource.poll() self.master.db.state.assertState( self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP + 1) change_revision_dict = { '/COMMIT_MSG': {'status': 'A', 'lines_inserted': 9, 'size_delta': 1, 'size': 1}, 'file1': {'lines_inserted': 9, 'lines_deleted': 2, 'size_delta': 1, 'size': 1}, } change_revision_resp = b')]}\n' + json.dumps(change_revision_dict).encode('utf8') @defer.inlineCallbacks def test_getFiles(self): yield self.newChangeSource(get_files=True) yield self.startChangeSource() self._http.expect( method='get', ep='/changes/100/revisions/1/files/', content=self.change_revision_resp, ) files = yield self.changesource.getFiles(100, 1) self.assertEqual(set(files), {'/COMMIT_MSG', 'file1'}) class TestGerritChangeFilter(unittest.TestCase): def test_basic(self): ch = Change(**TestGerritChangeSource.expected_change) f = gerritchangesource.GerritChangeFilter( branch=["br"], eventtype=["patchset-created"]) self.assertTrue(f.filter_change(ch)) f = gerritchangesource.GerritChangeFilter( branch="br2", eventtype=["patchset-created"]) self.assertFalse(f.filter_change(ch)) f = gerritchangesource.GerritChangeFilter( branch="br", eventtype="ref-updated") self.assertFalse(f.filter_change(ch)) self.assertEqual( repr(f), '') buildbot-2.6.0/master/buildbot/test/unit/test_changes_github.py000066400000000000000000000453111361162603000247450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.trial import unittest from buildbot.changes.github import GitHubPullrequestPoller from buildbot.config import ConfigErrors from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import changesource from buildbot.test.util.misc import TestReactorMixin gitJsonPayloadSinglePullrequest = """ { "html_url": "https://github.com/buildbot/buildbot/pull/4242", "number": 4242, "state": "open", "locked": false, "title": "Update the README with new information", "user": { "login": "defunkt" }, "body": "This is a pretty simple change that we need to pull into master.", "updated_at": "2017-01-25T22:36:21Z", "head": { "ref": "defunkt/change", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "repo": { "name": "buildbot", "full_name": "defunkt/buildbot", "fork": true, "private": false, "git_url": "git://github.com/defunkt/buildbot.git", "ssh_url": "git@github.com:defunkt/buildbot.git", "clone_url": "https://github.com/defunkt/buildbot.git", "svn_url": "https://github.com/defunkt/buildbot" } }, "base": { "ref": "master", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "name": "buildbot", "repo": { "full_name": "buildbot/buildbot", "fork": false, "private": false, "git_url": "git://github.com/buildbot/buildbot.git", "ssh_url": "git@github.com:buildbot/buildbot.git", "clone_url": "https://github.com/buildbot/buildbot.git", "svn_url": "https://github.com/buildbot/buildbot" } }, "merged": false, "commits": 42, "mergeable": true, "mergeable_state": "clean", "merged_by": null } """ gitJsonPayloadPullRequests = """ [ { "html_url": "https://github.com/buildbot/buildbot/pull/4242", "number": 4242, "locked": false, "title": "Update the README with new information", "user": { "login": "defunkt" }, "body": "This is a pretty simple change that we need to pull into master.", "updated_at": "2017-01-25T22:36:21Z", "head": { "ref": "defunkt/change", "sha": "4c9a7f03e04e551a5e012064b581577f949dd3a4", "repo": { "name": "buildbot", "git_url": "git://github.com/defunkt/buildbot.git", "ssh_url": "git@github.com:defunkt/buildbot.git", "clone_url": "https://github.com/defunkt/buildbot.git", "svn_url": "https://github.com/defunkt/buildbot" } }, "base": { "ref": "master", "name": "buildbot", "repo": { "git_url": "git://github.com/buildbot/buildbot.git", "ssh_url": "git@github.com:buildbot/buildbot.git", "clone_url": "https://github.com/buildbot/buildbot.git", "svn_url": "https://github.com/buildbot/buildbot" } } } ] """ gitJsonPayloadFiles = """ [ { "filename": "README.md" } ] """ gitJsonPayloadAuthors = """ [ { "commit": { "author": { "name": "defunkt", "email": "defunkt@defunkt.null" } } } ] """ gitJsonPayloadCommitters = """ [ { "commit": { "committer": { "name": "defunktc", "email": "defunktc@defunkt.null" } } } ] """ _CT_ENCODED = b'application/x-www-form-urlencoded' _CT_JSON = b'application/json' _GH_PARSED_PROPS = { 'github.head.sha': '4c9a7f03e04e551a5e012064b581577f949dd3a4', 'github.state': 'open', 'github.number': 4242, 'github.merged': False, 'github.base.repo.full_name': 'buildbot/buildbot', 'github.base.ref': 'master', 'github.base.sha': '4c9a7f03e04e551a5e012064b581577f949dd3a4', 'github.head.repo.full_name': 'defunkt/buildbot', 'github.mergeable_state': 'clean', 'github.mergeable': True, 'github.head.ref': 'defunkt/change', 'github.title': 'Update the README with new information', 'github.merged_by': None } class TestGitHubPullrequestPoller(changesource.ChangeSourceMixin, 
TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpChangeSource() yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() yield self.tearDownChangeSource() @defer.inlineCallbacks def newChangeSource(self, owner, repo, endpoint='https://api.github.com', **kwargs): http_headers = {'User-Agent': 'Buildbot'} token = kwargs.get('token', None) if token: http_headers.update({'Authorization': 'token ' + token}) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, endpoint, headers=http_headers) self.changesource = GitHubPullrequestPoller(owner, repo, **kwargs) @defer.inlineCallbacks def startChangeSource(self): yield self.changesource.setServiceParent(self.master) yield self.attachChangeSource(self.changesource) def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = response_dict[key] self.assertDictEqual(expected_dict, expected) @defer.inlineCallbacks def test_describe(self): yield self.newChangeSource('defunkt', 'defunkt') yield self.startChangeSource() self.assertEqual( "GitHubPullrequestPoller watching the GitHub repository {}/{}". format('defunkt', 'defunkt'), self.changesource.describe()) @defer.inlineCallbacks def test_default_name(self): yield self.newChangeSource('defunkt', 'defunkt') yield self.startChangeSource() self.assertEqual("GitHubPullrequestPoller:{}/{}".format( 'defunkt', 'defunkt'), self.changesource.name) @defer.inlineCallbacks def test_custom_name(self): yield self.newChangeSource('defunkt', 'defunkt', name="MyName") yield self.startChangeSource() self.assertEqual("MyName", self.changesource.name) @defer.inlineCallbacks def test_SimplePR(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_wrongBranch(self): yield 
self.newChangeSource( 'defunkt', 'defunkt', token='1234', branches=['wrongBranch']) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_baseURL(self): yield self.newChangeSource( 'defunkt', 'defunkt', endpoint='https://my.other.endpoint', token='1234', baseURL='https://my.other.endpoint/', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_PRfilter(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', pullrequest_filter=lambda pr: pr['number'] == 1337 ) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_failCommitters(self): yield self.newChangeSource('defunkt', 'defunkt', token='1234') self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads("[{}]")) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads("[{}]")) yield self.startChangeSource() yield self.assertFailure(self.changesource.poll(), KeyError) @defer.inlineCallbacks def test_failFiles(self): yield self.newChangeSource('defunkt', 'defunkt', token='1234') self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) 
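# --- illustrative sketch, not part of the original test file ---
# Every full-poll expectation in this class stubs the same GitHub REST
# sequence for a single pull request: the PR list, the PR detail, the commit
# list twice (author lookup, then committer lookup) and the changed files.
# A small helper spelling that sequence out (pr_poll_endpoints is a
# hypothetical name, not a buildbot API):
def pr_poll_endpoints(owner, repo, number):
    base = '/repos/{}/{}/pulls'.format(owner, repo)
    detail = '{}/{}'.format(base, number)
    return [base, detail, detail + '/commits', detail + '/commits', detail + '/files']


# pr_poll_endpoints('defunkt', 'defunkt', 4242) reproduces the 'ep' values
# expected by test_SimplePR and test_baseURL above and test_magicLink below.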
self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads("[{}]")) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads("[{}]")) yield self.startChangeSource() yield self.assertFailure(self.changesource.poll(), KeyError) @defer.inlineCallbacks def test_wrongRepoLink(self): yield self.assertFailure( self.newChangeSource( 'defunkt', 'defunkt', token='1234', repository_type='defunkt'), ConfigErrors) @defer.inlineCallbacks def test_magicLink(self): yield self.newChangeSource( 'defunkt', 'defunkt', magic_link=True, token='1234', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'refs/pull/4242/merge') self.assertEqual(change['repository'], 'https://github.com/buildbot/buildbot.git') self.assertEqual(change['files'], ['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") @defer.inlineCallbacks def test_AuthormissingEmail(self): yield self.newChangeSource( 'defunkt', 'defunkt', token='1234', github_property_whitelist=["github.*"]) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls', content_json=json.loads(gitJsonPayloadPullRequests)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242', content_json=json.loads(gitJsonPayloadSinglePullrequest)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadAuthors)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/commits', content_json=json.loads(gitJsonPayloadCommitters)) self._http.expect( method='get', ep='/repos/defunkt/defunkt/pulls/4242/files', content_json=json.loads(gitJsonPayloadFiles)) yield self.startChangeSource() yield self.changesource.poll() self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['author'], 'defunkt ') self.assertEqual(change['revision'], '4c9a7f03e04e551a5e012064b581577f949dd3a4') self.assertEqual(change['revlink'], 'https://github.com/buildbot/buildbot/pull/4242') self.assertEqual(change['branch'], 'defunkt/change') self.assertEqual(change['repository'], 'https://github.com/defunkt/buildbot.git') self.assertEqual(change['files'], 
['README.md']) self.assertEqual(change['committer'], 'defunktc ') self.assertDictSubset(_GH_PARSED_PROPS, change['properties']) self.assertEqual(change["comments"], "GitHub Pull Request #4242 (42 commits)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") buildbot-2.6.0/master/buildbot/test/unit/test_changes_gitpoller.py000066400000000000000000002214531361162603000254670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import re import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import gitpoller from buildbot.test.fake.private_tempdir import MockPrivateTemporaryDirectory from buildbot.test.util import changesource from buildbot.test.util import config from buildbot.test.util import gpo from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes # Test that environment variables get propagated to subprocesses (See #2116) os.environ['TEST_THAT_ENVIRONMENT_GETS_PASSED_TO_SUBPROCESSES'] = 'TRUE' class GitOutputParsing(gpo.GetProcessOutputMixin, unittest.TestCase): """Test GitPoller methods for parsing git output""" def setUp(self): self.poller = gitpoller.GitPoller('git@example.com:~foo/baz.git') self.setUpGetProcessOutput() dummyRevStr = '12345abcde' def _perform_git_output_test(self, methodToTest, args, desiredGoodOutput, desiredGoodResult, emptyRaisesException=True): # make this call to self.patch here so that we raise a SkipTest if it # is not supported self.expectCommands( gpo.Expect('git', *args) .path('gitpoller-work'), ) d = defer.succeed(None) @d.addCallback def call_empty(_): # we should get an Exception with empty output from git return methodToTest(self.dummyRevStr) def cb_empty(_): if emptyRaisesException: self.fail( "getProcessOutput should have failed on empty output") def eb_empty(f): if not emptyRaisesException: self.fail( "getProcessOutput should NOT have failed on empty output") d.addCallbacks(cb_empty, eb_empty) d.addCallback(lambda _: self.assertAllCommandsRan()) # and the method shouldn't suppress any exceptions self.expectCommands( gpo.Expect('git', *args) .path('gitpoller-work') .exit(1), ) @d.addCallback def call_exception(_): return methodToTest(self.dummyRevStr) def cb_exception(_): self.fail("getProcessOutput should have failed on stderr output") def eb_exception(f): pass d.addCallbacks(cb_exception, eb_exception) d.addCallback(lambda _: self.assertAllCommandsRan()) # finally we should get what's expected from good output self.expectCommands( gpo.Expect('git', *args) .path('gitpoller-work') .stdout(desiredGoodOutput) ) @d.addCallback def call_desired(_): return methodToTest(self.dummyRevStr) @d.addCallback def cb_desired(r): self.assertEqual(r, desiredGoodResult) 
# check types if isinstance(r, str): self.assertIsInstance(r, str) elif isinstance(r, list): [self.assertIsInstance(e, str) for e in r] d.addCallback(lambda _: self.assertAllCommandsRan()) return d def test_get_commit_author(self): authorStr = 'Sammy Jankis ' authorBytes = unicode2bytes(authorStr) return self._perform_git_output_test(self.poller._get_commit_author, ['log', '--no-walk', '--format=%aN <%aE>', self.dummyRevStr, '--'], authorBytes, authorStr) def test_get_commit_committer(self): committerStr = 'Sammy Jankis ' committerBytes = unicode2bytes(committerStr) return self._perform_git_output_test(self.poller._get_commit_committer, ['log', '--no-walk', '--format=%cN <%cE>', self.dummyRevStr, '--'], committerBytes, committerStr) def _test_get_commit_comments(self, commentStr): commentBytes = unicode2bytes(commentStr) return self._perform_git_output_test(self.poller._get_commit_comments, ['log', '--no-walk', '--format=%s%n%b', self.dummyRevStr, '--'], commentBytes, commentStr, emptyRaisesException=False) def test_get_commit_comments(self): comments = ['this is a commit message\n\nthat is multiline', 'single line message', ''] return defer.DeferredList([self._test_get_commit_comments(commentStr) for commentStr in comments]) def test_get_commit_files(self): filesBytes = b'\n\nfile1\nfile2\n"\146ile_octal"\nfile space' filesRes = ['file1', 'file2', 'file_octal', 'file space'] return self._perform_git_output_test(self.poller._get_commit_files, ['log', '--name-only', '--no-walk', '--format=%n', self.dummyRevStr, '--'], filesBytes, filesRes, emptyRaisesException=False) def test_get_commit_files_with_space_in_changed_files(self): filesBytes = b'normal_directory/file1\ndirectory with space/file2' filesStr = bytes2unicode(filesBytes) return self._perform_git_output_test( self.poller._get_commit_files, ['log', '--name-only', '--no-walk', '--format=%n', self.dummyRevStr, '--'], filesBytes, [l for l in filesStr.splitlines() if l.strip()], emptyRaisesException=False, ) def test_get_commit_timestamp(self): stampBytes = b'1273258009' stampStr = bytes2unicode(stampBytes) return self._perform_git_output_test(self.poller._get_commit_timestamp, ['log', '--no-walk', '--format=%ct', self.dummyRevStr, '--'], stampBytes, float(stampStr)) # _get_changes is tested in TestGitPoller, below class TestGitPollerBase(gpo.GetProcessOutputMixin, changesource.ChangeSourceMixin, logging.LoggingMixin, TestReactorMixin, unittest.TestCase): REPOURL = 'git@example.com:~foo/baz.git' REPOURL_QUOTED = 'git%40example.com%3A%7Efoo%2Fbaz.git' def createPoller(self): # this is overridden in TestGitPollerWithSshPrivateKey return gitpoller.GitPoller(self.REPOURL) @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpGetProcessOutput() yield self.setUpChangeSource() self.poller = self.createPoller() yield self.poller.setServiceParent(self.master) def tearDown(self): return self.tearDownChangeSource() class TestGitPoller(TestGitPollerBase): def test_describe(self): self.assertSubstring("GitPoller", self.poller.describe()) def test_name(self): self.assertEqual(bytes2unicode(self.REPOURL), bytes2unicode(self.poller.name)) # and one with explicit name... 
other = gitpoller.GitPoller(self.REPOURL, name="MyName") self.assertEqual("MyName", other.name) @defer.inlineCallbacks def test_checkGitFeatures_git_not_installed(self): self.setUpLogging() self.expectCommands( gpo.Expect('git', '--version') .stdout(b'Command not found'), ) yield self.assertFailure(self.poller._checkGitFeatures(), EnvironmentError) self.assertAllCommandsRan() @defer.inlineCallbacks def test_checkGitFeatures_git_bad_version(self): self.setUpLogging() self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git ') ) yield self.assertFailure(self.poller._checkGitFeatures(), EnvironmentError) self.assertAllCommandsRan() @defer.inlineCallbacks def test_poll_initial(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) def test_poll_failInit(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work') .exit(1), ) d = self.assertFailure(self.poller.poll(), EnvironmentError) d.addCallback(lambda _: self.assertAllCommandsRan()) return d def test_poll_failFetch(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .exit(1), ) d = self.assertFailure(self.poller.poll(), EnvironmentError) d.addCallback(lambda _: self.assertAllCommandsRan()) return d @defer.inlineCallbacks def test_poll_failRevParse(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .exit(1), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(len(self.flushLoggedErrors()), 1) self.assertEqual(self.poller.lastRev, {}) @defer.inlineCallbacks def test_poll_failLog(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), 
gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .exit(1), ) # do the poll self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930' } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(len(self.flushLoggedErrors()), 1) self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) def test_poll_GitError(self): # Raised when git exits with status code 128. See issue 2468 self.expectCommands( gpo.Expect('git', 'init', '--bare', 'gitpoller-work') .exit(128), ) d = self.assertFailure(self.poller._dovccmd('init', ['--bare', 'gitpoller-work']), gitpoller.GitError) d.addCallback(lambda _: self.assertAllCommandsRan()) return d def test_poll_GitError_log(self): self.setUpLogging() self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work') .exit(128), ) d = self.poller.poll() d.addCallback(lambda _: self.assertAllCommandsRan()) self.assertLogged("command.*on repourl.*failed.*exit code 128.*") return d @defer.inlineCallbacks def test_poll_nothingNew(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.addGetProcessOutputExpectEnv({'ENVVAR': 'TRUE'}) self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'no interesting output'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--') .path('gitpoller-work') .stdout(b''), ) self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' } yield self.poller.poll() self.assertAllCommandsRan() self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_multipleBranches_initial(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/heads/release\n' b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') 
.stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), ) # do the poll self.poller.branches = ['master', 'release', 'not_on_remote'] yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) @defer.inlineCallbacks def test_poll_multipleBranches(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/heads/release\n' b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), gpo.Expect('git', 'log', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2' ])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['master', 'release'] self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) self.assertEqual(self.master.data.updates.changesAdded, [ { 'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:64a5dc2a', 'committer': 'by:64a5dc2a', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/64a'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': 
'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:9118f4ab', 'committer': 'by:9118f4ab', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/911'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, } ]) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_default(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--') .path('gitpoller-work') .stdout(b''), ) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(len(self.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--') .path('gitpoller-work') .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', } self.poller.buildPushesWithNoCommits = True yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) 
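# --- illustrative sketch, not part of the original test file ---
# The REPOURL_QUOTED constant used throughout these expectations is just
# REPOURL percent-encoded so it can serve as a path component of the local
# tracking ref 'refs/buildbot/<quoted repourl>/<branch>'.  A minimal
# reproduction (using urllib here is an assumption; the poller's own
# implementation may quote the URL through its own utility):
from urllib.parse import quote


def quoted_repourl(repourl):
    """Percent-encode a repourl for use inside a git ref name (sketch)."""
    # '~' is unreserved in RFC 3986, so quote() leaves it alone on Python 3.7+;
    # encode it explicitly to obtain the %7E seen in REPOURL_QUOTED.
    return quote(repourl, safe='').replace('~', '%7E')


# quoted_repourl('git@example.com:~foo/baz.git')
#   == 'git%40example.com%3A%7Efoo%2Fbaz.git'   (the REPOURL_QUOTED above)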
self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true_fast_forward(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--') .path('gitpoller-work') .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c' } self.poller.buildPushesWithNoCommits = True yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_multipleBranches_buildPushesWithNoCommits_true_not_tip(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/release\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', 
'^0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', '--') .path('gitpoller-work') .stdout(b''), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = ['release'] self.poller.lastRev = { 'master': '0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', } self.poller.buildPushesWithNoCommits = True yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '0ba9d553b7217ab4bbad89ad56dc0332c7d57a8c', 'release': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [ {'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'release', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009}] ) @defer.inlineCallbacks def test_poll_allBranches_single(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect( 'git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = True self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') 
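# --- illustrative sketch, not part of the original test file ---
# The literal values asserted here ('by:4423cdbc', ['/etc/442'], 1273258009)
# are not magic: they fall directly out of the stub _get_commit_* helpers
# patched in above, which derive everything from the revision hash.  Spelled
# out for one of the revisions used in these tests:
rev = '4423cdbcbb89c14e50dd5f4152415afd686c5241'
assert 'by:' + rev[:8] == 'by:4423cdbc'    # author/committer stubs
assert '/etc/' + rev[:3] == '/etc/442'     # files stub
# the timestamp stub always returns 1273258009 and the comments stub 'hello!'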
self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') @defer.inlineCallbacks def test_poll_noChanges(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.addGetProcessOutputExpectEnv({'ENVVAR': 'TRUE'}) self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'no interesting output'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '--') .path('gitpoller-work') .stdout(b''), ) self.poller.lastRev = { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_allBranches_multiple(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\trefs/heads/master', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\trefs/heads/release', ])), gpo.Expect( 'git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master', '+release:refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect( 'git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), gpo.Expect( 'git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/release') .path('gitpoller-work') .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), gpo.Expect( 'git', 'log', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '--') .path('gitpoller-work') .stdout(b'\n'.join([b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) 
self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = True self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/heads/release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'refs/heads/release': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 3) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') self.assertEqual(added[2]['author'], 'by:9118f4ab') self.assertEqual(added[2]['committer'], 'by:9118f4ab') self.assertEqual(added[2]['when_timestamp'], 1273258009) self.assertEqual(added[2]['comments'], 'hello!') self.assertEqual(added[2]['files'], ['/etc/911']) self.assertEqual(added[2]['src'], 'git') @defer.inlineCallbacks def test_poll_callableFilteredBranches(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\trefs/heads/master', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\trefs/heads/release', ])), gpo.Expect( 'git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect( 'git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])) ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll class TestCallable: def 
__call__(self, branch): return branch == "refs/heads/master" self.poller.branches = TestCallable() self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/heads/release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } yield self.poller.poll() self.assertAllCommandsRan() # The release branch id should remain unchanged, # because it was ignored. self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'refs/heads/release': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') @defer.inlineCallbacks def test_poll_branchFilter(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'\n'.join([ b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/pull/410/merge', b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2\t' b'refs/pull/410/head', ])), gpo.Expect( 'git', 'fetch', self.REPOURL, '+refs/pull/410/head:refs/buildbot/' + self.REPOURL_QUOTED + '/refs/pull/410/head') .path('gitpoller-work'), gpo.Expect( 'git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/refs/pull/410/head') .path('gitpoller-work') .stdout(b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'), gpo.Expect( 'git', 'log', '--format=%H', '9118f4ab71963d23d02d4bdc54876ac8bf05acf2', '^bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([b'9118f4ab71963d23d02d4bdc54876ac8bf05acf2'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) def pullFilter(branch): """ Note that this isn't useful in practice, because it will only pick up *changes* to pull requests, not the original request. 
""" return re.match('^refs/pull/[0-9]*/head$', branch) # do the poll self.poller.branches = pullFilter self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/pull/410/head': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', 'refs/pull/410/head': '9118f4ab71963d23d02d4bdc54876ac8bf05acf2' }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 1) self.assertEqual(added[0]['author'], 'by:9118f4ab') self.assertEqual(added[0]['committer'], 'by:9118f4ab') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['files'], ['/etc/911']) self.assertEqual(added[0]['src'], 'git') @defer.inlineCallbacks def test_poll_old(self): # Test that environment variables get propagated to subprocesses # (See #2116) self.patch(os, 'environ', {'ENVVAR': 'TRUE'}) self.addGetProcessOutputExpectEnv({'ENVVAR': 'TRUE'}) # patch out getProcessOutput and getProcessOutputAndValue for the # benefit of the _get_changes method self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'no interesting output'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect('git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241' ])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.lastRev = { 'master': 'fa3ae8ed68e664d4db24798611b352e3c6509930' } yield self.poller.poll() # check the results self.assertEqual(self.poller.lastRev, { 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'by:4423cdbc', 'committer': 'by:4423cdbc', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/442'], 'project': '', 'properties': {}, 'repository': 'git@example.com:~foo/baz.git', 'revision': '4423cdbcbb89c14e50dd5f4152415afd686c5241', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }, { 'author': 'by:64a5dc2a', 'committer': 'by:64a5dc2a', 'branch': 'master', 'category': None, 'codebase': None, 'comments': 'hello!', 'files': ['/etc/64a'], 'project': '', 'properties': {}, 'repository': 
'git@example.com:~foo/baz.git', 'revision': '64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', 'revlink': '', 'src': 'git', 'when_timestamp': 1273258009, }]) self.assertAllCommandsRan() self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': '4423cdbcbb89c14e50dd5f4152415afd686c5241' }) @defer.inlineCallbacks def test_poll_callableCategory(self): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\n'), gpo.Expect( 'git', 'log', '--format=%H', '4423cdbcbb89c14e50dd5f4152415afd686c5241', '^fa3ae8ed68e664d4db24798611b352e3c6509930', '--') .path('gitpoller-work') .stdout(b'\n'.join([ b'64a5dc2a4bd4f558b5dd193d47c83c7d7abc9a1a', b'4423cdbcbb89c14e50dd5f4152415afd686c5241'])), ) # and patch out the _get_commit_foo methods which were already tested # above def timestamp(rev): return defer.succeed(1273258009) self.patch(self.poller, '_get_commit_timestamp', timestamp) def author(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_author', author) def committer(rev): return defer.succeed('by:' + rev[:8]) self.patch(self.poller, '_get_commit_committer', committer) def files(rev): return defer.succeed(['/etc/' + rev[:3]]) self.patch(self.poller, '_get_commit_files', files) def comments(rev): return defer.succeed('hello!') self.patch(self.poller, '_get_commit_comments', comments) # do the poll self.poller.branches = True def callableCategory(chdict): return chdict['revision'][:6] self.poller.category = callableCategory self.poller.lastRev = { 'refs/heads/master': 'fa3ae8ed68e664d4db24798611b352e3c6509930', } yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'refs/heads/master': '4423cdbcbb89c14e50dd5f4152415afd686c5241', }) added = self.master.data.updates.changesAdded self.assertEqual(len(added), 2) self.assertEqual(added[0]['author'], 'by:4423cdbc') self.assertEqual(added[0]['committer'], 'by:4423cdbc') self.assertEqual(added[0]['when_timestamp'], 1273258009) self.assertEqual(added[0]['comments'], 'hello!') self.assertEqual(added[0]['branch'], 'master') self.assertEqual(added[0]['files'], ['/etc/442']) self.assertEqual(added[0]['src'], 'git') self.assertEqual(added[0]['category'], '4423cd') self.assertEqual(added[1]['author'], 'by:64a5dc2a') self.assertEqual(added[1]['committer'], 'by:64a5dc2a') self.assertEqual(added[1]['when_timestamp'], 1273258009) self.assertEqual(added[1]['comments'], 'hello!') self.assertEqual(added[1]['files'], ['/etc/64a']) self.assertEqual(added[1]['src'], 'git') self.assertEqual(added[1]['category'], '64a5dc') @defer.inlineCallbacks def test_startService(self): yield self.poller.startService() self.assertEqual( self.poller.workdir, os.path.join('basedir', 'gitpoller-work')) self.assertEqual(self.poller.lastRev, {}) yield self.poller.stopService() @defer.inlineCallbacks def test_startService_loadLastRev(self): self.master.db.state.fakeState( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={"master": 
"fa3ae8ed68e664d4db24798611b352e3c6509930"}, ) yield self.poller.startService() self.assertEqual(self.poller.lastRev, { "master": "fa3ae8ed68e664d4db24798611b352e3c6509930" }) yield self.poller.stopService() class TestGitPollerWithSshPrivateKey(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_check_git_features_ssh_1_7(self, write_local_file_mock, temp_dir_mock): self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 1.7.5\n'), ) yield self.assertFailure(self.poller._checkGitFeatures(), EnvironmentError) self.assertAllCommandsRan() self.assertEqual(len(temp_dir_mock.dirs), 0) write_local_file_mock.assert_not_called() @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 2.10.0\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}"'.format(key_path), 'ls-remote', '--refs', self.REPOURL), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}"'.format(key_path), 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_3(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 2.3.0\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', 'ls-remote', '--refs', self.REPOURL) .stdout(b'4423cdbcbb89c14e50dd5f4152415afd686c5241\t' b'refs/heads/master\n'), gpo.Expect('git', 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .env({'GIT_SSH_COMMAND': 'ssh -i "{0}"'.format(key_path)}), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( 
name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_failFetch_git_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') # make sure we cleanup the private key when fetch fails self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 2.10.0\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}"'.format(key_path), 'ls-remote', '--refs', self.REPOURL), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}"'.format(key_path), 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .exit(1), ) yield self.assertFailure(self.poller.poll(), EnvironmentError) self.assertAllCommandsRan() temp_dir_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) write_local_file_mock.assert_called_with(key_path, 'ssh-key', mode=0o400) class TestGitPollerWithSshHostKey(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key', sshHostKey='ssh-host-key') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') known_hosts_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-known-hosts') self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 2.10.0\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format( key_path, known_hosts_path), 'ls-remote', '--refs', self.REPOURL), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format( key_path, known_hosts_path), 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) expected_file_writes = [ mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, '* ssh-host-key'), mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, '* ssh-host-key'), ] self.assertEqual(expected_file_writes, 
write_local_file_mock.call_args_list) class TestGitPollerWithSshKnownHosts(TestGitPollerBase): def createPoller(self): return gitpoller.GitPoller(self.REPOURL, sshPrivateKey='ssh-key', sshKnownHosts='ssh-known-hosts') @mock.patch('buildbot.util.private_tempdir.PrivateTemporaryDirectory', new_callable=MockPrivateTemporaryDirectory) @mock.patch('buildbot.changes.gitpoller.writeLocalFile') @defer.inlineCallbacks def test_poll_initial_2_10(self, write_local_file_mock, temp_dir_mock): key_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-key') known_hosts_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@', 'ssh-known-hosts') self.expectCommands( gpo.Expect('git', '--version') .stdout(b'git version 2.10.0\n'), gpo.Expect('git', 'init', '--bare', 'gitpoller-work'), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format( key_path, known_hosts_path), 'ls-remote', '--refs', self.REPOURL), gpo.Expect('git', '-c', 'core.sshCommand=ssh -i "{0}" ' '-o "UserKnownHostsFile={1}"'.format( key_path, known_hosts_path), 'fetch', self.REPOURL, '+master:refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work'), gpo.Expect('git', 'rev-parse', 'refs/buildbot/' + self.REPOURL_QUOTED + '/master') .path('gitpoller-work') .stdout(b'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5\n'), ) yield self.poller.poll() self.assertAllCommandsRan() self.assertEqual(self.poller.lastRev, { 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) self.master.db.state.assertStateByClass( name=bytes2unicode(self.REPOURL), class_name='GitPoller', lastRev={ 'master': 'bf0b01df6d00ae8d1ffa0b2e2acbe642a6cd35d5' }) temp_dir_path = os.path.join('gitpoller-work', '.buildbot-ssh@@@') self.assertEqual(temp_dir_mock.dirs, [(temp_dir_path, 0o700), (temp_dir_path, 0o700)]) expected_file_writes = [ mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, 'ssh-known-hosts'), mock.call(key_path, 'ssh-key', mode=0o400), mock.call(known_hosts_path, 'ssh-known-hosts'), ] self.assertEqual(expected_file_writes, write_local_file_mock.call_args_list) class TestGitPollerConstructor(unittest.TestCase, config.ConfigErrorsMixin): def test_deprecatedFetchRefspec(self): with self.assertRaisesConfigError( "fetch_refspec is no longer supported"): gitpoller.GitPoller("/tmp/git.git", fetch_refspec='not-supported') def test_oldPollInterval(self): poller = gitpoller.GitPoller("/tmp/git.git", pollinterval=10) self.assertEqual(poller.pollInterval, 10) def test_branches_default(self): poller = gitpoller.GitPoller("/tmp/git.git") self.assertEqual(poller.branches, ["master"]) def test_branches_oldBranch(self): poller = gitpoller.GitPoller("/tmp/git.git", branch='magic') self.assertEqual(poller.branches, ["magic"]) def test_branches(self): poller = gitpoller.GitPoller("/tmp/git.git", branches=['magic', 'marker']) self.assertEqual(poller.branches, ["magic", "marker"]) def test_branches_True(self): poller = gitpoller.GitPoller("/tmp/git.git", branches=True) self.assertEqual(poller.branches, True) def test_only_tags_True(self): poller = gitpoller.GitPoller("/tmp/git.git", only_tags=True) self.assertIsNotNone(poller.branches) def test_branches_andBranch(self): with self.assertRaisesConfigError( "can't specify both branch and branches"): gitpoller.GitPoller("/tmp/git.git", branch='bad', branches=['listy']) def test_branches_and_only_tags(self): with self.assertRaisesConfigError( "can't specify only_tags and branch/branches"): gitpoller.GitPoller("/tmp/git.git", only_tags=True, branches=['listy']) 
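    # --- Editor's illustrative sketch (not part of the original test suite) ---
    # The constructor tests around here exercise GitPoller's branch-selection
    # options (branch / branches / only_tags) plus the 'pollinterval' alias.
    # As a rough master.cfg-style usage example, with a made-up repository URL
    # and poll interval:
    def _example_usage_sketch(self):
        return gitpoller.GitPoller(
            'git@example.com:~foo/baz.git',   # repourl
            branches=['master', 'release'],   # or branches=True for every ref,
                                              # or only_tags=True for tags only
            pollInterval=60,                  # 'pollinterval' is accepted as an alias
        )
        # sshPrivateKey= / sshKnownHosts= may additionally be passed for
        # key-based SSH access, as exercised by the TestGitPollerWithSsh*
        # cases above.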
def test_branch_and_only_tags(self): with self.assertRaisesConfigError( "can't specify only_tags and branch/branches"): gitpoller.GitPoller("/tmp/git.git", only_tags=True, branch='bad') def test_gitbin_default(self): poller = gitpoller.GitPoller("/tmp/git.git") self.assertEqual(poller.gitbin, "git") buildbot-2.6.0/master/buildbot/test/unit/test_changes_hgpoller.py000066400000000000000000000353231361162603000253010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import hgpoller from buildbot.test.util import changesource from buildbot.test.util import gpo from buildbot.test.util.misc import TestReactorMixin ENVIRON_2116_KEY = 'TEST_THAT_ENVIRONMENT_GETS_PASSED_TO_SUBPROCESSES' LINESEP_BYTES = os.linesep.encode("ascii") PATHSEP_BYTES = os.pathsep.encode("ascii") class TestHgPollerBase(gpo.GetProcessOutputMixin, changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): usetimestamps = True branches = None bookmarks = None @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # To test that environment variables get propagated to subprocesses # (See #2116) os.environ[ENVIRON_2116_KEY] = 'TRUE' self.setUpGetProcessOutput() yield self.setUpChangeSource() self.remote_repo = 'ssh://example.com/foo/baz' self.remote_hgweb = 'http://example.com/foo/baz/rev/{}' self.repo_ready = True def _isRepositoryReady(): return self.repo_ready self.poller = hgpoller.HgPoller(self.remote_repo, usetimestamps=self.usetimestamps, workdir='/some/dir', branches=self.branches, bookmarks=self.bookmarks, revlink=lambda branch, revision: self.remote_hgweb.format(revision)) yield self.poller.setServiceParent(self.master) self.poller._isRepositoryReady = _isRepositoryReady yield self.master.db.setup() @defer.inlineCallbacks def check_current_rev(self, wished, branch='default'): rev = yield self.poller._getCurrentRev(branch) self.assertEqual(rev, str(wished)) class TestHgPollerBranches(TestHgPollerBase): branches = ['one', 'two'] @defer.inlineCallbacks def test_poll_initial(self): self.expectCommands( gpo.Expect('hg', 'pull', '-b', 'one', '-b', 'two', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'one', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b"73591"), gpo.Expect( 'hg', 'heads', '-r', 'two', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b"22341"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591, 'one') yield self.check_current_rev(22341, 'two') @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. 
# Let's say there was an intervening commit on an untracked branch, to # make it more interesting. self.expectCommands( gpo.Expect('hg', 'pull', '-b', 'one', '-b', 'two', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'one', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'6' + LINESEP_BYTES), gpo.Expect('hg', 'log', '-r', '4::6', '--template={rev}:{node}\\n') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'4:1aaa5', b'6:784bd', ])), gpo.Expect('hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment', b''])), gpo.Expect( 'hg', 'heads', '-r', 'two', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'3' + LINESEP_BYTES), ) yield self.poller._setCurrentRev(3, 'two') yield self.poller._setCurrentRev(4, 'one') yield self.poller.poll() yield self.check_current_rev(6, 'one') self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['revlink'], 'http://example.com/foo/baz/rev/784bd') self.assertEqual(change['comments'], 'Comment') class TestHgPollerBookmarks(TestHgPollerBase): bookmarks = ['one', 'two'] @defer.inlineCallbacks def test_poll_initial(self): self.expectCommands( gpo.Expect('hg', 'pull', '-B', 'one', '-B', 'two', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'one', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b"73591"), gpo.Expect( 'hg', 'heads', '-r', 'two', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b"22341"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591, 'one') yield self.check_current_rev(22341, 'two') @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. # Let's say there was an intervening commit on an untracked branch, to # make it more interesting. 
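        # The command expectations that follow mirror TestHgPollerBranches
        # above, except that bookmarks are pulled with 'hg pull -B <name>'
        # instead of 'hg pull -b <name>'; the heads/log parsing afterwards is
        # identical.  (Editor's note: in a real configuration this corresponds
        # to passing bookmarks=['one', 'two'] to HgPoller rather than
        # branches=[...], as set up in TestHgPollerBase.setUp above.)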
self.expectCommands( gpo.Expect('hg', 'pull', '-B', 'one', '-B', 'two', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'one', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'6' + LINESEP_BYTES), gpo.Expect('hg', 'log', '-r', '4::6', '--template={rev}:{node}\\n') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'4:1aaa5', b'6:784bd', ])), gpo.Expect('hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment', b''])), gpo.Expect( 'hg', 'heads', '-r', 'two', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'3' + LINESEP_BYTES), ) yield self.poller._setCurrentRev(3, 'two') yield self.poller._setCurrentRev(4, 'one') yield self.poller.poll() yield self.check_current_rev(6, 'one') self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment') class TestHgPoller(TestHgPollerBase): def tearDown(self): del os.environ[ENVIRON_2116_KEY] return self.tearDownChangeSource() def gpoFullcommandPattern(self, commandName, *expected_args): """Match if the command is commandName and arg list start as expected. This allows to test a bit more if expected GPO are issued, be it by obscure failures due to the result not being given. """ def matchesSubcommand(bin, given_args, **kwargs): return bin == commandName and tuple( given_args[:len(expected_args)]) == expected_args return matchesSubcommand def test_describe(self): self.assertSubstring("HgPoller", self.poller.describe()) def test_name(self): self.assertEqual(self.remote_repo, self.poller.name) # and one with explicit name... other = hgpoller.HgPoller( self.remote_repo, name="MyName", workdir='/some/dir') self.assertEqual("MyName", other.name) # and one with explicit branches... 
other = hgpoller.HgPoller( self.remote_repo, branches=["b1", "b2"], workdir='/some/dir') self.assertEqual(self.remote_repo + "_b1_b2", other.name) def test_hgbin_default(self): self.assertEqual(self.poller.hgbin, "hg") @defer.inlineCallbacks def test_poll_initial(self): self.repo_ready = False # Test that environment variables get propagated to subprocesses # (See #2116) expected_env = {ENVIRON_2116_KEY: 'TRUE'} self.addGetProcessOutputExpectEnv(expected_env) self.expectCommands( gpo.Expect('hg', 'init', '/some/dir'), gpo.Expect('hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'default', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b"73591"), ) # do the poll yield self.poller.poll() # check the results self.assertEqual(len(self.master.data.updates.changesAdded), 0) yield self.check_current_rev(73591) @defer.inlineCallbacks def test_poll_several_heads(self): # If there are several heads on the named branch, the poller mustn't # climb (good enough for now, ideally it should even go to the common # ancestor) self.expectCommands( gpo.Expect('hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'default', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'5' + LINESEP_BYTES + b'6' + LINESEP_BYTES) ) yield self.poller._setCurrentRev(3) # do the poll: we must stay at rev 3 yield self.poller.poll() yield self.check_current_rev(3) @defer.inlineCallbacks def test_poll_regular(self): # normal operation. There's a previous revision, we get a new one. self.expectCommands( gpo.Expect('hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'default', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'5' + LINESEP_BYTES), gpo.Expect('hg', 'log', '-r', '4::5', '--template={rev}:{node}\\n') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'4:1aaa5', b'5:784bd', ])), gpo.Expect('hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment for rev 5', b''])), ) yield self.poller._setCurrentRev(4) yield self.poller.poll() yield self.check_current_rev(5) self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment for rev 5') @defer.inlineCallbacks def test_poll_force_push(self): # There's a previous revision, but not linked with new rev self.expectCommands( gpo.Expect('hg', 'pull', '-b', 'default', 'ssh://example.com/foo/baz') .path('/some/dir'), gpo.Expect( 'hg', 'heads', '-r', 'default', '--template={rev}' + os.linesep) .path('/some/dir').stdout(b'5' + LINESEP_BYTES), gpo.Expect('hg', 'log', '-r', '4::5', '--template={rev}:{node}\\n') .path('/some/dir').stdout(b""), gpo.Expect('hg', 'log', '-r', '5', '--template={rev}:{node}\\n') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'5:784bd', ])), gpo.Expect('hg', 'log', '-r', '784bd', '--template={date|hgdate}' + os.linesep + '{author}' + os.linesep + "{files % '{file}" + os.pathsep + "'}" + os.linesep + '{desc|strip}') .path('/some/dir').stdout(LINESEP_BYTES.join([ b'1273258009.0 -7200', b'Joe Test ', b'file1 file2', b'Comment for rev 5', b''])), ) yield self.poller._setCurrentRev(4) yield 
self.poller.poll() yield self.check_current_rev(5) self.assertEqual(len(self.master.data.updates.changesAdded), 1) change = self.master.data.updates.changesAdded[0] self.assertEqual(change['revision'], '784bd') self.assertEqual(change['comments'], 'Comment for rev 5') class HgPollerNoTimestamp(TestHgPoller): """ Test HgPoller() without parsing revision commit timestamp """ usetimestamps = False buildbot-2.6.0/master/buildbot/test/unit/test_changes_mail.py000066400000000000000000000102001361162603000243720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import mail from buildbot.test.util import changesource from buildbot.test.util import dirs from buildbot.test.util.misc import TestReactorMixin class TestMaildirSource(changesource.ChangeSourceMixin, dirs.DirsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.maildir = os.path.abspath("maildir") yield self.setUpChangeSource() yield self.setUpDirs(self.maildir) def populateMaildir(self): "create a fake maildir with a fake new message ('newmsg') in it" newdir = os.path.join(self.maildir, "new") os.makedirs(newdir) curdir = os.path.join(self.maildir, "cur") os.makedirs(curdir) fake_message = "Subject: test\n\nthis is a test" mailfile = os.path.join(newdir, "newmsg") with open(mailfile, "w") as f: f.write(fake_message) def assertMailProcessed(self): self.assertFalse( os.path.exists(os.path.join(self.maildir, "new", "newmsg"))) self.assertTrue( os.path.exists(os.path.join(self.maildir, "cur", "newmsg"))) @defer.inlineCallbacks def tearDown(self): yield self.tearDownDirs() yield self.tearDownChangeSource() # tests def test_describe(self): mds = mail.MaildirSource(self.maildir) self.assertSubstring(self.maildir, mds.describe()) @defer.inlineCallbacks def test_messageReceived_svn(self): self.populateMaildir() mds = mail.MaildirSource(self.maildir) self.attachChangeSource(mds) # monkey-patch in a parse method def parse(message, prefix): assert 'this is a test' in message.get_payload() return ('svn', dict(author='jimmy')) mds.parse = parse yield mds.messageReceived('newmsg') self.assertMailProcessed() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'jimmy', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': None, 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 'svn', 'when_timestamp': None, }]) @defer.inlineCallbacks def test_messageReceived_bzr(self): self.populateMaildir() mds = mail.MaildirSource(self.maildir) self.attachChangeSource(mds) # monkey-patch in a parse method def parse(message, prefix): assert 'this is a test' in message.get_payload() return ('bzr', dict(author='jimmy')) mds.parse = parse yield 
mds.messageReceived('newmsg') self.assertMailProcessed() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'jimmy', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': None, 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 'bzr', 'when_timestamp': None, }]) buildbot-2.6.0/master/buildbot/test/unit/test_changes_mail_CVSMaildirSource.py000066400000000000000000000165111361162603000276030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from email import message_from_string from email.utils import mktime_tz from email.utils import parsedate_tz from twisted.trial import unittest from buildbot.changes.mail import CVSMaildirSource # # Sample message from CVS version 1.11 # cvs1_11_msg = """From: Andy Howell To: buildbot@example.com Subject: cvs module MyModuleName Date: Sat, 07 Aug 2010 11:11:49 +0000 X-Mailer: Python buildbot-cvs-mail $Revision: 1.3 $ Cvsmode: 1.11 Category: None CVSROOT: :ext:cvshost.example.com:/cvsroot Files: base/module/src/make GNUmakefile,1.362,1.363 Project: MyModuleName Update of /cvsroot/base/module/src/make In directory cvshost:/tmp/cvs-serv10922 Modified Files: GNUmakefile Log Message: Commented out some stuff. 
""" # # Sample message from CVS version 1.12 # # Paths are handled differently by the two versions # cvs1_12_msg = """Date: Wed, 11 Aug 2010 04:56:44 +0000 From: andy@example.com To: buildbot@example.com Subject: cvs update for project RaiCore X-Mailer: Python buildbot-cvs-mail $Revision: 1.3 $ Cvsmode: 1.12 Category: None CVSROOT: :ext:cvshost.example.com:/cvsroot Files: file1.cpp 1.77 1.78 file2.cpp 1.75 1.76 Path: base/module/src Project: MyModuleName Update of /cvsroot/base/module/src In directory example.com:/tmp/cvs-serv26648/InsightMonAgent Modified Files: file1.cpp file2.cpp Log Message: Changes for changes sake """ class TestCVSMaildirSource(unittest.TestCase): def test_CVSMaildirSource_create_change_from_cvs1_11msg(self): m = message_from_string(cvs1_11_msg) src = CVSMaildirSource('/dev/null') src, chdict = src.parse(m) self.assertNotEqual(chdict, None) self.assertEqual(chdict['author'], 'andy') self.assertEqual(len(chdict['files']), 1) self.assertEqual( chdict['files'][0], 'base/module/src/make/GNUmakefile') self.assertEqual(chdict['comments'], 'Commented out some stuff.\n') self.assertFalse(chdict['isdir']) self.assertEqual(chdict['revision'], '2010-08-07 11:11:49') dateTuple = parsedate_tz('Sat, 07 Aug 2010 11:11:49 +0000') self.assertEqual(chdict['when'], mktime_tz(dateTuple)) self.assertEqual(chdict['branch'], None) self.assertEqual( chdict['repository'], ':ext:cvshost.example.com:/cvsroot') self.assertEqual(chdict['project'], 'MyModuleName') self.assertEqual(len(chdict['properties']), 0) self.assertEqual(src, 'cvs') def test_CVSMaildirSource_create_change_from_cvs1_12msg(self): m = message_from_string(cvs1_12_msg) src = CVSMaildirSource('/dev/null') src, chdict = src.parse(m) self.assertNotEqual(chdict, None) self.assertEqual(chdict['author'], 'andy') self.assertEqual(len(chdict['files']), 2) self.assertEqual(chdict['files'][0], 'base/module/src/file1.cpp') self.assertEqual(chdict['files'][1], 'base/module/src/file2.cpp') self.assertEqual(chdict['comments'], 'Changes for changes sake\n') self.assertFalse(chdict['isdir']) self.assertEqual(chdict['revision'], '2010-08-11 04:56:44') dateTuple = parsedate_tz('Wed, 11 Aug 2010 04:56:44 +0000') self.assertEqual(chdict['when'], mktime_tz(dateTuple)) self.assertEqual(chdict['branch'], None) self.assertEqual( chdict['repository'], ':ext:cvshost.example.com:/cvsroot') self.assertEqual(chdict['project'], 'MyModuleName') self.assertEqual(len(chdict['properties']), 0) self.assertEqual(src, 'cvs') def test_CVSMaildirSource_create_change_from_cvs1_12_with_no_path(self): msg = cvs1_12_msg.replace('Path: base/module/src', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') try: assert src.parse(m)[1] except ValueError: pass else: self.fail('Expect ValueError.') def test_CVSMaildirSource_create_change_with_bad_cvsmode(self): # Branch is indicated after 'Tag:' in modified file list msg = cvs1_11_msg.replace('Cvsmode: 1.11', 'Cvsmode: 9.99') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') try: assert src.parse(m)[1] except ValueError: pass else: self.fail('Expected ValueError') def test_CVSMaildirSource_create_change_with_branch(self): # Branch is indicated after 'Tag:' in modified file list msg = cvs1_11_msg.replace(' GNUmakefile', ' Tag: Test_Branch\n GNUmakefile') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['branch'], 'Test_Branch') def test_CVSMaildirSource_create_change_with_category(self): msg = cvs1_11_msg.replace('Category: None', 
'Category: Test category') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['category'], 'Test category') def test_CVSMaildirSource_create_change_with_no_comment(self): # Strip off comments msg = cvs1_11_msg[:cvs1_11_msg.find('Commented out some stuff')] m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['comments'], None) def test_CVSMaildirSource_create_change_with_no_files(self): # A message with no files is likely not for us msg = cvs1_11_msg.replace( 'Files: base/module/src/make GNUmakefile,1.362,1.363', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m) self.assertEqual(chdict, None) def test_CVSMaildirSource_create_change_with_no_project(self): msg = cvs1_11_msg.replace('Project: MyModuleName', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['project'], None) def test_CVSMaildirSource_create_change_with_no_repository(self): msg = cvs1_11_msg.replace( 'CVSROOT: :ext:cvshost.example.com:/cvsroot', '') m = message_from_string(msg) src = CVSMaildirSource('/dev/null') chdict = src.parse(m)[1] self.assertEqual(chdict['repository'], None) def test_CVSMaildirSource_create_change_with_property(self): m = message_from_string(cvs1_11_msg) propDict = {'foo': 'bar'} src = CVSMaildirSource('/dev/null', properties=propDict) chdict = src.parse(m)[1] self.assertEqual(chdict['properties']['foo'], 'bar') buildbot-2.6.0/master/buildbot/test/unit/test_changes_manager.py000066400000000000000000000070001361162603000250660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import base from buildbot.changes import manager from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestChangeManager(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) self.cm = manager.ChangeManager() self.master.startService() yield self.cm.setServiceParent(self.master) self.new_config = mock.Mock() def tearDown(self): return self.master.stopService() def make_sources(self, n, klass=base.ChangeSource, **kwargs): for i in range(n): src = klass(name='ChangeSource %d' % i, **kwargs) yield src @defer.inlineCallbacks def test_reconfigService_add(self): src1, src2 = self.make_sources(2) yield src1.setServiceParent(self.cm) self.new_config.change_sources = [src1, src2] yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(src2.parent, self.cm) self.assertIdentical(src2.master, self.master) @defer.inlineCallbacks def test_reconfigService_remove(self): src1, = self.make_sources(1) yield src1.setServiceParent(self.cm) self.new_config.change_sources = [] self.assertTrue(src1.running) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertFalse(src1.running) @defer.inlineCallbacks def test_reconfigService_change_reconfigurable(self): src1, = self.make_sources(1, base.ReconfigurablePollingChangeSource, pollInterval=1) yield src1.setServiceParent(self.cm) src2, = self.make_sources(1, base.ReconfigurablePollingChangeSource, pollInterval=2) self.new_config.change_sources = [src2] self.assertTrue(src1.running) self.assertEqual(src1.pollInterval, 1) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertTrue(src1.running) self.assertFalse(src2.running) self.assertEqual(src1.pollInterval, 2) @defer.inlineCallbacks def test_reconfigService_change_legacy(self): src1, = self.make_sources(1, base.PollingChangeSource, pollInterval=1) yield src1.setServiceParent(self.cm) src2, = self.make_sources(1, base.PollingChangeSource, pollInterval=2) self.new_config.change_sources = [src2] self.assertTrue(src1.running) self.assertEqual(src1.pollInterval, 1) yield self.cm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertFalse(src1.running) self.assertTrue(src2.running) self.assertEqual(src2.pollInterval, 2) buildbot-2.6.0/master/buildbot/test/unit/test_changes_p4poller.py000066400000000000000000000465541361162603000252360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import dateutil.tz from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.changes.p4poller import P4PollerError from buildbot.changes.p4poller import P4Source from buildbot.changes.p4poller import get_simple_split from buildbot.test.util import changesource from buildbot.test.util import config from buildbot.test.util import gpo from buildbot.test.util.misc import TestReactorMixin from buildbot.util import datetime2epoch first_p4changes = \ b"""Change 1 on 2006/04/13 by slamb@testclient 'first rev' """ second_p4changes = \ b"""Change 3 on 2006/04/13 by bob@testclient 'short desc truncated' Change 2 on 2006/04/13 by slamb@testclient 'bar' """ third_p4changes = \ b"""Change 5 on 2006/04/13 by mpatel@testclient 'first rev' """ fourth_p4changes = \ b"""Change 6 on 2006/04/14 by mpatel@testclient 'bar \xd0\x91' """ p4_describe_2 = \ b"""Change 2 by slamb@testclient on 2006/04/13 21:46:23 \tcreation Affected files ... ... //depot/myproject/trunk/whatbranch#1 add ... //depot/otherproject/trunk/something#1 add """ p4_describe_3 = \ """Change 3 by bob@testclient on 2006/04/13 21:51:39 \tshort desc truncated because this is a long description. \tASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes. Affected files ... ... //depot/myproject/branch_b/branch_b_file#1 add ... //depot/myproject/branch_b/whatbranch#1 branch ... //depot/myproject/branch_c/whatbranch#1 branch """ p4_describe_4 = \ b"""Change 4 by mpatel@testclient on 2006/04/13 21:55:39 \tThis is a multiline comment with tabs and spaces \t \tA list: \t Item 1 \t\tItem 2 Affected files ... ... //depot/myproject/branch_b/branch_b_file#1 add ... //depot/myproject/branch_b#75 edit ... 
//depot/myproject/branch_c/branch_c_file#1 add """ p4change = { 3: p4_describe_3, 2: p4_describe_2, 5: p4_describe_4, } class FakeTransport: def __init__(self): self.msg = None def write(self, msg): self.msg = msg def closeStdin(self): pass class TestP4Poller(changesource.ChangeSourceMixin, gpo.GetProcessOutputMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpGetProcessOutput() return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def add_p4_describe_result(self, number, result): self.expectCommands( gpo.Expect('p4', 'describe', '-s', str(number)).stdout(result)) def makeTime(self, timestring): datefmt = '%Y/%m/%d %H:%M:%S' when = datetime.datetime.strptime(timestring, datefmt) return when # tests def test_describe(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.assertSubstring("p4source", self.changesource.describe()) def test_name(self): # no name: cs1 = P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1)) self.assertEqual("P4Source:None://depot/myproject/", cs1.name) # explicit name: cs2 = P4Source(p4port=None, p4user=None, name='MyName', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1)) self.assertEqual("MyName", cs2.name) @defer.inlineCallbacks def do_test_poll_successful(self, **kwargs): encoding = kwargs.get('encoding', 'utf8') self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), **kwargs)) self.expectCommands( gpo.Expect( 'p4', 'changes', '-m', '1', '//depot/myproject/...').stdout(first_p4changes), gpo.Expect( 'p4', 'changes', '//depot/myproject/...@2,#head').stdout(second_p4changes), ) encoded_p4change = p4change.copy() encoded_p4change[3] = encoded_p4change[3].encode(encoding) self.add_p4_describe_result(2, encoded_p4change[2]) self.add_p4_describe_result(3, encoded_p4change[3]) # The first time, it just learns the change to start at. self.assertTrue(self.changesource.last_change is None) yield self.changesource.poll() self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(self.changesource.last_change, 1) # Subsequent times, it returns Change objects for new changes. yield self.changesource.poll() # when_timestamp is converted from a local time spec, so just # replicate that here when1 = self.makeTime("2006/04/13 21:46:23") when2 = self.makeTime("2006/04/13 21:51:39") # these two can happen in either order, since they're from the same # perforce change. 
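        # (Editor's note) Changelist 3 touches files under both branch_b and
        # branch_c, and split_file() turns each depot path into a
        # (branch, file) pair, so the poller emits one Change per branch for
        # that single Perforce change.  Branch iteration order is not
        # guaranteed, hence the order normalization below before comparing.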
changesAdded = self.master.data.updates.changesAdded if changesAdded[1]['branch'] == 'branch_c': changesAdded[1:] = reversed(changesAdded[1:]) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'slamb', 'committer': None, 'branch': 'trunk', 'category': None, 'codebase': None, 'comments': 'creation', 'files': ['whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '2', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when1), }, { 'author': 'bob', 'committer': None, 'branch': 'branch_b', 'category': None, 'codebase': None, 'comments': 'short desc truncated because this is a long description.\n' 'ASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes.', 'files': ['branch_b_file', 'whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '3', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when2), }, { 'author': 'bob', 'committer': None, 'branch': 'branch_c', 'category': None, 'codebase': None, 'comments': 'short desc truncated because this is a long description.\n' 'ASDF-GUI-P3-\u2018Upgrade Icon\u2019 disappears sometimes.', 'files': ['whatbranch'], 'project': '', 'properties': {}, 'repository': '', 'revision': '3', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when2), }]) self.assertAllCommandsRan() def test_poll_successful_default_encoding(self): return self.do_test_poll_successful() def test_poll_successful_macroman_encoding(self): return self.do_test_poll_successful(encoding='macroman') def test_poll_failed_changes(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expectCommands( gpo.Expect('p4', 'changes', '-m', '1', '//depot/myproject/...').stdout(b'Perforce client error:\n...')) # call _poll, so we can catch the failure d = self.changesource._poll() return self.assertFailure(d, P4PollerError) @defer.inlineCallbacks def test_poll_failed_describe(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expectCommands( gpo.Expect( 'p4', 'changes', '//depot/myproject/...@3,#head').stdout(second_p4changes), ) self.add_p4_describe_result(2, p4change[2]) self.add_p4_describe_result(3, b'Perforce client error:\n...') # tell poll() that it's already been called once self.changesource.last_change = 2 # call _poll, so we can catch the failure with self.assertRaises(P4PollerError): yield self.changesource._poll() # check that 2 was processed OK self.assertEqual(self.changesource.last_change, 2) self.assertAllCommandsRan() def test_poll_unicode_error(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1))) self.expectCommands( gpo.Expect( 'p4', 'changes', '//depot/myproject/...@3,#head').stdout(second_p4changes), ) # Add a character which cannot be decoded with utf-8 undecodableText = p4change[2] + b"\x81" self.add_p4_describe_result(2, undecodableText) # tell poll() that it's already been called once self.changesource.last_change = 2 # call _poll, so we can catch the failure d = self.changesource._poll() return self.assertFailure(d, UnicodeError) def test_poll_unicode_error2(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), encoding='ascii')) # Trying to decode a certain character with ascii codec should fail. 
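        # (Editor's note) fourth_p4changes, returned below, contains the raw
        # UTF-8 bytes b'\xd0\x91' (a Cyrillic letter), which the 'ascii'
        # codec configured above cannot decode.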
self.expectCommands( gpo.Expect( 'p4', 'changes', '-m', '1', '//depot/myproject/...').stdout(fourth_p4changes), ) d = self.changesource._poll() return d @defer.inlineCallbacks def test_acquire_ticket_auth(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) self.expectCommands( gpo.Expect('p4', '-P', 'TICKET_ID_GOES_HERE', 'changes', '-m', '1', '//depot/myproject/...').stdout(first_p4changes) ) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'login', b'-p']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'Enter password:\nSuccess: Password verified.\nTICKET_ID_GOES_HERE\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) yield self.changesource.poll() self.assertEqual( self.changesource._ticket_passwd, 'TICKET_ID_GOES_HERE') @defer.inlineCallbacks def test_acquire_ticket_auth2(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) self.expectCommands( gpo.Expect('p4', '-P', 'TICKET_ID_GOES_HERE', 'changes', '-m', '1', '//depot/myproject/...').stdout(first_p4changes) ) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'login', b'-p']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'Enter password:\nTICKET_ID_GOES_HERE\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) yield self.changesource.poll() self.assertEqual( self.changesource._ticket_passwd, 'TICKET_ID_GOES_HERE') @defer.inlineCallbacks def test_acquire_ticket_auth2_fail(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) self.expectCommands( gpo.Expect('p4', '-P', None, 'changes', '-m', '1', '//depot/myproject/...').stdout(first_p4changes) ) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'login', b'-p']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'Enter password:\n') pp.errReceived(b"Password invalid.\n'auth-check' validation failed: Incorrect password!\n") so = error.ProcessDone(status=1) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) yield self.changesource.poll() self.assertEqual( self.changesource._ticket_passwd, None) @defer.inlineCallbacks def test_acquire_ticket_auth_invalid_encoding(self): self.attachChangeSource( P4Source(p4port=None, p4user=None, p4passwd='pass', p4base='//depot/myproject/', split_file=lambda x: x.split('/', 1), use_tickets=True)) transport = FakeTransport() # p4poller uses only those arguments at the moment def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], ['p4', [b'p4', b'login', b'-p']]) pp.makeConnection(transport) self.assertEqual(b'pass\n', transport.msg) pp.outReceived(b'\xff\xff\xff\xff\xff') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', 
spawnProcess) yield self.changesource.poll() errors = self.flushLoggedErrors(P4PollerError) self.assertEqual(len(errors), 1) self.assertIn('\'utf-8\' codec can\'t decode byte 0xff', errors[0].getErrorMessage()) self.assertIn('Failed to parse P4 ticket', errors[0].getErrorMessage()) @defer.inlineCallbacks def test_poll_split_file(self): """Make sure split file works on branch only changes""" self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=get_simple_split)) self.expectCommands( gpo.Expect( 'p4', 'changes', '//depot/myproject/...@51,#head').stdout(third_p4changes), ) self.add_p4_describe_result(5, p4change[5]) self.changesource.last_change = 50 yield self.changesource.poll() # when_timestamp is converted from a local time spec, so just # replicate that here when = self.makeTime("2006/04/13 21:55:39") def changeKey(change): """ Let's sort the array of changes by branch, because in P4Source._poll(), changeAdded() is called by iterating over a dictionary of branches""" return change['branch'] self.assertEqual(sorted(self.master.data.updates.changesAdded, key=changeKey), sorted([{ 'author': 'mpatel', 'committer': None, 'branch': 'branch_c', 'category': None, 'codebase': None, 'comments': 'This is a multiline comment with tabs and spaces\n\nA list:\n Item 1\n\tItem 2', 'files': ['branch_c_file'], 'project': '', 'properties': {}, 'repository': '', 'revision': '5', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when), }, { 'author': 'mpatel', 'committer': None, 'branch': 'branch_b', 'category': None, 'codebase': None, 'comments': 'This is a multiline comment with tabs and spaces\n\nA list:\n Item 1\n\tItem 2', 'files': ['branch_b_file'], 'project': '', 'properties': {}, 'repository': '', 'revision': '5', 'revlink': '', 'src': None, 'when_timestamp': datetime2epoch(when), }], key=changeKey)) self.assertEqual(self.changesource.last_change, 5) self.assertAllCommandsRan() @defer.inlineCallbacks def test_server_tz(self): """Verify that the server_tz parameter is handled correctly""" self.attachChangeSource( P4Source(p4port=None, p4user=None, p4base='//depot/myproject/', split_file=get_simple_split, server_tz="Europe/Berlin")) self.expectCommands( gpo.Expect( 'p4', 'changes', '//depot/myproject/...@51,#head').stdout(third_p4changes), ) self.add_p4_describe_result(5, p4change[5]) self.changesource.last_change = 50 yield self.changesource.poll() # when_timestamp is converted from 21:55:39 Berlin time to UTC when_berlin = self.makeTime("2006/04/13 21:55:39") when_berlin = when_berlin.replace( tzinfo=dateutil.tz.gettz('Europe/Berlin')) when = datetime2epoch(when_berlin) self.assertEqual([ch['when_timestamp'] for ch in self.master.data.updates.changesAdded], [when, when]) self.assertAllCommandsRan() def test_resolveWho_callable(self): with self.assertRaisesConfigError( "You need to provide a valid callable for resolvewho"): P4Source(resolvewho=None) class TestSplit(unittest.TestCase): def test_get_simple_split(self): self.assertEqual(get_simple_split('foo/bar'), ('foo', 'bar')) self.assertEqual(get_simple_split('foo-bar'), (None, None)) self.assertEqual(get_simple_split('/bar'), ('', 'bar')) self.assertEqual(get_simple_split('foo/'), ('foo', '')) buildbot-2.6.0/master/buildbot/test/unit/test_changes_pb.py000066400000000000000000000374051361162603000240710ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.changes import pb from buildbot.test.fake import fakemaster from buildbot.test.util import changesource from buildbot.test.util import pbmanager from buildbot.test.util.misc import TestReactorMixin class TestPBChangeSource(changesource.ChangeSourceMixin, pbmanager.PBManagerMixin, TestReactorMixin, unittest.TestCase): DEFAULT_CONFIG = dict(port='9999', user='alice', passwd='sekrit', name=changesource.ChangeSourceMixin.DEFAULT_NAME) EXP_DEFAULT_REGISTRATION = ('9999', 'alice', 'sekrit') @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpPBChangeSource() yield self.setUpChangeSource() self.master.pbmanager = self.pbmanager def test_registration_no_workerport(self): return self._test_registration(None, exp_ConfigErrors=True, user='alice', passwd='sekrit') def test_registration_global_workerport(self): return self._test_registration(self.EXP_DEFAULT_REGISTRATION, **self.DEFAULT_CONFIG) def test_registration_custom_port(self): return self._test_registration(('8888', 'alice', 'sekrit'), user='alice', passwd='sekrit', port='8888') def test_registration_no_userpass(self): return self._test_registration(('9939', 'change', 'changepw'), workerPort='9939') def test_registration_no_userpass_no_global(self): return self._test_registration(None, exp_ConfigErrors=True) def test_no_registration_if_master_already_claimed(self): # claim the CS on another master... self.setChangeSourceToMaster(self.OTHER_MASTER_ID) # and then use the same args as one of the above success cases, # but expect that it will NOT register return self._test_registration(None, **self.DEFAULT_CONFIG) def test_registration_later_if_master_can_do_it(self): # get the changesource running but not active due to the other master self.setChangeSourceToMaster(self.OTHER_MASTER_ID) self.attachChangeSource(pb.PBChangeSource(**self.DEFAULT_CONFIG)) self.startChangeSource() self.assertNotRegistered() # other master goes away self.setChangeSourceToMaster(None) # not quite enough time to cause it to activate self.changesource.clock.advance( self.changesource.POLL_INTERVAL_SEC * 4 / 5) self.assertNotRegistered() # there we go! self.changesource.clock.advance( self.changesource.POLL_INTERVAL_SEC * 2 / 5) self.assertRegistered(*self.EXP_DEFAULT_REGISTRATION) @defer.inlineCallbacks def _test_registration(self, exp_registration, exp_ConfigErrors=False, workerPort=None, **constr_kwargs): cfg = mock.Mock() cfg.protocols = {'pb': {'port': workerPort}} self.attachChangeSource(pb.PBChangeSource(**constr_kwargs)) self.startChangeSource() if exp_ConfigErrors: # if it's not registered, it should raise a ConfigError. 
try: yield self.changesource.reconfigServiceWithBuildbotConfig(cfg) except config.ConfigErrors: pass else: self.fail("Expected ConfigErrors") else: yield self.changesource.reconfigServiceWithBuildbotConfig(cfg) if exp_registration: self.assertRegistered(*exp_registration) yield self.stopChangeSource() if exp_registration: self.assertUnregistered(*exp_registration) self.assertEqual(self.changesource.registration, None) def test_perspective(self): self.attachChangeSource( pb.PBChangeSource('alice', 'sekrit', port='8888')) persp = self.changesource.getPerspective(mock.Mock(), 'alice') self.assertIsInstance(persp, pb.ChangePerspective) def test_describe(self): cs = pb.PBChangeSource() self.assertSubstring("PBChangeSource", cs.describe()) def test_name(self): cs = pb.PBChangeSource(port=1234) self.assertEqual("PBChangeSource:1234", cs.name) cs = pb.PBChangeSource(port=1234, prefix="pre") self.assertEqual("PBChangeSource:pre:1234", cs.name) # explicit name: cs = pb.PBChangeSource(name="MyName") self.assertEqual("MyName", cs.name) def test_describe_prefix(self): cs = pb.PBChangeSource(prefix="xyz") self.assertSubstring("PBChangeSource", cs.describe()) self.assertSubstring("xyz", cs.describe()) def test_describe_int(self): cs = pb.PBChangeSource(port=9989) self.assertSubstring("PBChangeSource", cs.describe()) @defer.inlineCallbacks def test_reconfigService_no_change(self): config = mock.Mock() self.attachChangeSource(pb.PBChangeSource(port='9876')) self.startChangeSource() yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertRegistered('9876', 'change', 'changepw') yield self.stopChangeSource() self.assertUnregistered('9876', 'change', 'changepw') @defer.inlineCallbacks def test_reconfigService_default_changed(self): config = mock.Mock() config.protocols = {'pb': {'port': '9876'}} self.attachChangeSource(pb.PBChangeSource()) self.startChangeSource() yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertRegistered('9876', 'change', 'changepw') config.protocols = {'pb': {'port': '1234'}} yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertUnregistered('9876', 'change', 'changepw') self.assertRegistered('1234', 'change', 'changepw') yield self.stopChangeSource() self.assertUnregistered('1234', 'change', 'changepw') @defer.inlineCallbacks def test_reconfigService_default_changed_but_inactive(self): """reconfig one that's not active on this master""" config = mock.Mock() config.protocols = {'pb': {'port': '9876'}} self.attachChangeSource(pb.PBChangeSource()) self.setChangeSourceToMaster(self.OTHER_MASTER_ID) self.startChangeSource() yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertNotRegistered() config.protocols = {'pb': {'port': '1234'}} yield self.changesource.reconfigServiceWithBuildbotConfig(config) self.assertNotRegistered() yield self.stopChangeSource() self.assertNotRegistered() self.assertNotUnregistered() class TestChangePerspective(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) @defer.inlineCallbacks def test_addChange_noprefix(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="bar", files=['a'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 
'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_codebase(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="bar", files=[], codebase='cb')) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 'category': None, 'codebase': 'cb', 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_prefix(self): cp = pb.ChangePerspective(self.master, 'xx/') yield cp.perspective_addChange( dict(who="bar", files=['xx/a', 'yy/b'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'bar', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_sanitize_None(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(project=None, revlink=None, repository=None) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_when_None(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(when=None) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_files_tuple(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange( dict(files=('a', 'b')) ) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': None, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a', 'b'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_unicode(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(author="\N{SNOWMAN}", comments="\N{SNOWMAN}", files=['\N{VERY MUCH GREATER-THAN}'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': '\u2603', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': '\u2603', 'files': ['\u22d9'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_unicode_as_bytestring(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(author="\N{SNOWMAN}".encode('utf8'), comments="\N{SNOWMAN}".encode( 'utf8'), files=['\N{VERY MUCH GREATER-THAN}'.encode('utf8')])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': '\u2603', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': '\u2603', 'files': ['\u22d9'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 
'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_non_utf8_bytestring(self): cp = pb.ChangePerspective(self.master, None) bogus_utf8 = b'\xff\xff\xff\xff' replacement = bogus_utf8.decode('utf8', 'replace') yield cp.perspective_addChange(dict(author=bogus_utf8, files=['a'])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': replacement, 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': ['a'], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': None, }]) @defer.inlineCallbacks def test_addChange_old_param_names(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who='me', when=1234, files=[])) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'me', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': None, 'when_timestamp': 1234, }]) @defer.inlineCallbacks def test_createUserObject_git_src(self): cp = pb.ChangePerspective(self.master, None) yield cp.perspective_addChange(dict(who="c ", src='git')) self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'c ', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': None, 'files': [], 'project': '', 'properties': {}, 'repository': '', 'revision': None, 'revlink': '', 'src': 'git', 'when_timestamp': None, }]) buildbot-2.6.0/master/buildbot/test/unit/test_changes_svnpoller.py000066400000000000000000000607651361162603000255210ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import xml.dom.minidom from twisted.internet import defer from twisted.trial import unittest from buildbot.changes import svnpoller from buildbot.process.properties import Interpolate from buildbot.test.util import changesource from buildbot.test.util import gpo from buildbot.test.util.misc import TestReactorMixin # this is the output of "svn info --xml # svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" prefix_output = b"""\ svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk svn+ssh://svn.twistedmatrix.com/svn/Twisted bbbe8e31-12d6-0310-92fd-ac37d47ddeeb jml 2006-10-01T02:37:34.063255Z """ # and this is "svn info --xml svn://svn.twistedmatrix.com/svn/Twisted". I # think this is kind of a degenerate case.. it might even be a form of error. 
prefix_output_2 = b"""\ """ # this is the svn info output for a local repository, svn info --xml # file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository prefix_output_3 = b"""\ file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f warner 2006-10-01T07:37:04.182499Z """ # % svn info --xml file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk prefix_output_4 = b"""\ file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk file:///home/warner/stuff/Projects/Buildbot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f warner 2006-10-01T07:37:02.286440Z """ # output from svn log on .../SVN-Repository/sample # (so it includes trunk and branches) sample_base = ("file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/" + "_trial_temp/test_vc/repositories/SVN-Repository/sample") sample_logentries = [None] * 6 sample_logentries[5] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/branch/version.c revised_to_2 """ sample_logentries[4] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/branch revised_to_2 """ sample_logentries[3] = b"""\ warner 2006-10-01T19:35:16.165664Z /sample/trunk/version.c revised_to_2 """ sample_logentries[2] = b"""\ warner 2006-10-01T19:35:10.215692Z /sample/branch/c\xcc\xa7main.c commit_on_branch """ sample_logentries[1] = b"""\ warner 2006-10-01T19:35:09.154973Z /sample/branch make_branch """ sample_logentries[0] = b"""\ warner 2006-10-01T19:35:08.642045Z /sample /sample/trunk /sample/trunk/subdir/subdir.c /sample/trunk/main.c /sample/trunk/version.c /sample/trunk/subdir sample_project_files """ sample_info_output = b"""\ file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository 4f94adfc-c41e-0410-92d5-fbf86b7c7689 warner 2006-10-01T19:35:16.165664Z """ def make_changes_output(maxrevision): # return what 'svn log' would have just after the given revision was # committed logs = sample_logentries[0:maxrevision] assert len(logs) == maxrevision logs.reverse() output = (b""" """ + b"".join(logs) + b"") return output def make_logentry_elements(maxrevision): "return the corresponding logentry elements for the given revisions" doc = xml.dom.minidom.parseString(make_changes_output(maxrevision)) return doc.getElementsByTagName("logentry") def split_file(path): pieces = path.split("/") if pieces[0] == "branch": return dict(branch="branch", path="/".join(pieces[1:])) if pieces[0] == "trunk": return dict(path="/".join(pieces[1:])) raise RuntimeError("there shouldn't be any files like %r" % path) class TestSVNPoller(gpo.GetProcessOutputMixin, changesource.ChangeSourceMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpGetProcessOutput() return self.setUpChangeSource() def tearDown(self): return self.tearDownChangeSource() def attachSVNPoller(self, *args, **kwargs): s = svnpoller.SVNPoller(*args, **kwargs) self.attachChangeSource(s) return s def add_svn_command_result(self, command, result): self.expectCommands( gpo.Expect('svn', command).stdout(result)) # 
tests def test_describe(self): s = self.attachSVNPoller('file://') self.assertSubstring("SVNPoller", s.describe()) def test_name(self): s = self.attachSVNPoller('file://') self.assertEqual("file://", s.name) s = self.attachSVNPoller('file://', name='MyName') self.assertEqual("MyName", s.name) def test_strip_repourl(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" s = self.attachSVNPoller(base + "/") self.assertEqual(s.repourl, base) @defer.inlineCallbacks def do_test_get_prefix(self, base, output, expected): s = self.attachSVNPoller(base) self.expectCommands( gpo.Expect('svn', 'info', '--xml', '--non-interactive', base).stdout(output)) prefix = yield s.get_prefix() self.assertEqual(prefix, expected) self.assertAllCommandsRan() def test_get_prefix_1(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" return self.do_test_get_prefix(base, prefix_output, 'trunk') def test_get_prefix_2(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted" return self.do_test_get_prefix(base, prefix_output_2, '') def test_get_prefix_3(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository") return self.do_test_get_prefix(base, prefix_output_3, '') def test_get_prefix_4(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk") return self.do_test_get_prefix(base, prefix_output_3, 'sample/trunk') def test_log_parsing(self): s = self.attachSVNPoller('file:///foo') output = make_changes_output(4) entries = s.parse_logs(output) # no need for elaborate assertions here; this is minidom's logic self.assertEqual(len(entries), 4) def test_get_new_logentries(self): s = self.attachSVNPoller('file:///foo') entries = make_logentry_elements(4) s.last_change = 4 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 0) s.last_change = 3 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 1) s.last_change = 1 new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 3) # special case: if last_change is None, then no new changes are queued s.last_change = None new = s.get_new_logentries(entries) self.assertEqual(s.last_change, 4) self.assertEqual(len(new), 0) def test_get_text(self): doc = xml.dom.minidom.parseString(""" hi 1 2 """.strip()) s = self.attachSVNPoller('http://', split_file=split_file) self.assertEqual(s._get_text(doc, 'grandchild'), '1') self.assertEqual(s._get_text(doc, 'nonexistent'), 'unknown') def test_create_changes(self): base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample") s = self.attachSVNPoller(base, split_file=split_file) s._prefix = "sample" logentries = dict( zip(range(1, 7), reversed(make_logentry_elements(6)))) changes = s.create_changes(reversed([logentries[3], logentries[2]])) self.assertEqual(len(changes), 2) # note that parsing occurs in reverse self.assertEqual(changes[0]['branch'], "branch") self.assertEqual(changes[0]['revision'], '2') self.assertEqual(changes[0]['project'], '') self.assertEqual(changes[0]['repository'], base) self.assertEqual(changes[1]['branch'], "branch") self.assertEqual(changes[1]['files'], ["çmain.c"]) self.assertEqual(changes[1]['revision'], '3') self.assertEqual(changes[1]['project'], '') self.assertEqual(changes[1]['repository'], base) changes = 
s.create_changes([logentries[4]]) self.assertEqual(len(changes), 1) self.assertEqual(changes[0]['branch'], None) self.assertEqual(changes[0]['revision'], '4') self.assertEqual(changes[0]['files'], ["version.c"]) # r5 should *not* create a change as it's a branch deletion changes = s.create_changes([logentries[5]]) self.assertEqual(len(changes), 0) # r6 should create a change as it's not deleting an entire branch changes = s.create_changes([logentries[6]]) self.assertEqual(len(changes), 1) self.assertEqual(changes[0]['branch'], 'branch') self.assertEqual(changes[0]['revision'], '6') self.assertEqual(changes[0]['files'], ["version.c"]) def makeInfoExpect(self, password='bbrocks'): args = ['svn', 'info', '--xml', '--non-interactive', sample_base, '--username=dustin'] if password is not None: args.append('--password=' + password) return gpo.Expect(*args) def makeLogExpect(self, password='bbrocks'): args = ['svn', 'log', '--xml', '--verbose', '--non-interactive', '--username=dustin'] if password is not None: args.append('--password=' + password) args.extend(['--limit=100', sample_base]) return gpo.Expect(*args) def test_create_changes_overridden_project(self): def custom_split_file(path): f = split_file(path) if f: f["project"] = "overridden-project" f["repository"] = "overridden-repository" f["codebase"] = "overridden-codebase" return f base = ("file:///home/warner/stuff/Projects/Buildbot/trees/" + "svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample") s = self.attachSVNPoller(base, split_file=custom_split_file) s._prefix = "sample" logentries = dict( zip(range(1, 7), reversed(make_logentry_elements(6)))) changes = s.create_changes(reversed([logentries[3], logentries[2]])) self.assertEqual(len(changes), 2) # note that parsing occurs in reverse self.assertEqual(changes[0]['branch'], "branch") self.assertEqual(changes[0]['revision'], '2') self.assertEqual(changes[0]['project'], "overridden-project") self.assertEqual(changes[0]['repository'], "overridden-repository") self.assertEqual(changes[0]['codebase'], "overridden-codebase") self.assertEqual(changes[1]['branch'], "branch") self.assertEqual(changes[1]['files'], ['çmain.c']) self.assertEqual(changes[1]['revision'], '3') self.assertEqual(changes[1]['project'], "overridden-project") self.assertEqual(changes[1]['repository'], "overridden-repository") self.assertEqual(changes[1]['codebase'], "overridden-codebase") @defer.inlineCallbacks def test_poll(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') self.expectCommands( self.makeInfoExpect().stdout(sample_info_output), self.makeLogExpect().stdout(make_changes_output(1)), self.makeLogExpect().stdout(make_changes_output(1)), self.makeLogExpect().stdout(make_changes_output(2)), self.makeLogExpect().stdout(make_changes_output(4)), ) # fire it the first time; it should do nothing yield s.poll() # no changes generated on the first iteration self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(s.last_change, 1) # now fire it again, nothing changing yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, []) self.assertEqual(s.last_change, 1) # and again, with r2 this time yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'warner', 'committer': None, 'branch': 'branch', 'category': None, 'codebase': None, 'comments': 'make_branch', 'files': [''], 'project': '', 'properties': {}, 'repository': 
'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', 'revision': '2', 'revlink': '', 'src': 'svn', 'when_timestamp': None, }]) self.assertEqual(s.last_change, 2) # and again with both r3 and r4 appearing together self.master.data.updates.changesAdded = [] yield s.poll() self.assertEqual(self.master.data.updates.changesAdded, [{ 'author': 'warner', 'committer': None, 'branch': 'branch', 'category': None, 'codebase': None, 'comments': 'commit_on_branch', 'files': ['çmain.c'], 'project': '', 'properties': {}, 'repository': 'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', 'revision': '3', 'revlink': '', 'src': 'svn', 'when_timestamp': None, }, { 'author': 'warner', 'committer': None, 'branch': None, 'category': None, 'codebase': None, 'comments': 'revised_to_2', 'files': ['version.c'], 'project': '', 'properties': {}, 'repository': 'file:///usr/home/warner/stuff/Projects/Buildbot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample', 'revision': '4', 'revlink': '', 'src': 'svn', 'when_timestamp': None, }]) self.assertEqual(s.last_change, 4) self.assertAllCommandsRan() def test_poll_empty_password(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='') self.expectCommands( self.makeInfoExpect(password="").stdout(sample_info_output), self.makeLogExpect(password="").stdout(make_changes_output(1)), self.makeLogExpect(password="").stdout(make_changes_output(1)), self.makeLogExpect(password="").stdout(make_changes_output(2)), self.makeLogExpect(password="").stdout(make_changes_output(4)), ) s.poll() def test_poll_no_password(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin') self.expectCommands( self.makeInfoExpect(password=None).stdout(sample_info_output), self.makeLogExpect(password=None).stdout(make_changes_output(1)), self.makeLogExpect(password=None).stdout(make_changes_output(1)), self.makeLogExpect(password=None).stdout(make_changes_output(2)), self.makeLogExpect(password=None).stdout(make_changes_output(4)), ) s.poll() def test_poll_interpolated_password(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd=Interpolate('pa$$')) self.expectCommands( self.makeInfoExpect(password='pa$$').stdout(sample_info_output), self.makeLogExpect(password='pa$$').stdout(make_changes_output(1)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(1)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(2)), self.makeLogExpect(password='pa$$').stdout(make_changes_output(4)), ) s.poll() @defer.inlineCallbacks def test_poll_get_prefix_exception(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') self.expectCommands( self.makeInfoExpect().stderr(b"error")) yield s.poll() # should have logged the RuntimeError, but not errback'd from poll self.assertEqual(len(self.flushLoggedErrors(IOError)), 1) self.assertAllCommandsRan() @defer.inlineCallbacks def test_poll_get_logs_exception(self): s = self.attachSVNPoller(sample_base, split_file=split_file, svnuser='dustin', svnpasswd='bbrocks') s._prefix = "abc" # skip the get_prefix stuff self.expectCommands( self.makeLogExpect().stderr(b"some error")) yield s.poll() # should have logged the RuntimeError, but not errback'd from poll self.assertEqual(len(self.flushLoggedErrors(IOError)), 1) self.assertAllCommandsRan() def 
test_cachepath_empty(self): cachepath = os.path.abspath('revcache') if os.path.exists(cachepath): os.unlink(cachepath) s = self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, None) def test_cachepath_full(self): cachepath = os.path.abspath('revcache') with open(cachepath, "w") as f: f.write('33') s = self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, 33) s.last_change = 44 s.finished_ok(None) with open(cachepath) as f: self.assertEqual(f.read().strip(), '44') def test_cachepath_bogus(self): cachepath = os.path.abspath('revcache') with open(cachepath, "w") as f: f.write('nine') s = self.attachSVNPoller(sample_base, cachepath=cachepath) self.assertEqual(s.last_change, None) self.assertEqual(s.cachepath, None) # it should have called log.err once with a ValueError self.assertEqual(len(self.flushLoggedErrors(ValueError)), 1) def test_constructor_pollinterval(self): self.attachSVNPoller(sample_base, pollinterval=100) # just don't fail! def test_extra_args(self): extra_args = ['--no-auth-cache', ] base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" s = self.attachSVNPoller(repourl=base, extra_args=extra_args) self.assertEqual(s.extra_args, extra_args) def test_use_svnurl(self): base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk" with self.assertRaises(TypeError): self.attachSVNPoller(svnurl=base) class TestSplitFile(unittest.TestCase): def test_split_file_alwaystrunk(self): self.assertEqual( svnpoller.split_file_alwaystrunk('foo'), dict(path='foo')) def test_split_file_branches_trunk(self): self.assertEqual( svnpoller.split_file_branches('trunk/'), (None, '')) def test_split_file_branches_trunk_subdir(self): self.assertEqual( svnpoller.split_file_branches('trunk/subdir/'), (None, 'subdir/')) def test_split_file_branches_trunk_subfile(self): self.assertEqual( svnpoller.split_file_branches('trunk/subdir/file.c'), (None, 'subdir/file.c')) def test_split_file_branches_trunk_invalid(self): # file named trunk (not a directory): self.assertEqual( svnpoller.split_file_branches('trunk'), None) def test_split_file_branches_branch(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/'), ('branches/1.5.x', '')) def test_split_file_branches_branch_subdir(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/subdir/'), ('branches/1.5.x', 'subdir/')) def test_split_file_branches_branch_subfile(self): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x/subdir/file.c'), ('branches/1.5.x', 'subdir/file.c')) def test_split_file_branches_branch_invalid(self): # file named branches/1.5.x (not a directory): self.assertEqual( svnpoller.split_file_branches('branches/1.5.x'), None) def test_split_file_branches_otherdir(self): # other dirs are ignored: self.assertEqual( svnpoller.split_file_branches('tags/testthis/subdir/'), None) def test_split_file_branches_otherfile(self): # other files are ignored: self.assertEqual( svnpoller.split_file_branches('tags/testthis/subdir/file.c'), None) def test_split_file_projects_branches(self): self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/trunk/subdir/file.c'), dict(project='buildbot', path='subdir/file.c')) self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/branches/1.5.x/subdir/file.c'), dict(project='buildbot', branch='branches/1.5.x', path='subdir/file.c')) # tags are ignored: self.assertEqual( svnpoller.split_file_projects_branches( 'buildbot/tags/testthis/subdir/file.c'), None) 
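The split_file helpers exercised by TestSplitFile above are normally handed to an SVNPoller from master.cfg. The fragment below is a minimal, illustrative sketch only: it assumes the conventional c = BuildmasterConfig dictionary and uses a placeholder repository URL that is not taken from this test suite; only constructor arguments that the tests above actually exercise (repourl, split_file, pollinterval, cachepath) are shown.

# Hypothetical master.cfg fragment -- not part of the buildbot source tree.
from buildbot.changes.svnpoller import SVNPoller, split_file_branches

c = BuildmasterConfig = {}

c['change_source'] = [
    SVNPoller(
        # Placeholder URL; point this at the repository root whose trunk/ and
        # branches/ layout split_file_branches expects.
        repourl="https://svn.example.org/svn/sample",
        split_file=split_file_branches,  # trunk/... -> (None, path); branches/X/... -> ('branches/X', path)
        pollinterval=300,                # seconds between 'svn log' polls
        cachepath="revcache",            # persists last_change across restarts (see test_cachepath_full)
    ),
]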
buildbot-2.6.0/master/buildbot/test/unit/test_clients_sendchange.py000066400000000000000000000224361361162603000256160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.spread import pb from twisted.trial import unittest from buildbot.clients import sendchange class Sender(unittest.TestCase): def setUp(self): # patch out some PB components and make up some mocks self.patch(pb, 'PBClientFactory', self._fake_PBClientFactory) self.patch(reactor, 'connectTCP', self._fake_connectTCP) self.factory = mock.Mock(name='PBClientFactory') self.factory.login = self._fake_login self.factory.login_d = defer.Deferred() self.remote = mock.Mock(name='PB Remote') self.remote.callRemote = self._fake_callRemote self.remote.broker.transport.loseConnection = self._fake_loseConnection # results self.creds = None self.conn_host = self.conn_port = None self.lostConnection = False self.added_changes = [] self.vc_used = None def _fake_PBClientFactory(self): return self.factory def _fake_login(self, creds): self.creds = creds return self.factory.login_d def _fake_connectTCP(self, host, port, factory): self.conn_host = host self.conn_port = port self.assertIdentical(factory, self.factory) self.factory.login_d.callback(self.remote) def _fake_callRemote(self, method, change): self.assertEqual(method, 'addChange') self.added_changes.append(change) return defer.succeed(None) def _fake_loseConnection(self): self.lostConnection = True def assertProcess(self, host, port, username, password, changes): self.assertEqual([host, port, username, password, changes], [self.conn_host, self.conn_port, self.creds.username, self.creds.password, self.added_changes]) @defer.inlineCallbacks def test_send_minimal(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a']) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) @defer.inlineCallbacks def test_send_auth(self): s = sendchange.Sender('localhost:1234', auth=('me', 'sekrit')) yield s.send('branch', 'rev', 'comm', ['a']) self.assertProcess('localhost', 1234, b'me', b'sekrit', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) @defer.inlineCallbacks def test_send_full(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a'], who='me', category='cats', when=1234, properties={'a': 'b'}, repository='r', vc='git', project='p', revlink='rl') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='p', repository='r', who='me', files=['a'], 
comments='comm', branch='branch', revision='rev', category='cats', when=1234, properties={'a': 'b'}, revlink='rl', src='git')]) @defer.inlineCallbacks def test_send_files_tuple(self): # 'buildbot sendchange' sends files as a tuple, rather than a list.. s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ('a', 'b')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a', 'b'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None)]) @defer.inlineCallbacks def test_send_codebase(self): s = sendchange.Sender('localhost:1234') yield s.send('branch', 'rev', 'comm', ['a'], codebase='mycb') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='', repository='', who=None, files=['a'], comments='comm', branch='branch', revision='rev', category=None, when=None, properties={}, revlink='', src=None, codebase='mycb')]) @defer.inlineCallbacks def test_send_unicode(self): s = sendchange.Sender('localhost:1234') yield s.send('\N{DEGREE SIGN}', '\U0001f49e', '\N{POSTAL MARK FACE}', ['\U0001F4C1'], project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', category='\U0001F640', when=1234, properties={'\N{LATIN SMALL LETTER A WITH MACRON}': 'b'}, revlink='\U0001F517') self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', files=['\U0001F4C1'], # FILE FOLDER comments='\N{POSTAL MARK FACE}', branch='\N{DEGREE SIGN}', revision='\U0001f49e', # REVOLVING HEARTS category='\U0001F640', # WEARY CAT FACE when=1234, properties={'\N{LATIN SMALL LETTER A WITH MACRON}': 'b'}, revlink='\U0001F517', # LINK SYMBOL src=None)]) @defer.inlineCallbacks def test_send_unicode_utf8(self): s = sendchange.Sender('localhost:1234') yield s.send('\N{DEGREE SIGN}'.encode('utf8'), '\U0001f49e'.encode('utf8'), '\N{POSTAL MARK FACE}'.encode('utf8'), ['\U0001F4C1'.encode('utf8')], project='\N{SKULL AND CROSSBONES}'.encode('utf8'), repository='\N{SNOWMAN}'.encode('utf8'), who='\N{THAI CHARACTER KHOMUT}'.encode('utf8'), category='\U0001F640'.encode('utf8'), when=1234, properties={ '\N{LATIN SMALL LETTER A WITH MACRON}'.encode('utf8'): 'b'}, revlink='\U0001F517'.encode('utf8')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{SKULL AND CROSSBONES}', repository='\N{SNOWMAN}', who='\N{THAI CHARACTER KHOMUT}', files=['\U0001F4C1'], # FILE FOLDER comments='\N{POSTAL MARK FACE}', branch='\N{DEGREE SIGN}', revision='\U0001f49e', # REVOLVING HEARTS category='\U0001F640', # WEARY CAT FACE when=1234, # NOTE: not decoded! 
properties={b'\xc4\x81': 'b'}, revlink='\U0001F517', # LINK SYMBOL src=None)]) @defer.inlineCallbacks def test_send_unicode_latin1(self): # hand send() a bunch of latin1 strings, and expect them recoded # to unicode s = sendchange.Sender('localhost:1234', encoding='latin1') yield s.send('\N{YEN SIGN}'.encode('latin1'), '\N{POUND SIGN}'.encode('latin1'), '\N{BROKEN BAR}'.encode('latin1'), ['\N{NOT SIGN}'.encode('latin1')], project='\N{DEGREE SIGN}'.encode('latin1'), repository='\N{SECTION SIGN}'.encode('latin1'), who='\N{MACRON}'.encode('latin1'), category='\N{PILCROW SIGN}'.encode('latin1'), when=1234, properties={ '\N{SUPERSCRIPT ONE}'.encode('latin1'): 'b'}, revlink='\N{INVERTED QUESTION MARK}'.encode('latin1')) self.assertProcess('localhost', 1234, b'change', b'changepw', [ dict(project='\N{DEGREE SIGN}', repository='\N{SECTION SIGN}', who='\N{MACRON}', files=['\N{NOT SIGN}'], comments='\N{BROKEN BAR}', branch='\N{YEN SIGN}', revision='\N{POUND SIGN}', category='\N{PILCROW SIGN}', when=1234, # NOTE: not decoded! properties={b'\xb9': 'b'}, revlink='\N{INVERTED QUESTION MARK}', src=None)]) buildbot-2.6.0/master/buildbot/test/unit/test_clients_tryclient.py000066400000000000000000000161041361162603000255270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import sys from twisted.python.compat import unicode from twisted.trial import unittest from buildbot.clients import tryclient class createJobfile(unittest.TestCase): def makeNetstring(self, *strings): return ''.join(['%d:%s,' % (len(s), s) for s in strings]) # version 1 is deprecated and not produced by the try client def test_createJobfile_v2_one_builder(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = 'diff...' repository = 'repo' project = 'proj' who = None comment = None builderNames = ['runtests'] properties = {} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '2', jobid, branch, baserev, str(patch_level), patch_body, repository, project, builderNames[0]) self.assertEqual(job, jobstr) def test_createJobfile_v2_two_builders(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = 'diff...' repository = 'repo' project = 'proj' who = None comment = None builderNames = ['runtests', 'moretests'] properties = {} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '2', jobid, branch, baserev, str(patch_level), patch_body, repository, project, builderNames[0], builderNames[1]) self.assertEqual(job, jobstr) def test_createJobfile_v3(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = 'diff...' 
repository = 'repo' project = 'proj' who = 'someuser' comment = None builderNames = ['runtests'] properties = {} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '3', jobid, branch, baserev, str(patch_level), patch_body, repository, project, who, builderNames[0]) self.assertEqual(job, jobstr) def test_createJobfile_v4(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = 'diff...' repository = 'repo' project = 'proj' who = 'someuser' comment = 'insightful comment' builderNames = ['runtests'] properties = {} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '4', jobid, branch, baserev, str(patch_level), patch_body, repository, project, who, comment, builderNames[0]) self.assertEqual(job, jobstr) def test_createJobfile_v5(self): jobid = '123-456' branch = 'branch' baserev = 'baserev' patch_level = 0 patch_body = 'diff...' repository = 'repo' project = 'proj' who = 'someuser' comment = 'insightful comment' builderNames = ['runtests'] properties = {'foo': 'bar'} job = tryclient.createJobfile( jobid, branch, baserev, patch_level, patch_body, repository, project, who, comment, builderNames, properties) jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': jobid, 'branch': branch, 'baserev': baserev, 'patch_level': patch_level, 'patch_body': patch_body, 'repository': repository, 'project': project, 'who': who, 'comment': comment, 'builderNames': builderNames, 'properties': properties, })) self.assertEqual(job, jobstr) def test_SourceStampExtractor_readPatch(self): sse = tryclient.GitExtractor(None, None, None) for patchlevel, diff in enumerate((None, "", b"")): sse.readPatch(diff, patchlevel) self.assertEqual(sse.patch, (patchlevel, None)) sse.readPatch(b"diff schmiff blah blah blah", 23) self.assertEqual(sse.patch, (23, "diff schmiff blah blah blah")) def test_GitExtractor_fixBranch(self): sse = tryclient.GitExtractor(None, "origin/master", None) self.assertEqual(sse.branch, "origin/master") sse.fixBranch(b'origi\n') self.assertEqual(sse.branch, "origin/master") sse.fixBranch(b'origin\n') self.assertEqual(sse.branch, "master") def test_GitExtractor_override_baserev(self): sse = tryclient.GitExtractor(None, None, None) sse.override_baserev(b"23ae367063327b79234e081f396ecbc\n") self.assertEqual(sse.baserev, "23ae367063327b79234e081f396ecbc") class RemoteTryPP_TestStream(object): def __init__(self): self.writes = [] self.is_open = True def write(self, data): assert self.is_open self.writes.append(data) def closeStdin(self): assert self.is_open self.is_open = False def test_RemoteTryPP_encoding(self): rmt = tryclient.RemoteTryPP("job") self.assertTrue(isinstance(rmt.job, unicode)) rmt.transport = self.RemoteTryPP_TestStream() rmt.connectionMade() self.assertFalse(rmt.transport.is_open) self.assertEqual(len(rmt.transport.writes), 1) self.assertFalse(isinstance(rmt.transport.writes[0], unicode)) for streamname in "out", "err": sys_streamattr = "std" + streamname rmt_methodattr = streamname + "Received" teststream = self.RemoteTryPP_TestStream() saved_stream = getattr(sys, sys_streamattr) try: setattr(sys, sys_streamattr, teststream) getattr(rmt, rmt_methodattr)(b"data") finally: setattr(sys, sys_streamattr, saved_stream) self.assertEqual(len(teststream.writes), 1) self.assertTrue(isinstance(teststream.writes[0], unicode)) 
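createJobfile's output is asserted byte-for-byte in the tests above but never parsed back. Purely as a reading aid, here is a hypothetical decoder for the version-5 (JSON) jobfile framing that makeNetstring illustrates; it is not part of buildbot or of this test suite, and it assumes the '%d:%s,' netstring layout shown above.

# Hypothetical helper -- not part of buildbot. Reverses the netstring framing
# used by tryclient.createJobfile for format version 5.
import json

def decode_v5_jobfile(jobstr):
    fields = []
    rest = jobstr
    while rest:
        length, _, rest = rest.partition(':')  # each field starts with '<len>:'
        n = int(length)
        fields.append(rest[:n])                # the field payload itself
        rest = rest[n + 1:]                    # skip the payload and its trailing ','
    assert fields[0] == '5', 'only the JSON-based v5 format is handled here'
    return json.loads(fields[1])               # dict: jobid, branch, baserev, patch_level, ...

For the jobstr built in test_createJobfile_v5, decode_v5_jobfile(job)['builderNames'] would give back ['runtests'].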
buildbot-2.6.0/master/buildbot/test/unit/test_clients_usersclient.py000066400000000000000000000065211361162603000260540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.spread import pb from twisted.trial import unittest from buildbot.clients import usersclient class TestUsersClient(unittest.TestCase): def setUp(self): # patch out some PB components and make up some mocks self.patch(pb, 'PBClientFactory', self._fake_PBClientFactory) self.patch(reactor, 'connectTCP', self._fake_connectTCP) self.factory = mock.Mock(name='PBClientFactory') self.factory.login = self._fake_login self.factory.login_d = defer.Deferred() self.remote = mock.Mock(name='PB Remote') self.remote.callRemote = self._fake_callRemote self.remote.broker.transport.loseConnection = self._fake_loseConnection # results self.conn_host = self.conn_port = None self.lostConnection = False def _fake_PBClientFactory(self): return self.factory def _fake_login(self, creds): return self.factory.login_d def _fake_connectTCP(self, host, port, factory): self.conn_host = host self.conn_port = port self.assertIdentical(factory, self.factory) self.factory.login_d.callback(self.remote) def _fake_callRemote(self, method, op, bb_username, bb_password, ids, info): self.assertEqual(method, 'commandline') self.called_with = dict(op=op, bb_username=bb_username, bb_password=bb_password, ids=ids, info=info) return defer.succeed(None) def _fake_loseConnection(self): self.lostConnection = True def assertProcess(self, host, port, called_with): self.assertEqual([host, port, called_with], [self.conn_host, self.conn_port, self.called_with]) @defer.inlineCallbacks def test_usersclient_info(self): uc = usersclient.UsersClient('localhost', "user", "userpw", 1234) yield uc.send('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x', 'svn': 'x'}]) self.assertProcess('localhost', 1234, dict(op='update', bb_username='bb_user', bb_password='hashed_bb_pass', ids=None, info=[dict(identifier='x', svn='x')])) @defer.inlineCallbacks def test_usersclient_ids(self): uc = usersclient.UsersClient('localhost', "user", "userpw", 1234) yield uc.send('remove', None, None, ['x'], None) self.assertProcess('localhost', 1234, dict(op='remove', bb_username=None, bb_password=None, ids=['x'], info=None)) buildbot-2.6.0/master/buildbot/test/unit/test_config.py000066400000000000000000001742471361162603000232530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # We cannot use the builtins module here from Python-Future. # We need to use the native __builtin__ module on Python 2, # and builtins module on Python 3, because we need to override # the actual native open method. import os import re import textwrap import mock from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import config from buildbot import configurators from buildbot import interfaces from buildbot import locks from buildbot import revlinks from buildbot import worker from buildbot.changes import base as changes_base from buildbot.process import factory from buildbot.process import properties from buildbot.schedulers import base as schedulers_base from buildbot.status import base as status_base from buildbot.test.util import dirs from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import service from buildbot.worker_transition import DeprecatedWorkerAPIWarning try: # Python 2 import __builtin__ as builtins except ImportError: # Python 3 import builtins global_defaults = dict( title='Buildbot', titleURL='http://buildbot.net', buildbotURL='http://localhost:8080/', logCompressionLimit=4096, logCompressionMethod='gz', logEncoding='utf-8', logMaxTailSize=None, logMaxSize=None, properties=properties.Properties(), collapseRequests=None, prioritizeBuilders=None, protocols={}, multiMaster=False, manhole=None, buildbotNetUsageData=None, # in unit tests we default to None, but normally defaults to 'basic' www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log'), ) class FakeChangeSource(changes_base.ChangeSource): def __init__(self): super().__init__(name='FakeChangeSource') class FakeStatusReceiver(status_base.StatusReceiver): pass @implementer(interfaces.IScheduler) class FakeScheduler: def __init__(self, name): self.name = name class FakeBuilder: def __init__(self, **kwargs): self.__dict__.update(kwargs) @implementer(interfaces.IWorker) class FakeWorker: def __init__(self, **kwargs): self.__dict__.update(kwargs) @implementer(interfaces.IMachine) class FakeMachine: def __init__(self, **kwargs): self.__dict__.update(kwargs) class ConfigErrors(unittest.TestCase): def test_constr(self): ex = config.ConfigErrors(['a', 'b']) self.assertEqual(ex.errors, ['a', 'b']) def test_addError(self): ex = config.ConfigErrors(['a']) ex.addError('c') self.assertEqual(ex.errors, ['a', 'c']) def test_nonempty(self): empty = config.ConfigErrors() full = config.ConfigErrors(['a']) self.assertTrue(not empty) self.assertFalse(not full) def test_error_raises(self): e = self.assertRaises(config.ConfigErrors, config.error, "message") self.assertEqual(e.errors, ["message"]) def test_error_no_raise(self): e = config.ConfigErrors() self.patch(config, "_errors", e) config.error("message") self.assertEqual(e.errors, ["message"]) def test_str(self): ex 
= config.ConfigErrors() self.assertEqual(str(ex), "") ex = config.ConfigErrors(["a"]) self.assertEqual(str(ex), "a") ex = config.ConfigErrors(["a", "b"]) self.assertEqual(str(ex), "a\nb") ex = config.ConfigErrors(["a"]) ex.addError('c') self.assertEqual(str(ex), "a\nc") class ConfigLoaderTests(ConfigErrorsMixin, dirs.DirsMixin, unittest.SynchronousTestCase): def setUp(self): self.basedir = os.path.abspath('basedir') self.filename = os.path.join(self.basedir, 'test.cfg') self.patch(config, "_in_unit_tests", False) return self.setUpDirs('basedir') def tearDown(self): return self.tearDownDirs() def install_config_file(self, config_file, other_files=None): if other_files is None: other_files = {} config_file = textwrap.dedent(config_file) with open(os.path.join(self.basedir, self.filename), "w") as f: f.write(config_file) for file, contents in other_files.items(): with open(file, "w") as f: f.write(contents) def test_loadConfig_missing_file(self): with self.assertRaisesConfigError( re.compile("configuration file .* does not exist")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_missing_basedir(self): with self.assertRaisesConfigError( re.compile("basedir .* does not exist")): config.loadConfigDict(os.path.join(self.basedir, 'NO'), 'test.cfg') def test_loadConfig_open_error(self): """ Check that loadConfig() raises correct ConfigError exception in cases when configure file is found, but we fail to open it. """ def raise_IOError(*args): raise IOError("error_msg") self.install_config_file('#dummy') # override build-in open() function to always rise IOError self.patch(builtins, "open", raise_IOError) # check that we got the expected ConfigError exception with self.assertRaisesConfigError( re.compile("unable to open configuration file .*: error_msg")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_parse_error(self): self.install_config_file('def x:\nbar') with self.assertRaisesConfigError(re.compile( "encountered a SyntaxError while parsing config file:")): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_eval_ConfigError(self): self.install_config_file("""\ from buildbot import config BuildmasterConfig = { 'multiMaster': True } config.error('oh noes!')""") with self.assertRaisesConfigError("oh noes"): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_eval_otherError(self): self.install_config_file("""\ from buildbot import config BuildmasterConfig = { 'multiMaster': True } raise ValueError('oh noes')""") with self.assertRaisesConfigError( "error while parsing config file: oh noes (traceback in logfile)"): config.loadConfigDict(self.basedir, self.filename) [error] = self.flushLoggedErrors(ValueError) self.assertEqual(error.value.args, ("oh noes",)) def test_loadConfig_no_BuildmasterConfig(self): self.install_config_file('x=10') with self.assertRaisesConfigError( "does not define 'BuildmasterConfig'"): config.loadConfigDict(self.basedir, self.filename) def test_loadConfig_with_local_import(self): self.install_config_file("""\ from subsidiary_module import x BuildmasterConfig = dict(x=x) """, {'basedir/subsidiary_module.py': "x = 10"}) _, rv = config.loadConfigDict(self.basedir, self.filename) self.assertEqual(rv, {'x': 10}) class MasterConfig(ConfigErrorsMixin, dirs.DirsMixin, unittest.TestCase): maxDiff = None def setUp(self): self.basedir = os.path.abspath('basedir') self.filename = os.path.join(self.basedir, 'test.cfg') return self.setUpDirs('basedir') def tearDown(self): return self.tearDownDirs() # 
utils def patch_load_helpers(self): # patch out all of the "helpers" for loadConfig with null functions for n in dir(config.MasterConfig): if n.startswith('load_'): typ = 'loader' elif n.startswith('check_'): typ = 'checker' else: continue v = getattr(config.MasterConfig, n) if callable(v): if typ == 'loader': self.patch(config.MasterConfig, n, mock.Mock(side_effect=lambda filename, config_dict: None)) else: self.patch(config.MasterConfig, n, mock.Mock(side_effect=lambda: None)) def install_config_file(self, config_file, other_files=None): if other_files is None: other_files = {} config_file = textwrap.dedent(config_file) with open(os.path.join(self.basedir, self.filename), "w") as f: f.write(config_file) for file, contents in other_files.items(): with open(file, "w") as f: f.write(contents) # tests def test_defaults(self): cfg = config.MasterConfig() expected = dict( # validation, db=dict( db_url='sqlite:///state.sqlite'), mq=dict(type='simple'), metrics=None, caches=dict(Changes=10, Builds=15), schedulers={}, builders=[], workers=[], change_sources=[], status=[], user_managers=[], revlink=revlinks.default_revlink_matcher ) expected.update(global_defaults) expected['buildbotNetUsageData'] = 'basic' got = { attr: getattr(cfg, attr) for attr, exp in expected.items()} got = interfaces.IConfigured(got).getConfigDict() expected = interfaces.IConfigured(expected).getConfigDict() self.assertEqual(got, expected) def test_defaults_validation(self): # re's aren't comparable, but we can make sure the keys match cfg = config.MasterConfig() self.assertEqual(sorted(cfg.validation.keys()), sorted([ 'branch', 'revision', 'property_name', 'property_value', ])) def test_loadConfig_eval_ConfigErrors(self): # We test a config that has embedded errors, as well # as semantic errors that get added later. If an exception is raised # prematurely, then the semantic errors wouldn't get reported. 
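        # (Editor's note, illustrative only and not part of the upstream test:
        # as the ConfigErrors tests above demonstrate, config.error() raises a
        # ConfigErrors exception immediately only when no collector is
        # installed; while a config file is being loaded, a ConfigErrors
        # collector is patched in as config._errors, so calls are accumulated
        # rather than raised:
        #     errors = config.ConfigErrors()
        #     # with config._errors pointing at `errors`...
        #     config.error('oh noes!')   # ...this appends instead of raising
        # which is why the embedded and semantic errors below are all reported
        # together in a single exception.)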
self.install_config_file("""\ from buildbot import config BuildmasterConfig = {} config.error('oh noes!') config.error('noes too!')""") e = self.assertRaises(config.ConfigErrors, config.FileLoader(self.basedir, self.filename).loadConfig) self.assertEqual(e.errors, ['oh noes!', 'noes too!', 'no workers are configured', 'no builders are configured']) def test_loadConfig_unknown_key(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict(foo=10) """) with self.assertRaisesConfigError("Unknown BuildmasterConfig key foo"): config.FileLoader(self.basedir, self.filename).loadConfig() def test_loadConfig_unknown_keys(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict(foo=10, bar=20) """) with self.assertRaisesConfigError( "Unknown BuildmasterConfig keys bar, foo"): config.FileLoader(self.basedir, self.filename).loadConfig() def test_loadConfig_success(self): self.patch_load_helpers() self.install_config_file("""\ BuildmasterConfig = dict() """) rv = config.FileLoader(self.basedir, self.filename).loadConfig() self.assertIsInstance(rv, config.MasterConfig) # make sure all of the loaders and checkers are called self.assertTrue(rv.load_global.called) self.assertTrue(rv.load_validation.called) self.assertTrue(rv.load_db.called) self.assertTrue(rv.load_metrics.called) self.assertTrue(rv.load_caches.called) self.assertTrue(rv.load_schedulers.called) self.assertTrue(rv.load_builders.called) self.assertTrue(rv.load_workers.called) self.assertTrue(rv.load_change_sources.called) self.assertTrue(rv.load_machines.called) self.assertTrue(rv.load_user_managers.called) self.assertTrue(rv.check_single_master.called) self.assertTrue(rv.check_schedulers.called) self.assertTrue(rv.check_locks.called) self.assertTrue(rv.check_builders.called) self.assertTrue(rv.check_ports.called) self.assertTrue(rv.check_machines.called) def test_preChangeGenerator(self): cfg = config.MasterConfig() self.assertEqual({ 'author': None, 'files': None, 'comments': None, 'revision': None, 'when_timestamp': None, 'branch': None, 'category': None, 'revlink': '', 'properties': {}, 'repository': '', 'project': '', 'codebase': None}, cfg.preChangeGenerator()) class MasterConfig_loaders(ConfigErrorsMixin, unittest.TestCase): filename = 'test.cfg' def setUp(self): self.cfg = config.MasterConfig() self.errors = config.ConfigErrors() self.patch(config, '_errors', self.errors) # utils def assertResults(self, **expected): self.assertFalse(self.errors, self.errors.errors) got = { attr: getattr(self.cfg, attr) for attr, exp in expected.items()} got = interfaces.IConfigured(got).getConfigDict() expected = interfaces.IConfigured(expected).getConfigDict() self.assertEqual(got, expected) # tests def test_load_global_defaults(self): self.maxDiff = None self.cfg.load_global(self.filename, {}) self.assertResults(**global_defaults) def test_load_global_string_param_not_string(self): self.cfg.load_global(self.filename, dict(title=10)) self.assertConfigError(self.errors, 'must be a string') def test_load_global_int_param_not_int(self): self.cfg.load_global(self.filename, dict(changeHorizon='yes')) self.assertConfigError(self.errors, 'must be an int') def test_load_global_protocols_not_dict(self): self.cfg.load_global(self.filename, dict(protocols="test")) self.assertConfigError(self.errors, "c['protocols'] must be dict") def test_load_global_protocols_key_int(self): self.cfg.load_global(self.filename, dict(protocols={321: {"port": 123}})) self.assertConfigError( self.errors, "c['protocols'] 
keys must be strings") def test_load_global_protocols_value_not_dict(self): self.cfg.load_global(self.filename, dict(protocols={"pb": 123})) self.assertConfigError( self.errors, "c['protocols']['pb'] must be a dict") def do_test_load_global(self, config_dict, **expected): self.cfg.load_global(self.filename, config_dict) self.assertResults(**expected) def test_load_global_title(self): self.do_test_load_global(dict(title='hi'), title='hi') def test_load_global_title_too_long(self): with assertProducesWarning(config.ConfigWarning, message_pattern=r"Title is too long"): self.do_test_load_global(dict(title="Very very very very very long title")) def test_load_global_projectURL(self): self.do_test_load_global(dict(projectName='hey'), title='hey') def test_load_global_titleURL(self): self.do_test_load_global(dict(titleURL='hi'), titleURL='hi') def test_load_global_buildbotURL(self): self.do_test_load_global(dict(buildbotURL='hey'), buildbotURL='hey') def test_load_global_changeHorizon(self): self.do_test_load_global(dict(changeHorizon=10), changeHorizon=10) def test_load_global_changeHorizon_none(self): self.do_test_load_global(dict(changeHorizon=None), changeHorizon=None) def test_load_global_eventHorizon(self): with assertProducesWarning( config.ConfigWarning, message_pattern=r"`eventHorizon` is deprecated and ignored"): self.do_test_load_global( dict(eventHorizon=10)) def test_load_global_status(self): with assertProducesWarning( config.ConfigWarning, message_pattern=r"`status` targets are deprecated and ignored"): self.do_test_load_global( dict(status=[])) def test_load_global_buildbotNetUsageData(self): self.patch(config, "_in_unit_tests", False) with assertProducesWarning( config.ConfigWarning, message_pattern=r"`buildbotNetUsageData` is not configured and defaults to basic."): self.do_test_load_global( dict()) def test_load_global_logCompressionLimit(self): self.do_test_load_global(dict(logCompressionLimit=10), logCompressionLimit=10) def test_load_global_logCompressionMethod(self): self.do_test_load_global(dict(logCompressionMethod='bz2'), logCompressionMethod='bz2') def test_load_global_logCompressionMethod_invalid(self): self.cfg.load_global(self.filename, dict(logCompressionMethod='foo')) self.assertConfigError( self.errors, "c['logCompressionMethod'] must be 'raw', 'bz2', 'gz' or 'lz4'") def test_load_global_codebaseGenerator(self): func = lambda _: "dummy" self.do_test_load_global(dict(codebaseGenerator=func), codebaseGenerator=func) def test_load_global_codebaseGenerator_invalid(self): self.cfg.load_global(self.filename, dict(codebaseGenerator='dummy')) self.assertConfigError(self.errors, "codebaseGenerator must be a callable " "accepting a dict and returning a str") def test_load_global_logMaxSize(self): self.do_test_load_global(dict(logMaxSize=123), logMaxSize=123) def test_load_global_logMaxTailSize(self): self.do_test_load_global(dict(logMaxTailSize=123), logMaxTailSize=123) def test_load_global_logEncoding(self): self.do_test_load_global( dict(logEncoding='latin-2'), logEncoding='latin-2') def test_load_global_properties(self): exp = properties.Properties() exp.setProperty('x', 10, self.filename) self.do_test_load_global(dict(properties=dict(x=10)), properties=exp) def test_load_global_properties_invalid(self): self.cfg.load_global(self.filename, dict(properties='yes')) self.assertConfigError(self.errors, "must be a dictionary") def test_load_global_collapseRequests_bool(self): self.do_test_load_global(dict(collapseRequests=False), collapseRequests=False) def 
test_load_global_collapseRequests_callable(self): callable = lambda: None self.do_test_load_global(dict(collapseRequests=callable), collapseRequests=callable) def test_load_global_collapseRequests_invalid(self): self.cfg.load_global(self.filename, dict(collapseRequests='yes')) self.assertConfigError(self.errors, "must be a callable, True, or False") def test_load_global_prioritizeBuilders_callable(self): callable = lambda: None self.do_test_load_global(dict(prioritizeBuilders=callable), prioritizeBuilders=callable) def test_load_global_prioritizeBuilders_invalid(self): self.cfg.load_global(self.filename, dict(prioritizeBuilders='yes')) self.assertConfigError(self.errors, "must be a callable") def test_load_global_protocols_str(self): self.do_test_load_global(dict(protocols={'pb': {'port': 'udp:123'}}), protocols={'pb': {'port': 'udp:123'}}) def test_load_global_multiMaster(self): self.do_test_load_global(dict(multiMaster=1), multiMaster=1) def test_load_global_manhole(self): mh = mock.Mock(name='manhole') self.do_test_load_global(dict(manhole=mh), manhole=mh) def test_load_global_revlink_callable(self): callable = lambda: None self.do_test_load_global(dict(revlink=callable), revlink=callable) def test_load_global_revlink_invalid(self): self.cfg.load_global(self.filename, dict(revlink='')) self.assertConfigError(self.errors, "must be a callable") def test_load_validation_defaults(self): self.cfg.load_validation(self.filename, {}) self.assertEqual(sorted(self.cfg.validation.keys()), sorted([ 'branch', 'revision', 'property_name', 'property_value', ])) def test_load_validation_invalid(self): self.cfg.load_validation(self.filename, dict(validation='plz')) self.assertConfigError(self.errors, "must be a dictionary") def test_load_validation_unk_keys(self): self.cfg.load_validation(self.filename, dict(validation=dict(users='.*'))) self.assertConfigError(self.errors, "unrecognized validation key(s)") def test_load_validation(self): r = re.compile('.*') self.cfg.load_validation(self.filename, dict(validation=dict(branch=r))) self.assertEqual(self.cfg.validation['branch'], r) # check that defaults are still around self.assertIn('revision', self.cfg.validation) def test_load_db_defaults(self): self.cfg.load_db(self.filename, {}) self.assertResults( db=dict(db_url='sqlite:///state.sqlite')) def test_load_db_db_url(self): self.cfg.load_db(self.filename, dict(db_url='abcd')) self.assertResults(db=dict(db_url='abcd')) def test_load_db_db_poll_interval(self): # value is ignored, but no error with assertProducesWarning( config.ConfigWarning, message_pattern=r"db_poll_interval is deprecated and will be ignored"): self.cfg.load_db(self.filename, dict(db_poll_interval=2)) self.assertResults( db=dict(db_url='sqlite:///state.sqlite')) def test_load_db_dict(self): # db_poll_interval value is ignored, but no error with assertProducesWarning( config.ConfigWarning, message_pattern=r"db_poll_interval is deprecated and will be ignored"): self.cfg.load_db(self.filename, dict(db=dict(db_url='abcd', db_poll_interval=10))) self.assertResults(db=dict(db_url='abcd')) def test_load_db_unk_keys(self): with assertProducesWarning( config.ConfigWarning, message_pattern=r"db_poll_interval is deprecated and will be ignored"): self.cfg.load_db(self.filename, dict(db=dict(db_url='abcd', db_poll_interval=10, bar='bar'))) self.assertConfigError(self.errors, "unrecognized keys in") def test_load_mq_defaults(self): self.cfg.load_mq(self.filename, {}) self.assertResults(mq=dict(type='simple')) def test_load_mq_explicit_type(self): 
self.cfg.load_mq(self.filename, dict(mq=dict(type='simple'))) self.assertResults(mq=dict(type='simple')) def test_load_mq_unk_type(self): self.cfg.load_mq(self.filename, dict(mq=dict(type='foo'))) self.assertConfigError(self.errors, "mq type 'foo' is not known") def test_load_mq_unk_keys(self): self.cfg.load_mq(self.filename, dict(mq=dict(bar='bar'))) self.assertConfigError(self.errors, "unrecognized keys in") def test_load_metrics_defaults(self): self.cfg.load_metrics(self.filename, {}) self.assertResults(metrics=None) def test_load_metrics_invalid(self): self.cfg.load_metrics(self.filename, dict(metrics=13)) self.assertConfigError(self.errors, "must be a dictionary") def test_load_metrics(self): self.cfg.load_metrics(self.filename, dict(metrics=dict(foo=1))) self.assertResults(metrics=dict(foo=1)) def test_load_caches_defaults(self): self.cfg.load_caches(self.filename, {}) self.assertResults(caches=dict(Changes=10, Builds=15)) def test_load_caches_invalid(self): self.cfg.load_caches(self.filename, dict(caches=13)) self.assertConfigError(self.errors, "must be a dictionary") def test_load_caches_buildCacheSize(self): self.cfg.load_caches(self.filename, dict(buildCacheSize=13)) self.assertResults(caches=dict(Builds=13, Changes=10)) def test_load_caches_buildCacheSize_and_caches(self): self.cfg.load_caches(self.filename, dict(buildCacheSize=13, caches=dict(builds=11))) self.assertConfigError(self.errors, "cannot specify") def test_load_caches_changeCacheSize(self): self.cfg.load_caches(self.filename, dict(changeCacheSize=13)) self.assertResults(caches=dict(Changes=13, Builds=15)) def test_load_caches_changeCacheSize_and_caches(self): self.cfg.load_caches(self.filename, dict(changeCacheSize=13, caches=dict(changes=11))) self.assertConfigError(self.errors, "cannot specify") def test_load_caches(self): self.cfg.load_caches(self.filename, dict(caches=dict(foo=1))) self.assertResults(caches=dict(Changes=10, Builds=15, foo=1)) def test_load_caches_not_int_err(self): """ Test that non-integer cache sizes are not allowed. """ self.cfg.load_caches(self.filename, dict(caches=dict(foo="1"))) self.assertConfigError(self.errors, "value for cache size 'foo' must be an integer") def test_load_caches_to_small_err(self): """ Test that cache sizes less than 1 are not allowed. 
""" self.cfg.load_caches(self.filename, dict(caches=dict(Changes=-12))) self.assertConfigError(self.errors, "'Changes' cache size must be at least 1, got '-12'") def test_load_schedulers_defaults(self): self.cfg.load_schedulers(self.filename, {}) self.assertResults(schedulers={}) def test_load_schedulers_not_list(self): self.cfg.load_schedulers(self.filename, dict(schedulers=dict())) self.assertConfigError(self.errors, "must be a list of") def test_load_schedulers_not_instance(self): self.cfg.load_schedulers(self.filename, dict(schedulers=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_schedulers_dupe(self): sch1 = FakeScheduler(name='sch') sch2 = FakeScheduler(name='sch') self.cfg.load_schedulers(self.filename, dict(schedulers=[sch1, sch2])) self.assertConfigError(self.errors, "scheduler name 'sch' used multiple times") def test_load_schedulers(self): sch = schedulers_base.BaseScheduler('sch', [""]) self.cfg.load_schedulers(self.filename, dict(schedulers=[sch])) self.assertResults(schedulers=dict(sch=sch)) def test_load_builders_defaults(self): self.cfg.load_builders(self.filename, {}) self.assertResults(builders=[]) def test_load_builders_not_list(self): self.cfg.load_builders(self.filename, dict(builders=dict())) self.assertConfigError(self.errors, "must be a list") def test_load_builders_not_instance(self): self.cfg.load_builders(self.filename, dict(builders=[mock.Mock()])) self.assertConfigError( self.errors, "is not a builder config (in c['builders']") def test_load_builders(self): bldr = config.BuilderConfig(name='x', factory=factory.BuildFactory(), workername='x') self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertResults(builders=[bldr]) def test_load_builders_dict(self): bldr = dict(name='x', factory=factory.BuildFactory(), workername='x') self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertIsInstance(self.cfg.builders[0], config.BuilderConfig) self.assertEqual(self.cfg.builders[0].name, 'x') def test_load_builders_abs_builddir(self): bldr = dict(name='x', factory=factory.BuildFactory(), workername='x', builddir=os.path.abspath('.')) self.cfg.load_builders(self.filename, dict(builders=[bldr])) self.assertEqual( len(self.flushWarnings([self.cfg.load_builders])), 1) def test_load_workers_defaults(self): self.cfg.load_workers(self.filename, {}) self.assertResults(workers=[]) def test_load_workers_not_list(self): self.cfg.load_workers(self.filename, dict(workers=dict())) self.assertConfigError(self.errors, "must be a list") def test_load_workers_not_instance(self): self.cfg.load_workers(self.filename, dict(workers=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_workers_reserved_names(self): for name in 'debug', 'change', 'status': self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is reserved") self.errors.errors[:] = [] # clear out the errors def test_load_workers_not_identifiers(self): for name in ("123 no initial digits", "spaces not allowed", 'a/b', "a.b.c.d", "a-b_c.d9",): self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is not an identifier") self.errors.errors[:] = [] # clear out the errors def test_load_workers_too_long(self): name = "a" * 51 self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.assertConfigError(self.errors, "is longer than") self.errors.errors[:] = [] # clear out the errors def 
test_load_workers_empty(self): name = "" self.cfg.load_workers(self.filename, dict(workers=[worker.Worker(name, 'x')])) self.errors.errors[:] = self.errors.errors[ 1:2] # only get necessary error self.assertConfigError(self.errors, "cannot be an empty string") self.errors.errors[:] = [] # clear out the errors def test_load_workers(self): wrk = worker.Worker('foo', 'x') self.cfg.load_workers(self.filename, dict(workers=[wrk])) self.assertResults(workers=[wrk]) def test_load_change_sources_defaults(self): self.cfg.load_change_sources(self.filename, {}) self.assertResults(change_sources=[]) def test_load_change_sources_not_instance(self): self.cfg.load_change_sources(self.filename, dict(change_source=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_change_sources_single(self): chsrc = FakeChangeSource() self.cfg.load_change_sources(self.filename, dict(change_source=chsrc)) self.assertResults(change_sources=[chsrc]) def test_load_change_sources_list(self): chsrc = FakeChangeSource() self.cfg.load_change_sources(self.filename, dict(change_source=[chsrc])) self.assertResults(change_sources=[chsrc]) def test_load_machines_defaults(self): self.cfg.load_machines(self.filename, {}) self.assertResults(machines=[]) def test_load_machines_not_instance(self): self.cfg.load_machines(self.filename, dict(machines=[mock.Mock()])) self.assertConfigError(self.errors, "must be a list of") def test_load_machines_single(self): mm = FakeMachine(name='a') self.cfg.load_machines(self.filename, dict(machines=mm)) self.assertConfigError(self.errors, "must be a list of") def test_load_machines_list(self): mm = FakeMachine() self.cfg.load_machines(self.filename, dict(machines=[mm])) self.assertResults(machines=[mm]) def test_load_user_managers_defaults(self): self.cfg.load_user_managers(self.filename, {}) self.assertResults(user_managers=[]) def test_load_user_managers_not_list(self): self.cfg.load_user_managers(self.filename, dict(user_managers='foo')) self.assertConfigError(self.errors, "must be a list") def test_load_user_managers(self): um = mock.Mock() self.cfg.load_user_managers(self.filename, dict(user_managers=[um])) self.assertResults(user_managers=[um]) def test_load_www_default(self): self.cfg.load_www(self.filename, {}) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_port(self): self.cfg.load_www(self.filename, dict(www=dict(port=9888))) self.assertResults(www=dict(port=9888, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_plugin(self): self.cfg.load_www(self.filename, dict(www=dict(plugins={'waterfall': {'foo': 'bar'}}))) self.assertResults(www=dict(port=None, plugins={'waterfall': {'foo': 'bar'}}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_allowed_origins(self): self.cfg.load_www(self.filename, dict(www=dict(allowed_origins=['a', 'b']))) self.assertResults(www=dict(port=None, allowed_origins=['a', 'b'], plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http.log')) def test_load_www_logfileName(self): self.cfg.load_www(self.filename, dict(www=dict(logfileName='http-access.log'))) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, logfileName='http-access.log')) def 
test_load_www_versions(self): custom_versions = [ ('Test Custom Component', '0.0.1'), ('Test Custom Component 2', '0.1.0'), ] self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertResults(www=dict(port=None, plugins={}, auth={'name': 'NoAuth'}, authz={}, avatar_methods={'name': 'gravatar'}, versions=custom_versions, logfileName='http.log')) def test_load_www_versions_not_list(self): custom_versions = { 'Test Custom Component': '0.0.1', 'Test Custom Component 2': '0.0.2', } self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertConfigError( self.errors, 'Invalid www configuration value of versions') def test_load_www_versions_value_invalid(self): custom_versions = [('a', '1'), 'abc', ('b',)] self.cfg.load_www( self.filename, {'www': dict(versions=custom_versions)}) self.assertConfigError( self.errors, 'Invalid www configuration value of versions') def test_load_www_cookie_expiration_time_not_timedelta(self): self.cfg.load_www( self.filename, {'www': dict(cookie_expiration_time=1)}) self.assertConfigError( self.errors, 'Invalid www["cookie_expiration_time"]') def test_load_www_unknown(self): self.cfg.load_www(self.filename, dict(www=dict(foo="bar"))) self.assertConfigError(self.errors, "unknown www configuration parameter(s) foo") def test_load_services_nominal(self): class MyService(service.BuildbotService): def reconfigService(foo=None): self.foo = foo myService = MyService(foo="bar", name="foo") self.cfg.load_services(self.filename, dict( services=[myService])) self.assertResults(services={"foo": myService}) def test_load_services_badservice(self): class MyService: pass myService = MyService() self.cfg.load_services(self.filename, dict( services=[myService])) errMsg = (".MyService'> ") errMsg += "object should be an instance of buildbot.util.service.BuildbotService" self.assertConfigError(self.errors, errMsg) def test_load_services_duplicate(self): class MyService(service.BuildbotService): name = 'myservice' def reconfigService(self, x=None): self.x = x self.cfg.load_services(self.filename, dict( services=[MyService(x='a'), MyService(x='b')])) self.assertConfigError( self.errors, 'Duplicate service name %r' % MyService.name) def test_load_configurators_norminal(self): class MyConfigurator(configurators.ConfiguratorBase): def configure(self, config_dict): config_dict['foo'] = 'bar' c = dict(configurators=[MyConfigurator()]) self.cfg.run_configurators(self.filename, c) self.assertEqual(c['foo'], 'bar') class MasterConfig_checkers(ConfigErrorsMixin, unittest.TestCase): def setUp(self): self.cfg = config.MasterConfig() self.errors = config.ConfigErrors() self.patch(config, '_errors', self.errors) # utils def setup_basic_attrs(self): # set up a basic config for checking; this will be modified below sch = mock.Mock() sch.name = 'sch' sch.listBuilderNames = lambda: ['b1', 'b2'] b1 = mock.Mock() b1.name = 'b1' b2 = mock.Mock() b2.name = 'b2' self.cfg.schedulers = dict(sch=sch) self.cfg.workers = [mock.Mock()] self.cfg.builders = [b1, b2] def setup_builder_locks(self, builder_lock=None, dup_builder_lock=False, bare_builder_lock=False): """Set-up two mocked builders with specified locks. @type builder_lock: string or None @param builder_lock: Name of the lock to add to first builder. If None, no lock is added. 
@type dup_builder_lock: boolean @param dup_builder_lock: if True, add a lock with duplicate name to the second builder @type bare_builder_lock: boolean @param bare_builder_lock: if True, add bare lock objects, don't wrap them into a locks.LockAccess object """ def bldr(name): b = mock.Mock() b.name = name b.locks = [] b.factory.steps = [('cls', (), dict(locks=[]))] return b def lock(name): lock = locks.MasterLock(name) if bare_builder_lock: return lock return locks.LockAccess(lock, "counting", _skipChecks=True) b1, b2 = bldr('b1'), bldr('b2') self.cfg.builders = [b1, b2] if builder_lock: b1.locks.append(lock(builder_lock)) if dup_builder_lock: b2.locks.append(lock(builder_lock)) # tests def test_check_single_master_multimaster(self): self.cfg.multiMaster = True self.cfg.check_single_master() self.assertNoConfigErrors(self.errors) def test_check_single_master_no_builders(self): self.setup_basic_attrs() self.cfg.builders = [] self.cfg.check_single_master() self.assertConfigError(self.errors, "no builders are configured") def test_check_single_master_no_workers(self): self.setup_basic_attrs() self.cfg.workers = [] self.cfg.check_single_master() self.assertConfigError(self.errors, "no workers are configured") def test_check_single_master_unsch_builder(self): self.setup_basic_attrs() b3 = mock.Mock() b3.name = 'b3' self.cfg.builders.append(b3) self.cfg.check_single_master() self.assertConfigError(self.errors, "have no schedulers to drive them") def test_check_single_master_renderable_builderNames(self): self.setup_basic_attrs() b3 = mock.Mock() b3.name = 'b3' self.cfg.builders.append(b3) sch2 = mock.Mock() sch2.listBuilderNames = lambda: properties.Interpolate('%(prop:foo)s') self.cfg.schedulers['sch2'] = sch2 self.cfg.check_single_master() self.assertNoConfigErrors(self.errors) def test_check_schedulers_unknown_builder(self): self.setup_basic_attrs() del self.cfg.builders[1] # remove b2, leaving b1 self.cfg.check_schedulers() self.assertConfigError(self.errors, "Unknown builder 'b2'") def test_check_schedulers_ignored_in_multiMaster(self): self.setup_basic_attrs() del self.cfg.builders[1] # remove b2, leaving b1 self.cfg.multiMaster = True self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_schedulers_renderable_builderNames(self): self.setup_basic_attrs() sch2 = mock.Mock() sch2.listBuilderNames = lambda: properties.Interpolate('%(prop:foo)s') self.cfg.schedulers['sch2'] = sch2 self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_schedulers(self): self.setup_basic_attrs() self.cfg.check_schedulers() self.assertNoConfigErrors(self.errors) def test_check_locks_dup_builder_lock(self): self.setup_builder_locks(builder_lock='l', dup_builder_lock=True) self.cfg.check_locks() self.assertConfigError(self.errors, "Two locks share") def test_check_locks(self): self.setup_builder_locks(builder_lock='bl') self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_locks_none(self): # no locks in the whole config, should be fine self.setup_builder_locks() self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_locks_bare(self): # check_locks() should be able to handle bare lock objects, # i.e. lock objects that are not wrapped into a LockAccess() object self.setup_builder_locks(builder_lock='oldlock', bare_builder_lock=True) self.cfg.check_locks() self.assertNoConfigErrors(self.errors) def test_check_builders_unknown_worker(self): wrk = mock.Mock() wrk.workername = 'xyz' self.cfg.workers = [wrk] b1 = 
FakeBuilder(workernames=['xyz', 'abc'], builddir='x', name='b1') self.cfg.builders = [b1] self.cfg.check_builders() self.assertConfigError(self.errors, "builder 'b1' uses unknown workers 'abc'") def test_check_builders_duplicate_name(self): b1 = FakeBuilder(workernames=[], name='b1', builddir='1') b2 = FakeBuilder(workernames=[], name='b1', builddir='2') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertConfigError(self.errors, "duplicate builder name 'b1'") def test_check_builders_duplicate_builddir(self): b1 = FakeBuilder(workernames=[], name='b1', builddir='dir') b2 = FakeBuilder(workernames=[], name='b2', builddir='dir') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertConfigError(self.errors, "duplicate builder builddir 'dir'") def test_check_builders(self): wrk = mock.Mock() wrk.workername = 'a' self.cfg.workers = [wrk] b1 = FakeBuilder(workernames=['a'], name='b1', builddir='dir1') b2 = FakeBuilder(workernames=['a'], name='b2', builddir='dir2') self.cfg.builders = [b1, b2] self.cfg.check_builders() self.assertNoConfigErrors(self.errors) def test_check_ports_protocols_set(self): self.cfg.protocols = {"pb": {"port": 10}} self.cfg.check_ports() self.assertNoConfigErrors(self.errors) def test_check_ports_protocols_not_set_workers(self): self.cfg.workers = [mock.Mock()] self.cfg.check_ports() self.assertConfigError(self.errors, "workers are configured, but c['protocols'] not") def test_check_ports_protocols_port_duplication(self): self.cfg.protocols = {"pb": {"port": 123}, "amp": {"port": 123}} self.cfg.check_ports() self.assertConfigError(self.errors, "Some of ports in c['protocols'] duplicated") def test_check_machines_unknown_name(self): self.cfg.workers = [ FakeWorker(name='wa', machine_name='unk') ] self.cfg.machines = [ FakeMachine(name='a') ] self.cfg.check_machines() self.assertConfigError(self.errors, 'uses unknown machine') def test_check_machines_duplicate_name(self): self.cfg.machines = [ FakeMachine(name='a'), FakeMachine(name='a') ] self.cfg.check_machines() self.assertConfigError(self.errors, 'duplicate machine name') class MasterConfig_old_worker_api(unittest.TestCase): filename = "test.cfg" def setUp(self): self.cfg = config.MasterConfig() def test_workers_new_api(self): with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): self.assertEqual(self.cfg.workers, []) class BuilderConfig(ConfigErrorsMixin, unittest.TestCase): factory = factory.BuildFactory() # utils def assertAttributes(self, cfg, **expected): got = { attr: getattr(cfg, attr) for attr, exp in expected.items()} self.assertEqual(got, expected) # tests def test_no_name(self): with self.assertRaisesConfigError("builder's name is required"): config.BuilderConfig(factory=self.factory, workernames=['a']) def test_reserved_name(self): with self.assertRaisesConfigError( "builder names must not start with an underscore: '_a'"): config.BuilderConfig(name='_a', factory=self.factory, workernames=['a']) def test_utf8_name(self): with self.assertRaisesConfigError( "builder names must be unicode or ASCII"): config.BuilderConfig(name="\N{SNOWMAN}".encode('utf-8'), factory=self.factory, workernames=['a']) def test_no_factory(self): with self.assertRaisesConfigError("builder 'a' has no factory"): config.BuilderConfig(name='a', workernames=['a']) def test_wrong_type_factory(self): with self.assertRaisesConfigError("builder 'a's factory is not"): config.BuilderConfig(factory=[], name='a', workernames=['a']) def test_no_workernames(self): with self.assertRaisesConfigError( "builder 'a': at 
least one workername is required"): config.BuilderConfig(name='a', factory=self.factory) def test_bogus_workernames(self): with self.assertRaisesConfigError( "workernames must be a list or a string"): config.BuilderConfig(name='a', workernames={1: 2}, factory=self.factory) def test_bogus_workername(self): with self.assertRaisesConfigError("workername must be a string"): config.BuilderConfig(name='a', workername=1, factory=self.factory) def test_bogus_category(self): with assertProducesWarning( config.ConfigWarning, message_pattern=r"builder categories are deprecated and should be replaced with"): with self.assertRaisesConfigError("category must be a string"): config.BuilderConfig(category=13, name='a', workernames=['a'], factory=self.factory) def test_tags_must_be_list(self): with self.assertRaisesConfigError("tags must be a list"): config.BuilderConfig(tags='abc', name='a', workernames=['a'], factory=self.factory) def test_tags_must_be_list_of_str(self): with self.assertRaisesConfigError( "tags list contains something that is not a string"): config.BuilderConfig(tags=['abc', 13], name='a', workernames=['a'], factory=self.factory) def test_tags_no_tag_dupes(self): with self.assertRaisesConfigError( "builder 'a': tags list contains duplicate tags: abc"): config.BuilderConfig(tags=['abc', 'bca', 'abc'], name='a', workernames=['a'], factory=self.factory) def test_tags_no_categories_too(self): with self.assertRaisesConfigError( "categories are deprecated and replaced by tags; you should only specify tags"): config.BuilderConfig(tags=['abc'], category='def', name='a', workernames=['a'], factory=self.factory) def test_inv_nextWorker(self): with self.assertRaisesConfigError("nextWorker must be a callable"): config.BuilderConfig(nextWorker="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_nextBuild(self): with self.assertRaisesConfigError("nextBuild must be a callable"): config.BuilderConfig(nextBuild="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_canStartBuild(self): with self.assertRaisesConfigError("canStartBuild must be a callable"): config.BuilderConfig(canStartBuild="foo", name="a", workernames=['a'], factory=self.factory) def test_inv_env(self): with self.assertRaisesConfigError("builder's env must be a dictionary"): config.BuilderConfig(env="foo", name="a", workernames=['a'], factory=self.factory) def test_defaults(self): cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory) self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='a b c', workernames=['a'], builddir='a_b_c', workerbuilddir='a_b_c', tags=[], nextWorker=None, locks=[], env={}, properties={}, collapseRequests=None, description=None) def test_unicode_name(self): cfg = config.BuilderConfig( name='a \N{SNOWMAN} c', workername='a', factory=self.factory) self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='a \N{SNOWMAN} c') def test_args(self): cfg = config.BuilderConfig( name='b', workername='s1', workernames='s2', builddir='bd', workerbuilddir='wbd', factory=self.factory, tags=['c'], nextWorker=lambda: 'ns', nextBuild=lambda: 'nb', locks=['l'], env=dict(x=10), properties=dict(y=20), collapseRequests='cr', description='buzz') self.assertIdentical(cfg.factory, self.factory) self.assertAttributes(cfg, name='b', workernames=['s2', 's1'], builddir='bd', workerbuilddir='wbd', tags=['c'], locks=['l'], env={'x': 10}, properties={'y': 20}, collapseRequests='cr', description='buzz') def test_getConfigDict(self): ns = 
lambda: 'ns' nb = lambda: 'nb' cfg = config.BuilderConfig( name='b', workername='s1', workernames='s2', builddir='bd', workerbuilddir='wbd', factory=self.factory, tags=['c'], nextWorker=ns, nextBuild=nb, locks=['l'], env=dict(x=10), properties=dict(y=20), collapseRequests='cr', description='buzz') self.assertEqual(cfg.getConfigDict(), {'builddir': 'bd', 'tags': ['c'], 'description': 'buzz', 'env': {'x': 10}, 'factory': self.factory, 'locks': ['l'], 'collapseRequests': 'cr', 'name': 'b', 'nextBuild': nb, 'nextWorker': ns, 'properties': {'y': 20}, 'workerbuilddir': 'wbd', 'workernames': ['s2', 's1'], }) def test_getConfigDict_collapseRequests(self): for cr in (False, lambda a, b, c: False): cfg = config.BuilderConfig(name='b', collapseRequests=cr, factory=self.factory, workername='s1') self.assertEqual(cfg.getConfigDict(), {'builddir': 'b', 'collapseRequests': cr, 'name': 'b', 'workerbuilddir': 'b', 'factory': self.factory, 'workernames': ['s1'], }) def test_init_workername_keyword(self): cfg = config.BuilderConfig(name='a b c', workername='a', factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workername_positional(self): with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cfg = config.BuilderConfig( 'a b c', 'a', factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workernames_keyword(self): cfg = config.BuilderConfig(name='a b c', workernames=['a'], factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workernames_positional(self): with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cfg = config.BuilderConfig( 'a b c', None, ['a'], factory=self.factory) self.assertEqual(cfg.workernames, ['a']) def test_init_workerbuilddir_keyword(self): cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory, workerbuilddir="dir") self.assertEqual(cfg.workerbuilddir, 'dir') def test_init_workerbuilddir_positional(self): with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cfg = config.BuilderConfig( 'a b c', 'a', None, None, 'dir', factory=self.factory) self.assertEqual(cfg.workerbuilddir, 'dir') def test_init_next_worker_keyword(self): f = lambda: None cfg = config.BuilderConfig( name='a b c', workername='a', factory=self.factory, nextWorker=f) self.assertEqual(cfg.nextWorker, f) def test_init_next_worker_positional(self): f = lambda: None with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cfg = config.BuilderConfig( 'a b c', 'a', None, None, None, self.factory, None, None, f) self.assertEqual(cfg.nextWorker, f) class FakeService(service.ReconfigurableServiceMixin, service.AsyncService): succeed = True call_index = 1 @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): self.called = FakeService.call_index FakeService.call_index += 1 yield super().reconfigServiceWithBuildbotConfig(new_config) if not self.succeed: raise ValueError("oh noes") class FakeMultiService(service.ReconfigurableServiceMixin, service.AsyncMultiService): def reconfigServiceWithBuildbotConfig(self, new_config): self.called = True d = super().reconfigServiceWithBuildbotConfig(new_config) return d class ReconfigurableServiceMixin(unittest.TestCase): @defer.inlineCallbacks def test_service(self): svc = FakeService() yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) self.assertTrue(svc.called) @defer.inlineCallbacks def test_service_failure(self): svc = FakeService() svc.succeed = False try: yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) except ValueError: pass else: 
self.fail("should have raised ValueError") @defer.inlineCallbacks def test_multiservice(self): svc = FakeMultiService() ch1 = FakeService() yield ch1.setServiceParent(svc) ch2 = FakeMultiService() yield ch2.setServiceParent(svc) ch3 = FakeService() yield ch3.setServiceParent(ch2) yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) self.assertTrue(svc.called) self.assertTrue(ch1.called) self.assertTrue(ch2.called) self.assertTrue(ch3.called) @defer.inlineCallbacks def test_multiservice_priority(self): parent = FakeMultiService() svc128 = FakeService() yield svc128.setServiceParent(parent) services = [svc128] for i in range(20, 1, -1): svc = FakeService() svc.reconfig_priority = i yield svc.setServiceParent(parent) services.append(svc) yield parent.reconfigServiceWithBuildbotConfig(mock.Mock()) prio_order = [s.called for s in services] called_order = sorted(prio_order) self.assertEqual(prio_order, called_order) @defer.inlineCallbacks def test_multiservice_nested_failure(self): svc = FakeMultiService() ch1 = FakeService() yield ch1.setServiceParent(svc) ch1.succeed = False try: yield svc.reconfigServiceWithBuildbotConfig(mock.Mock()) except ValueError: pass else: self.fail("should have raised ValueError") buildbot-2.6.0/master/buildbot/test/unit/test_configurator_base.py000066400000000000000000000023531361162603000254660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.configurators import ConfiguratorBase from buildbot.test.util import configurators class ConfiguratorBaseTests(configurators.ConfiguratorMixin, unittest.SynchronousTestCase): ConfiguratorClass = ConfiguratorBase def test_basic(self): self.setupConfigurator() self.assertEqual(self.config_dict, { 'schedulers': [], 'protocols': {}, 'workers': [], 'builders': [] }) self.assertEqual(self.configurator.workers, []) buildbot-2.6.0/master/buildbot/test/unit/test_contrib_buildbot_cvs_mail.py000066400000000000000000000203701361162603000271720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import sys from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from twisted.internet import utils from twisted.python import log from twisted.trial import unittest from buildbot.test.util.misc import encodeExecutableAndArgs test = ''' Update of /cvsroot/test In directory example:/tmp/cvs-serv21085 Modified Files: README hello.c Log Message: two files checkin ''' golden_1_11_regex = [ '^From:', '^To: buildbot@example.com$', '^Reply-To: noreply@example.com$', '^Subject: cvs update for project test$', '^Date:', '^X-Mailer: Python buildbot-cvs-mail', '^$', '^Cvsmode: 1.11$', '^Category: None', '^CVSROOT: \"ext:example:/cvsroot\"', '^Files: test README 1.1,1.2 hello.c 2.2,2.3$', '^Project: test$', '^$', '^Update of /cvsroot/test$', '^In directory example:/tmp/cvs-serv21085$', '^$', '^Modified Files:$', 'README hello.c$', 'Log Message:$', '^two files checkin', '^$', '^$'] golden_1_12_regex = [ '^From: ', '^To: buildbot@example.com$', '^Reply-To: noreply@example.com$', '^Subject: cvs update for project test$', '^Date: ', '^X-Mailer: Python buildbot-cvs-mail', '^$', '^Cvsmode: 1.12$', '^Category: None$', '^CVSROOT: \"ext:example.com:/cvsroot\"$', '^Files: README 1.1 1.2 hello.c 2.2 2.3$', '^Path: test$', '^Project: test$', '^$', '^Update of /cvsroot/test$', '^In directory example:/tmp/cvs-serv21085$', '^$', '^Modified Files:$', 'README hello.c$', '^Log Message:$', 'two files checkin', '^$', '^$'] class _SubprocessProtocol(protocol.ProcessProtocol): def __init__(self, input, deferred): if isinstance(input, str): input = input.encode('utf-8') self.input = input self.deferred = deferred self.output = b'' def outReceived(self, s): self.output += s errReceived = outReceived def connectionMade(self): # push the input and send EOF self.transport.write(self.input) self.transport.closeStdin() def processEnded(self, reason): self.deferred.callback((self.output, reason.value.exitCode)) def getProcessOutputAndValueWithInput(executable, args, input): "similar to getProcessOutputAndValue, but also allows injection of input on stdin" d = defer.Deferred() p = _SubprocessProtocol(input, d) (executable, args) = encodeExecutableAndArgs(executable, args) reactor.spawnProcess(p, executable, (executable,) + tuple(args)) return d class TestBuildbotCvsMail(unittest.TestCase): buildbot_cvs_mail_path = os.path.abspath( os.path.join(os.path.dirname(__file__), '../../../contrib/buildbot_cvs_mail.py')) if not os.path.exists(buildbot_cvs_mail_path): skip = ("'%s' does not exist (normal unless run from git)" % buildbot_cvs_mail_path) def assertOutputOk(self, result, regexList): "assert that the output from getProcessOutputAndValueWithInput matches expectations" (output, code) = result if isinstance(output, bytes): output = output.decode("utf-8") try: self.assertEqual(code, 0, "subprocess exited uncleanly") lines = output.splitlines() self.assertEqual(len(lines), len(regexList), "got wrong number of lines of output") misses = [] for line, regex in zip(lines, regexList): m = re.search(regex, line) if not m: misses.append((regex, line)) self.assertEqual(misses, [], "got non-matching lines") except Exception: log.msg("got output:\n" + output) raise def test_buildbot_cvs_mail_from_cvs1_11(self): # Simulate CVS 1.11 executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example:/cvsroot\"', '--email=buildbot@example.com', '-P', 'test', '-R', 'noreply@example.com', '-t', 'test', 'README', 
'1.1,1.2', 'hello.c', '2.2,2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = getProcessOutputAndValueWithInput(executable, args, input=test) d.addCallback(self.assertOutputOk, golden_1_11_regex) return d def test_buildbot_cvs_mail_from_cvs1_12(self): # Simulate CVS 1.12, with --path option executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example.com:/cvsroot\"', '--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = getProcessOutputAndValueWithInput(executable, args, input=test) d.addCallback(self.assertOutputOk, golden_1_12_regex) return d def test_buildbot_cvs_mail_no_args_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_without_email_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--cvsroot=\"ext:example.com:/cvsroot\"', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_without_cvsroot_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--complete-garbage-opt=gomi', '--cvsroot=\"ext:example.com:/cvsroot\"', '--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d def test_buildbot_cvs_mail_with_unknown_opt_exits_with_error(self): executable = sys.executable args = [self.buildbot_cvs_mail_path, '--email=buildbot@example.com', '-P', 'test', '--path', 'test', '-R', 'noreply@example.com', '-t', 'README', '1.1', '1.2', 'hello.c', '2.2', '2.3'] (executable, args) = encodeExecutableAndArgs(executable, args) d = utils.getProcessOutputAndValue(executable, args) def check(result): (stdout, stderr, code) = result self.assertEqual(code, 2) d.addCallback(check) return d buildbot-2.6.0/master/buildbot/test/unit/test_data_base.py000066400000000000000000000113541361162603000236760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.data import base from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util.misc import TestReactorMixin class ResourceType(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() def makeResourceTypeSubclass(self, **attributes): attributes.setdefault('name', 'thing') return type('ThingResourceType', (base.ResourceType,), attributes) def test_sets_master(self): cls = self.makeResourceTypeSubclass() master = mock.Mock() inst = cls(master) self.assertIdentical(inst.master, master) def test_getEndpoints_instances_fails(self): ep = base.Endpoint(None, None) cls = self.makeResourceTypeSubclass(endpoints=[ep]) inst = cls(None) with self.assertRaises(TypeError): inst.getEndpoints() def test_getEndpoints_classes(self): class MyEndpoint(base.Endpoint): pass cls = self.makeResourceTypeSubclass(endpoints=[MyEndpoint]) master = mock.Mock() inst = cls(master) eps = inst.getEndpoints() self.assertIsInstance(eps[0], MyEndpoint) self.assertIdentical(eps[0].master, master) def test_produceEvent(self): cls = self.makeResourceTypeSubclass( name='singular', eventPathPatterns="/foo/:fooid/bar/:barid") master = fakemaster.make_master(self, wantMq=True) master.mq.verifyMessages = False # since this is a pretend message inst = cls(master) inst.produceEvent(dict(fooid=10, barid='20'), # note integer vs. string 'tested') master.mq.assertProductions([ (('foo', '10', 'bar', '20', 'tested'), dict(fooid=10, barid='20')) ]) def test_compilePatterns(self): class MyResourceType(base.ResourceType): eventPathPatterns = """ /builder/:builderid/build/:number /build/:buildid """ master = fakemaster.make_master(self, wantMq=True) master.mq.verifyMessages = False # since this is a pretend message inst = MyResourceType(master) self.assertEqual( inst.eventPaths, ['builder/{builderid}/build/{number}', 'build/{buildid}']) class Endpoint(endpoint.EndpointMixin, unittest.TestCase): class MyResourceType(base.ResourceType): name = "my" class MyEndpoint(base.Endpoint): pathPatterns = """ /my/pattern """ endpointClass = MyEndpoint resourceTypeClass = MyResourceType def setUp(self): self.setUpEndpoint() def tearDown(self): self.tearDownEndpoint() def test_sets_master(self): self.assertIdentical(self.master, self.ep.master) class ListResult(unittest.TestCase): def test_constructor(self): lr = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertEqual(lr.data, [1, 2, 3]) self.assertEqual(lr.offset, 10) self.assertEqual(lr.total, 20) self.assertEqual(lr.limit, 3) def test_repr(self): lr = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertTrue(repr(lr).startswith('ListResult')) def test_eq(self): lr1 = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) lr2 = base.ListResult([1, 2, 3], offset=20, total=30, limit=3) lr3 = base.ListResult([1, 2, 3], offset=20, total=30, limit=3) self.assertEqual(lr2, lr3) self.assertNotEqual(lr1, lr2) self.assertNotEqual(lr1, lr3) def test_eq_to_list(self): list = [1, 2, 3] lr1 = base.ListResult([1, 2, 3], offset=10, total=20, limit=3) self.assertNotEqual(lr1, list) lr2 = base.ListResult([1, 2, 3], offset=None, total=None, limit=None) self.assertEqual(lr2, list) lr3 = base.ListResult([1, 2, 3], total=3) self.assertEqual(lr3, list) lr4 = base.ListResult([1, 2, 3], total=4) self.assertNotEqual(lr4, list) 
buildbot-2.6.0/master/buildbot/test/unit/test_data_builders.py000066400000000000000000000244711361162603000246010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import builders from buildbot.data import resultspec from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class BuilderEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builders.BuilderEndpoint resourceTypeClass = builders.Builder def setUp(self): self.setUpEndpoint() return self.db.insertTestData([ fakedb.Builder(id=1, name='buildera'), fakedb.Builder(id=2, name='builderb'), fakedb.Master(id=13), fakedb.BuilderMaster(id=1, builderid=2, masterid=13), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): builder = yield self.callGet(('builders', 2)) self.validateData(builder) self.assertEqual(builder['name'], 'builderb') @defer.inlineCallbacks def test_get_missing(self): builder = yield self.callGet(('builders', 99)) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_missing_with_name(self): builder = yield self.callGet(('builders', 'builderc')) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_existing_with_master(self): builder = yield self.callGet(('masters', 13, 'builders', 2)) self.validateData(builder) self.assertEqual(builder['name'], 'builderb') @defer.inlineCallbacks def test_get_existing_with_different_master(self): builder = yield self.callGet(('masters', 14, 'builders', 2)) self.assertEqual(builder, None) @defer.inlineCallbacks def test_get_missing_with_master(self): builder = yield self.callGet(('masters', 13, 'builders', 99)) self.assertEqual(builder, None) class BuildersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builders.BuildersEndpoint resourceTypeClass = builders.Builder def setUp(self): self.setUpEndpoint() return self.db.insertTestData([ fakedb.Builder(id=1, name='buildera'), fakedb.Builder(id=2, name='builderb'), fakedb.Builder(id=3, name='builderTagA'), fakedb.Builder(id=4, name='builderTagB'), fakedb.Builder(id=5, name='builderTagAB'), fakedb.Tag(id=3, name="tagA"), fakedb.Tag(id=4, name="tagB"), fakedb.BuildersTags(builderid=3, tagid=3), fakedb.BuildersTags(builderid=4, tagid=4), fakedb.BuildersTags(builderid=5, tagid=3), fakedb.BuildersTags(builderid=5, tagid=4), fakedb.Master(id=13), fakedb.BuilderMaster(id=1, builderid=2, masterid=13), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): builders = yield self.callGet(('builders',)) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b 
in builders]), [1, 2, 3, 4, 5]) @defer.inlineCallbacks def test_get_masterid(self): builders = yield self.callGet(('masters', 13, 'builders')) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [2]) @defer.inlineCallbacks def test_get_masterid_missing(self): builders = yield self.callGet(('masters', 14, 'builders')) self.assertEqual(sorted([b['builderid'] for b in builders]), []) @defer.inlineCallbacks def test_get_contains_one_tag(self): resultSpec = resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 5]) @defer.inlineCallbacks def test_get_contains_two_tags(self): resultSpec = resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA", "tagB"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 4, 5]) @defer.inlineCallbacks def test_get_contains_two_tags_one_unknown(self): resultSpec = resultspec.ResultSpec( filters=[resultspec.Filter('tags', 'contains', ["tagA", "tagC"])]) builders = yield self.callGet(('builders',)) builders = resultSpec.apply(builders) [self.validateData(b) for b in builders] self.assertEqual(sorted([b['builderid'] for b in builders]), [3, 5]) class Builder(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = builders.Builder(self.master) return self.master.db.insertTestData([ fakedb.Master(id=13), fakedb.Master(id=14), ]) def test_signature_findBuilderId(self): @self.assertArgSpecMatches( self.master.data.updates.findBuilderId, # fake self.rtype.findBuilderId) # real def findBuilderId(self, name): pass def test_findBuilderId(self): # this just passes through to the db method, so test that rv = defer.succeed(None) self.master.db.builders.findBuilderId = mock.Mock(return_value=rv) self.assertIdentical(self.rtype.findBuilderId('foo'), rv) def test_signature_updateBuilderInfo(self): @self.assertArgSpecMatches(self.master.data.updates.updateBuilderInfo) def updateBuilderInfo(self, builderid, description, tags): pass def test_signature_updateBuilderList(self): @self.assertArgSpecMatches( self.master.data.updates.updateBuilderList, # fake self.rtype.updateBuilderList) # real def updateBuilderList(self, masterid, builderNames): pass @defer.inlineCallbacks def test_updateBuilderList(self): # add one builder master yield self.rtype.updateBuilderList(13, ['somebuilder']) self.assertEqual(sorted((yield self.master.db.builders.getBuilders())), sorted([ dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]), ])) self.master.mq.assertProductions([(('builders', '1', 'started'), {'builderid': 1, 'masterid': 13, 'name': 'somebuilder'})]) # add another yield self.rtype.updateBuilderList(13, ['somebuilder', 'another']) def builderKey(builder): return builder['id'] self.assertEqual(sorted((yield self.master.db.builders.getBuilders()), key=builderKey), sorted([ dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]), dict(id=2, masterids=[13], name='another', description=None, tags=[]), ], key=builderKey)) self.master.mq.assertProductions([(('builders', '2', 'started'), {'builderid': 
2, 'masterid': 13, 'name': 'another'})])

        # add one for another master
        yield self.rtype.updateBuilderList(14, ['another'])
        self.assertEqual(
            sorted((yield self.master.db.builders.getBuilders()), key=builderKey),
            sorted([
                dict(id=1, masterids=[13], name='somebuilder', description=None, tags=[]),
                dict(id=2, masterids=[13, 14], name='another', description=None, tags=[]),
            ], key=builderKey))
        self.master.mq.assertProductions([(('builders', '2', 'started'),
            {'builderid': 2, 'masterid': 14, 'name': 'another'})])

        # remove both for the first master
        yield self.rtype.updateBuilderList(13, [])
        self.assertEqual(
            sorted((yield self.master.db.builders.getBuilders()), key=builderKey),
            sorted([
                dict(id=1, masterids=[], name='somebuilder', description=None, tags=[]),
                dict(id=2, masterids=[14], name='another', description=None, tags=[]),
            ], key=builderKey))
        self.master.mq.assertProductions([
            (('builders', '1', 'stopped'),
             {'builderid': 1, 'masterid': 13, 'name': 'somebuilder'}),
            (('builders', '2', 'stopped'),
             {'builderid': 2, 'masterid': 13, 'name': 'another'}),
        ])

    @defer.inlineCallbacks
    def test__masterDeactivated(self):
        # this method just calls updateBuilderList, so test that.
        self.rtype.updateBuilderList = mock.Mock(
            spec=self.rtype.updateBuilderList)
        yield self.rtype._masterDeactivated(10)
        self.rtype.updateBuilderList.assert_called_with(10, [])
buildbot-2.6.0/master/buildbot/test/unit/test_data_buildrequests.py000066400000000000000000000616501361162603000256630ustar00rootroot00000000000000
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
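# --- Editor's note: illustrative sketch, not part of the original Buildbot
# sources.  The endpoint tests in this module drive the Data API with
# resultspec objects: a ResultSpec bundles filters (plus ordering and paging),
# and its apply() method post-filters a plain list of result dicts, exactly as
# the builder-tag tests above do.  The field name 'claimed' below is example
# data only, not a fixture from this module.
from buildbot.data import resultspec


def example_only_unclaimed(buildrequests):
    # Keep only the entries whose 'claimed' field equals False.
    spec = resultspec.ResultSpec(
        filters=[resultspec.Filter('claimed', 'eq', [False])])
    return spec.apply(buildrequests)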
# # Copyright Buildbot Team Members import datetime import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import buildrequests from buildbot.data import resultspec from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import UTC from buildbot.util import epoch2datetime class TestBuildRequestEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildrequests.BuildRequestEndpoint resourceTypeClass = buildrequests.BuildRequest CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='bbb'), fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, waited_for=1), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def testGetExisting(self): self.db.buildrequests.claimBuildRequests( [44], claimed_at=self.CLAIMED_AT) self.db.buildrequests.completeBuildRequests( [44], 75, complete_at=self.COMPLETE_AT) buildrequest = yield self.callGet(('buildrequests', 44)) self.validateData(buildrequest) # check data formatting: self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['complete'], True) self.assertEqual(buildrequest['builderid'], 77) self.assertEqual(buildrequest['waited_for'], True) self.assertEqual(buildrequest['claimed_at'], self.CLAIMED_AT) self.assertEqual(buildrequest['results'], 75) self.assertEqual(buildrequest['claimed_by_masterid'], fakedb.FakeBuildRequestsComponent.MASTER_ID) self.assertEqual(buildrequest['claimed'], True) self.assertEqual(buildrequest['submitted_at'], self.SUBMITTED_AT) self.assertEqual(buildrequest['complete_at'], self.COMPLETE_AT) self.assertEqual(buildrequest['buildsetid'], 8822) self.assertEqual(buildrequest['priority'], 7) self.assertEqual(buildrequest['properties'], None) @defer.inlineCallbacks def testGetMissing(self): buildrequest = yield self.callGet(('buildrequests', 9999)) self.assertEqual(buildrequest, None) @defer.inlineCallbacks def testGetProperty(self): prop = resultspec.Property(b'property', 'eq', 'prop1') buildrequest = yield self.callGet(('buildrequests', 44), resultSpec=resultspec.ResultSpec(properties=[prop])) self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['properties'], {'prop1': ('one', 'fake1')}) @defer.inlineCallbacks def testGetProperties(self): prop = resultspec.Property(b'property', 'eq', '*') buildrequest = yield self.callGet(('buildrequests', 44), resultSpec=resultspec.ResultSpec(properties=[prop])) self.assertEqual(buildrequest['buildrequestid'], 44) self.assertEqual(buildrequest['properties'], {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) class TestBuildRequestsEndpoint(endpoint.EndpointMixin, unittest.TestCase): 
endpointClass = buildrequests.BuildRequestsEndpoint resourceTypeClass = buildrequests.BuildRequest CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='bbb'), fakedb.Builder(id=78, name='ccc'), fakedb.Builder(id=79, name='ddd'), fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, waited_for=1), fakedb.BuildRequest(id=45, buildsetid=8822, builderid=77), fakedb.BuildRequest(id=46, buildsetid=8822, builderid=78), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def testGetAll(self): buildrequests = yield self.callGet(('buildrequests',)) [self.validateData(br) for br in buildrequests] self.assertEqual(sorted([br['buildrequestid'] for br in buildrequests]), [44, 45, 46]) @defer.inlineCallbacks def testGetNoBuildRequest(self): buildrequests = yield self.callGet(('builders', 79, 'buildrequests')) self.assertEqual(buildrequests, []) @defer.inlineCallbacks def testGetBuilderid(self): buildrequests = yield self.callGet(('builders', 78, 'buildrequests')) [self.validateData(br) for br in buildrequests] self.assertEqual( sorted([br['buildrequestid'] for br in buildrequests]), [46]) @defer.inlineCallbacks def testGetUnknownBuilderid(self): buildrequests = yield self.callGet(('builders', 79, 'buildrequests')) self.assertEqual(buildrequests, []) @defer.inlineCallbacks def testGetProperties(self): self.master.db.insertTestData([ fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) prop = resultspec.Property(b'property', 'eq', '*') buildrequests = yield self.callGet(('builders', 78, 'buildrequests'), resultSpec=resultspec.ResultSpec(properties=[prop])) self.assertEqual(len(buildrequests), 1) self.assertEqual(buildrequests[0]['buildrequestid'], 46) self.assertEqual(buildrequests[0]['properties'], {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) @defer.inlineCallbacks def testGetNoFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) yield self.callGet(('buildrequests',)) getBuildRequestsMock.assert_called_with( builderid=None, bsid=None, complete=None, claimed=None, resultSpec=resultspec.ResultSpec()) @defer.inlineCallbacks def testGetFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) f1 = resultspec.Filter('complete', 'eq', [False]) f2 = resultspec.Filter('claimed', 'eq', [True]) f3 = resultspec.Filter('buildsetid', 'eq', [55]) f4 = resultspec.Filter('branch', 'eq', ['mybranch']) f5 = resultspec.Filter('repository', 'eq', ['myrepo']) yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(filters=[f1, f2, f3, f4, f5])) getBuildRequestsMock.assert_called_with( builderid=None, bsid=55, complete=False, claimed=True, resultSpec=resultspec.ResultSpec(filters=[f4, f5])) @defer.inlineCallbacks def 
testGetClaimedByMasterIdFilters(self): getBuildRequestsMock = mock.Mock(return_value={}) self.patch( self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock) f1 = resultspec.Filter('claimed', 'eq', [True]) f2 = resultspec.Filter('claimed_by_masterid', 'eq', [fakedb.FakeBuildRequestsComponent.MASTER_ID]) yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(filters=[f1, f2])) getBuildRequestsMock.assert_called_with( builderid=None, bsid=None, complete=None, claimed=fakedb.FakeBuildRequestsComponent.MASTER_ID, resultSpec=resultspec.ResultSpec(filters=[f1])) @defer.inlineCallbacks def testGetSortedLimit(self): yield self.master.db.buildrequests.completeBuildRequests([44], 1) res = yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(order=['results'], limit=2)) self.assertEqual(len(res), 2) self.assertEqual(res[0]['results'], -1) res = yield self.callGet( ('buildrequests',), resultSpec=resultspec.ResultSpec(order=['-results'], limit=2)) self.assertEqual(len(res), 2) self.assertEqual(res[0]['results'], 1) class TestBuildRequest(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) class dBLayerException(Exception): pass def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = buildrequests.BuildRequest(self.master) @defer.inlineCallbacks def doTestCallthrough(self, dbMethodName, dbMockedMethod, method, methodargs=None, methodkwargs=None, expectedRes=None, expectedException=None, expectedDbApiCalled=True): self.patch(self.master.db.buildrequests, dbMethodName, dbMockedMethod) if expectedException is not None: try: yield method(*methodargs, **methodkwargs) except expectedException: pass except Exception as e: self.fail('%s exception should be raised, but got %r' % (expectedException, e)) else: self.fail('%s exception should be raised' % (expectedException,)) else: res = yield method(*methodargs, **methodkwargs) self.assertEqual(res, expectedRes) if expectedDbApiCalled: dbMockedMethod.assert_called_with(*methodargs, **methodkwargs) def testSignatureClaimBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.claimBuildRequests, # fake self.rtype.claimBuildRequests) # real def claimBuildRequests(self, brids, claimed_at=None): pass @defer.inlineCallbacks def testFakeDataClaimBuildRequests(self): self.master.db.insertTestData([ fakedb.BuildRequest(id=44, buildsetid=8822), fakedb.BuildRequest(id=55, buildsetid=8822), ]) res = yield self.master.data.updates.claimBuildRequests( [44, 55], claimed_at=self.CLAIMED_AT) self.assertTrue(res) @defer.inlineCallbacks def testFakeDataClaimBuildRequestsNoneArgs(self): res = yield self.master.data.updates.claimBuildRequests([]) self.assertTrue(res) @defer.inlineCallbacks def testClaimBuildRequests(self): self.master.db.insertTestData([ fakedb.Builder(id=123), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123), fakedb.BuildRequest(id=55, buildsetid=8822, builderid=123), ]) claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=True, expectedException=None) msg = { 'buildrequestid': 44, 'complete_at': None, 'complete': False, 'builderid': 123, 'waited_for': False, 
'claimed_at': None, 'results': -1, 'priority': 0, 'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC), 'claimed': False, 'claimed_by_masterid': None, 'buildsetid': 8822, 'properties': None, } self.assertEqual(sorted(self.master.mq.productions), sorted([ (('buildrequests', '44', 'claimed'), msg), (('builders', '123', 'buildrequests', '44', 'claimed'), msg), (('buildsets', '8822', 'builders', '123', 'buildrequests', '44', 'claimed'), msg), ])) @defer.inlineCallbacks def testClaimBuildRequestsNoBrids(self): claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[]], methodkwargs=dict(), expectedRes=True, expectedException=None, expectedDbApiCalled=False) self.assertEqual(self.master.mq.productions, []) @defer.inlineCallbacks def testClaimBuildRequestsAlreadyClaimed(self): claimBuildRequestsMock = mock.Mock( side_effect=buildrequests.AlreadyClaimedError('oups ! buildrequest already claimed')) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=False, expectedException=None) self.assertEqual(self.master.mq.productions, []) @defer.inlineCallbacks def testClaimBuildRequestsUnknownException(self): claimBuildRequestsMock = mock.Mock( side_effect=self.dBLayerException('oups ! unknown error')) yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock, self.rtype.claimBuildRequests, methodargs=[[44]], methodkwargs=dict(claimed_at=self.CLAIMED_AT), expectedRes=None, expectedException=self.dBLayerException) self.assertEqual(self.master.mq.productions, []) def testSignatureUnclaimBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.unclaimBuildRequests, # fake self.rtype.unclaimBuildRequests) # real def unclaimBuildRequests(self, brids): pass @defer.inlineCallbacks def testFakeDataUnclaimBuildRequests(self): res = yield self.master.data.updates.unclaimBuildRequests([44, 55]) self.assertEqual(res, None) @defer.inlineCallbacks def testFakeDataUnclaimBuildRequestsNoneArgs(self): res = yield self.master.data.updates.unclaimBuildRequests([]) self.assertEqual(res, None) @defer.inlineCallbacks def testUnclaimBuildRequests(self): self.master.db.insertTestData([ fakedb.Builder(id=123), fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123), ]) unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('unclaimBuildRequests', unclaimBuildRequestsMock, self.rtype.unclaimBuildRequests, methodargs=[[44]], methodkwargs=dict(), expectedRes=None, expectedException=None) msg = { 'buildrequestid': 44, 'complete_at': None, 'complete': False, 'builderid': 123, 'waited_for': False, 'claimed_at': None, 'results': -1, 'priority': 0, 'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC), 'claimed': False, 'claimed_by_masterid': None, 'buildsetid': 8822, 'properties': None, } self.assertEqual(sorted(self.master.mq.productions), sorted([ (('buildrequests', '44', 'unclaimed'), msg), (('builders', '123', 'buildrequests', '44', 'unclaimed'), msg), (('buildsets', '8822', 'builders', '123', 'buildrequests', '44', 'unclaimed'), msg), ])) @defer.inlineCallbacks def testUnclaimBuildRequestsNoBrids(self): unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('unclaimBuildRequests', unclaimBuildRequestsMock, 
self.rtype.unclaimBuildRequests, methodargs=[[]], methodkwargs=dict(), expectedRes=None, expectedException=None, expectedDbApiCalled=False) def testSignatureCompleteBuildRequests(self): @self.assertArgSpecMatches( self.master.data.updates.completeBuildRequests, # fake self.rtype.completeBuildRequests) # real def completeBuildRequests(self, brids, results, complete_at=None): pass @defer.inlineCallbacks def testFakeDataCompleteBuildRequests(self): res = yield self.master.data.updates.completeBuildRequests( [44, 55], 12, complete_at=self.COMPLETE_AT) self.assertTrue(res) @defer.inlineCallbacks def testFakeDataCompleteBuildRequestsNoneArgs(self): res = yield self.master.data.updates.completeBuildRequests([], 0) self.assertTrue(res) @defer.inlineCallbacks def testCompleteBuildRequests(self): completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=True, expectedException=None) @defer.inlineCallbacks def testCompleteBuildRequestsNoBrids(self): completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None)) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[], 0], methodkwargs=dict(), expectedRes=True, expectedException=None, expectedDbApiCalled=False) @defer.inlineCallbacks def testCompleteBuildRequestsNotClaimed(self): completeBuildRequestsMock = mock.Mock( side_effect=buildrequests.NotClaimedError('oups ! buildrequest not claimed')) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=False, expectedException=None) @defer.inlineCallbacks def testCompleteBuildRequestsUnknownException(self): completeBuildRequestsMock = mock.Mock( side_effect=self.dBLayerException('oups ! 
unknown error')) yield self.doTestCallthrough('completeBuildRequests', completeBuildRequestsMock, self.rtype.completeBuildRequests, methodargs=[[46], 12], methodkwargs=dict(complete_at=self.COMPLETE_AT), expectedRes=None, expectedException=self.dBLayerException) @defer.inlineCallbacks def testRebuildBuildrequest(self): self.master.db.insertTestData([ fakedb.Builder(id=77, name='builder'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.SourceStamp(id=234), fakedb.BuildsetSourceStamp(buildsetid=8822, sourcestampid=234), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2', property_value='["two", "fake2"]'), ]) buildrequest = yield self.master.data.get(('buildrequests', 82)) new_bsid, brid_dict = yield self.rtype.rebuildBuildrequest(buildrequest) self.assertEqual(list(brid_dict.keys()), [77]) buildrequest = yield self.master.data.get(('buildrequests', brid_dict[77])) # submitted_at is the time of the test, so better not depend on it self.assertEqual(buildrequest, {'buildrequestid': 1001, 'complete': False, 'waited_for': False, 'claimed_at': None, 'results': -1, 'claimed': False, 'buildsetid': 200, 'complete_at': None, 'submitted_at': epoch2datetime(0), 'builderid': 77, 'claimed_by_masterid': None, 'priority': 0, 'properties': None}) buildset = yield self.master.data.get(('buildsets', new_bsid)) oldbuildset = yield self.master.data.get(('buildsets', 8822)) # assert same sourcestamp self.assertEqual(buildset['sourcestamps'], oldbuildset['sourcestamps']) buildset['sourcestamps'] = None self.assertEqual(buildset, {'bsid': 200, 'complete_at': None, 'submitted_at': 0, 'sourcestamps': None, 'parent_buildid': None, 'results': -1, 'parent_relationship': None, 'reason': 'rebuild', 'external_idstring': 'extid', 'complete': False}) properties = yield self.master.data.get(('buildsets', new_bsid, 'properties')) self.assertEqual( properties, {'prop1': ('one', 'fake1'), 'prop2': ('two', 'fake2')}) buildbot-2.6.0/master/buildbot/test/unit/test_data_builds.py000066400000000000000000000337561361162603000242600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
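# --- Editor's note: illustrative sketch, not part of the original Buildbot
# sources.  Several tests in this module follow one "callthrough" pattern:
# replace a fake-db method with a Mock returning an already-fired Deferred,
# call the corresponding data-layer method, then assert both the returned
# value and the arguments forwarded to the db.  The dblayer/datalayer
# parameter names are placeholders; the helper assumes both objects expose a
# method with the same name.
import mock
from twisted.internet import defer


@defer.inlineCallbacks
def assert_callthrough(testcase, dblayer, datalayer, method_name, *args, **kwargs):
    expected = object()
    m = mock.Mock(return_value=defer.succeed(expected))
    setattr(dblayer, method_name, m)
    result = yield getattr(datalayer, method_name)(*args, **kwargs)
    testcase.assertIdentical(result, expected)
    m.assert_called_with(*args, **kwargs)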
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import builds from buildbot.data import resultspec from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class BuildEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = builds.BuildEndpoint resourceTypeClass = builds.Build def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=5), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): build = yield self.callGet(('builds', 14)) self.validateData(build) self.assertEqual(build['number'], 4) @defer.inlineCallbacks def test_get_missing(self): build = yield self.callGet(('builds', 9999)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_missing_builder_number(self): build = yield self.callGet(('builders', 999, 'builds', 4)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_builder_missing_number(self): build = yield self.callGet(('builders', 77, 'builds', 44)) self.assertEqual(build, None) @defer.inlineCallbacks def test_get_builder_number(self): build = yield self.callGet(('builders', 77, 'builds', 5)) self.validateData(build) self.assertEqual(build['buildid'], 15) @defer.inlineCallbacks def test_get_buildername_number(self): build = yield self.callGet(('builders', 'builder77', 'builds', 5)) self.validateData(build) self.assertEqual(build['buildid'], 15) @defer.inlineCallbacks def test_get_buildername_not_existing_number(self): build = yield self.callGet(('builders', 'builder77_nope', 'builds', 5)) self.assertEqual(build, None) @defer.inlineCallbacks def test_properties_injection(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('property', 'eq', [False])]) build = yield self.callGet(('builders', 77, 'builds', 5), resultSpec=resultSpec) self.validateData(build) self.assertIn('properties', build) @defer.inlineCallbacks def test_action_stop(self): yield self.callControl("stop", {}, ('builders', 77, 'builds', 5)) self.master.mq.assertProductions( [(('control', 'builds', '15', 'stop'), {'reason': 'no reason'})]) @defer.inlineCallbacks def test_action_stop_reason(self): yield self.callControl("stop", {'reason': 'because'}, ('builders', 77, 'builds', 5)) self.master.mq.assertProductions( [(('control', 'builds', '15', 'stop'), {'reason': 'because'})]) @defer.inlineCallbacks def test_action_rebuild(self): self.patch(self.master.data.updates, "rebuildBuildrequest", mock.Mock(spec=self.master.data.updates.rebuildBuildrequest, return_value=(1, [2]))) r = yield self.callControl("rebuild", {}, ('builders', 77, 'builds', 5)) self.assertEqual(r, (1, [2])) buildrequest = yield self.master.data.get(('buildrequests', 82)) self.master.data.updates.rebuildBuildrequest.assert_called_with( buildrequest) class BuildsEndpoint(endpoint.EndpointMixin, unittest.TestCase): 
endpointClass = builds.BuildsEndpoint resourceTypeClass = builds.Build def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Builder(id=78, name='builder78'), fakedb.Builder(id=79, name='builder79'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=78, masterid=88, workerid=12, buildrequestid=83, number=5, complete_at=1), fakedb.Build(id=16, builderid=79, masterid=88, workerid=12, buildrequestid=84, number=6, complete_at=1), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_all(self): builds = yield self.callGet(('builds',)) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4, 5, 6]) @defer.inlineCallbacks def test_get_builder(self): builds = yield self.callGet(('builders', 78, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5]) @defer.inlineCallbacks def test_get_buildername(self): builds = yield self.callGet(('builders', 'builder78', 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5]) @defer.inlineCallbacks def test_get_buildername_not_existing(self): builds = yield self.callGet(('builders', 'builder78_nope', 'builds')) self.assertEqual(builds, []) @defer.inlineCallbacks def test_get_buildrequest(self): builds = yield self.callGet(('buildrequests', 82, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_buildrequest_not_existing(self): builds = yield self.callGet(('buildrequests', 899, 'builds')) self.assertEqual(builds, []) @defer.inlineCallbacks def test_get_buildrequest_via_filter(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('buildrequestid', 'eq', [82])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_buildrequest_via_filter_with_string(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('buildrequestid', 'eq', ['82'])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_worker(self): builds = yield self.callGet(('workers', 13, 'builds')) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_complete(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('complete', 'eq', [False])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_get_complete_at(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('complete_at', 'eq', [None])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(build) for build in builds] 
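        # Fixture note: only builds 13 and 14 (numbers 3 and 4) were inserted
        # above without complete_at, so filtering on complete_at == None must
        # return exactly those two.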
self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) @defer.inlineCallbacks def test_properties_injection(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('property', 'eq', [False])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) for b in builds: self.validateData(b) self.assertIn('properties', b) @defer.inlineCallbacks def test_get_filter_eq(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('builderid', 'eq', [78, 79])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(b) for b in builds] self.assertEqual(sorted([b['number'] for b in builds]), [5, 6]) @defer.inlineCallbacks def test_get_filter_ne(self): resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('builderid', 'ne', [78, 79])]) builds = yield self.callGet(('builds',), resultSpec=resultSpec) [self.validateData(b) for b in builds] self.assertEqual(sorted([b['number'] for b in builds]), [3, 4]) class Build(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): new_build_event = {'builderid': 10, 'buildid': 100, 'buildrequestid': 13, 'workerid': 20, 'complete': False, 'complete_at': None, 'masterid': 824, 'number': 1, 'results': None, 'started_at': epoch2datetime(1), 'state_string': 'created', 'properties': {}} def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = builds.Build(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.builds, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with(*(exp_args or args), **(exp_kwargs or kwargs)) @defer.inlineCallbacks def do_test_event(self, method, exp_events=None, *args, **kwargs): self.reactor.advance(1) if exp_events is None: exp_events = [] yield method(*args, **kwargs) self.master.mq.assertProductions(exp_events) def test_signature_newBuild(self): @self.assertArgSpecMatches( self.master.data.updates.addBuild, # fake self.rtype.addBuild) # real def newBuild(self, builderid, buildrequestid, workerid): pass def test_newBuild(self): return self.do_test_callthrough('addBuild', self.rtype.addBuild, builderid=10, buildrequestid=13, workerid=20, exp_kwargs=dict(builderid=10, buildrequestid=13, workerid=20, masterid=self.master.masterid, state_string='created')) def test_newBuildEvent(self): @defer.inlineCallbacks def addBuild(*args, **kwargs): buildid, _ = yield self.rtype.addBuild(*args, **kwargs) yield self.rtype.generateNewBuildEvent(buildid) return None return self.do_test_event(addBuild, builderid=10, buildrequestid=13, workerid=20, exp_events=[(('builders', '10', 'builds', '1', 'new'), self.new_build_event), (('builds', '100', 'new'), self.new_build_event), (('workers', '20', 'builds', '100', 'new'), self.new_build_event)]) def test_signature_setBuildStateString(self): @self.assertArgSpecMatches( self.master.data.updates.setBuildStateString, # fake self.rtype.setBuildStateString) # real def setBuildStateString(self, buildid, state_string): pass def test_setBuildStateString(self): return self.do_test_callthrough('setBuildStateString', self.rtype.setBuildStateString, buildid=10, state_string='a b') def test_signature_finishBuild(self): @self.assertArgSpecMatches( self.master.data.updates.finishBuild, # fake self.rtype.finishBuild) # real def 
finishBuild(self, buildid, results): pass def test_finishBuild(self): return self.do_test_callthrough('finishBuild', self.rtype.finishBuild, buildid=15, results=3) buildbot-2.6.0/master/buildbot/test/unit/test_data_buildsets.py000066400000000000000000000367671361162603000250010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import interfaces from buildbot.data import buildsets from buildbot.data import resultspec from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces as util_interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime A_TIMESTAMP = 1341700729 A_TIMESTAMP_EPOCH = epoch2datetime(A_TIMESTAMP) EARLIER = 1248529376 EARLIER_EPOCH = epoch2datetime(EARLIER) class BuildsetEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildsets.BuildsetEndpoint resourceTypeClass = buildsets.Buildset def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=13, reason='because I said so'), fakedb.SourceStamp(id=92), fakedb.SourceStamp(id=93), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=93), fakedb.Buildset(id=14, reason='no sourcestamps'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): buildset = yield self.callGet(('buildsets', 13)) self.validateData(buildset) self.assertEqual(buildset['reason'], 'because I said so') @defer.inlineCallbacks def test_get_existing_no_sourcestamps(self): buildset = yield self.callGet(('buildsets', 14)) self.validateData(buildset) self.assertEqual(buildset['sourcestamps'], []) @defer.inlineCallbacks def test_get_missing(self): buildset = yield self.callGet(('buildsets', 99)) self.assertEqual(buildset, None) class BuildsetsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = buildsets.BuildsetsEndpoint resourceTypeClass = buildsets.Buildset def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=92), fakedb.Buildset(id=13, complete=True), fakedb.Buildset(id=14, complete=False), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=14, sourcestampid=92), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): buildsets = yield self.callGet(('buildsets',)) self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 13) self.validateData(buildsets[1]) self.assertEqual(buildsets[1]['bsid'], 14) @defer.inlineCallbacks def test_get_complete(self): f 
= resultspec.Filter('complete', 'eq', [True]) buildsets = yield self.callGet(('buildsets',), resultSpec=resultspec.ResultSpec(filters=[f])) self.assertEqual(len(buildsets), 1) self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 13) @defer.inlineCallbacks def test_get_incomplete(self): f = resultspec.Filter('complete', 'eq', [False]) buildsets = yield self.callGet(('buildsets',), resultSpec=resultspec.ResultSpec(filters=[f])) self.assertEqual(len(buildsets), 1) self.validateData(buildsets[0]) self.assertEqual(buildsets[0]['bsid'], 14) class Buildset(TestReactorMixin, util_interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = buildsets.Buildset(self.master) return self.master.db.insertTestData([ fakedb.SourceStamp(id=234, branch='br', codebase='cb', project='pr', repository='rep', revision='rev', created_at=89834834), fakedb.Builder(id=42, name='bldr1'), fakedb.Builder(id=43, name='bldr2'), ]) SS234_DATA = {'branch': 'br', 'codebase': 'cb', 'patch': None, 'project': 'pr', 'repository': 'rep', 'revision': 'rev', 'created_at': epoch2datetime(89834834), 'ssid': 234} def test_signature_addBuildset(self): @self.assertArgSpecMatches( self.master.data.updates.addBuildset, # fake self.rtype.addBuildset) # real def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): pass @defer.inlineCallbacks def do_test_addBuildset(self, kwargs, expectedReturn, expectedMessages, expectedBuildset): """Run a test of addBuildset. @param kwargs: kwargs to addBuildset @param expectedReturn: expected return value - tuple of (bsid, brids) @param expectedMessages: expected mq messages transmitted @param expectedBuildset: expected buildset inserted into the db The buildset is added at time A_TIMESTAMP. Note that addBuildset does not add sourcestamps, so this method assumes there are none in the db. 
""" self.reactor.advance(A_TIMESTAMP) (bsid, brids) = yield self.rtype.addBuildset(**kwargs) self.assertEqual((bsid, brids), expectedReturn) # check the correct message was received self.master.mq.assertProductions( expectedMessages, orderMatters=False) # and that the correct data was inserted into the db self.master.db.buildsets.assertBuildset(bsid, expectedBuildset) def _buildRequestMessageDict(self, brid, bsid, builderid): return {'builderid': builderid, 'buildrequestid': brid, 'buildsetid': bsid, 'claimed': False, 'claimed_at': None, 'claimed_by_masterid': None, 'complete': False, 'complete_at': None, 'priority': 0, 'results': -1, 'submitted_at': epoch2datetime(A_TIMESTAMP), 'waited_for': True, 'properties': None} def _buildRequestMessage1(self, brid, bsid, builderid): return ( ('buildsets', str(bsid), 'builders', str(builderid), 'buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildRequestMessage2(self, brid, bsid, builderid): return ( ('buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildRequestMessage3(self, brid, bsid, builderid): return ( ('builders', str(builderid), 'buildrequests', str(brid), 'new'), self._buildRequestMessageDict(brid, bsid, builderid)) def _buildsetMessage(self, bsid, external_idstring='extid', reason='because', scheduler='fakesched', sourcestampids=None, submitted_at=A_TIMESTAMP): if sourcestampids is None: sourcestampids = [234] ssmap = {234: self.SS234_DATA} return ( ('buildsets', str(bsid), 'new'), dict(bsid=bsid, complete=False, complete_at=None, external_idstring=external_idstring, reason=reason, results=None, scheduler=scheduler, sourcestamps=[ssmap[ssid] for ssid in sourcestampids], submitted_at=submitted_at)) def _buildsetCompleteMessage(self, bsid, complete_at=A_TIMESTAMP_EPOCH, submitted_at=A_TIMESTAMP_EPOCH, external_idstring='extid', reason='because', results=0, sourcestampids=None): if sourcestampids is None: sourcestampids = [234] ssmap = {234: self.SS234_DATA} return ( ('buildsets', str(bsid), 'complete'), dict(bsid=bsid, complete=True, complete_at=complete_at, external_idstring=external_idstring, reason=reason, results=results, submitted_at=submitted_at, sourcestamps=[ssmap[ssid] for ssid in sourcestampids])) def test_addBuildset_two_builderNames(self): @implementer(interfaces.IScheduler) class FakeSched: name = 'fakesched' kwargs = dict(scheduler='fakesched', reason='because', sourcestamps=[234], external_idstring='extid', builderids=[42, 43], waited_for=True) expectedReturn = (200, {42: 1000, 43: 1001}) expectedMessages = [ self._buildRequestMessage1(1000, 200, 42), self._buildRequestMessage2(1000, 200, 42), self._buildRequestMessage3(1000, 200, 42), self._buildRequestMessage1(1001, 200, 43), self._buildRequestMessage2(1001, 200, 43), self._buildRequestMessage3(1001, 200, 43), self._buildsetMessage(200), ] expectedBuildset = dict(reason='because', properties={}, external_idstring='extid') return self.do_test_addBuildset(kwargs, expectedReturn, expectedMessages, expectedBuildset) def test_addBuildset_no_builderNames(self): @implementer(interfaces.IScheduler) class FakeSched: name = 'fakesched' kwargs = dict(scheduler='fakesched', reason='because', sourcestamps=[234], external_idstring='extid', waited_for=False) expectedReturn = (200, {}) expectedMessages = [ self._buildsetMessage(200), # with no builderNames, this is done already self._buildsetCompleteMessage(200), ] expectedBuildset = dict(reason='because', properties={}, external_idstring='extid') 
return self.do_test_addBuildset(kwargs, expectedReturn, expectedMessages, expectedBuildset) def test_signature_maybeBuildsetComplete(self): @self.assertArgSpecMatches( self.master.data.updates.maybeBuildsetComplete, # fake self.rtype.maybeBuildsetComplete) # real def maybeBuildsetComplete(self, bsid): pass @defer.inlineCallbacks def do_test_maybeBuildsetComplete(self, buildRequestCompletions=None, buildRequestResults=None, buildsetComplete=False, expectComplete=False, expectMessage=False, expectSuccess=True): """Test maybeBuildsetComplete. @param buildRequestCompletions: dict mapping brid to True if complete, else False (and defaulting to False) @param buildRequestResults: dict mapping brid to result (defaulting to SUCCESS) @param buildsetComplete: true if the buildset is already complete @param expectComplete: true if the buildset should be complete at exit @param expectMessage: true if a buildset completion message is expected @param expectSuccess: if expectComplete, whether to expect the buildset to be complete This first adds two buildsets to the database - 72 and 73. Buildset 72 is already complete if buildsetComplete is true; 73 is not complete. It adds four buildrequests - 42, 43, and 44 for buildset 72, and 45 for buildset 73. The completion and results are based on buidlRequestCompletions and buildRequestResults. Then, maybeBuildsetComplete is called for buildset 72, and the expectations are checked. """ if buildRequestCompletions is None: buildRequestCompletions = {} if buildRequestResults is None: buildRequestResults = {} self.reactor.advance(A_TIMESTAMP) def mkbr(brid, bsid=72): return fakedb.BuildRequest(id=brid, buildsetid=bsid, builderid=42, complete=buildRequestCompletions.get( brid), results=buildRequestResults.get(brid, SUCCESS)) yield self.master.db.insertTestData([ fakedb.Builder(id=42, name='bldr1'), fakedb.Buildset(id=72, submitted_at=EARLIER, complete=buildsetComplete, complete_at=A_TIMESTAMP if buildsetComplete else None), mkbr(42), mkbr(43), mkbr(44), fakedb.BuildsetSourceStamp(buildsetid=72, sourcestampid=234), fakedb.Buildset(id=73, complete=False), mkbr(45, bsid=73), fakedb.BuildsetSourceStamp(buildsetid=73, sourcestampid=234), ]) yield self.rtype.maybeBuildsetComplete(72) self.master.db.buildsets.assertBuildsetCompletion(72, expectComplete) if expectMessage: self.assertEqual(self.master.mq.productions, [ self._buildsetCompleteMessage(72, results=SUCCESS if expectSuccess else FAILURE, submitted_at=EARLIER_EPOCH), ]) else: self.assertEqual(self.master.mq.productions, []) def test_maybeBuildsetComplete_not_yet(self): # only brid 42 is complete, so the buildset is not complete return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True}) def test_maybeBuildsetComplete_complete(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, expectComplete=True, expectMessage=True) def test_maybeBuildsetComplete_complete_failure(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, buildRequestResults={43: FAILURE}, expectComplete=True, expectMessage=True, expectSuccess=False) def test_maybeBuildsetComplete_already_complete(self): return self.do_test_maybeBuildsetComplete( buildRequestCompletions={42: True, 43: True, 44: True}, buildsetComplete=True, expectComplete=True, expectMessage=False) buildbot-2.6.0/master/buildbot/test/unit/test_data_changes.py000066400000000000000000000361101361162603000243710ustar00rootroot00000000000000# This file is part of 
Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import changes from buildbot.data import resultspec from buildbot.process.users import users from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class ChangeEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changes.ChangeEndpoint resourceTypeClass = changes.Change def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=234), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): change = yield self.callGet(('changes', '13')) self.validateData(change) self.assertEqual(change['project'], 'world-domination') @defer.inlineCallbacks def test_get_missing(self): change = yield self.callGet(('changes', '99')) self.assertEqual(change, None) class ChangesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changes.ChangesEndpoint resourceTypeClass = changes.Change def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=133), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=133), fakedb.SourceStamp(id=144), fakedb.Change(changeid=14, branch='devel', revision='9284', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=144), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): changes = yield self.callGet(('changes',)) self.validateData(changes[0]) self.assertEqual(changes[0]['changeid'], 13) self.validateData(changes[1]) self.assertEqual(changes[1]['changeid'], 14) @defer.inlineCallbacks def test_getRecentChanges(self): resultSpec = resultspec.ResultSpec(limit=1, order=('-changeid',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) self.validateData(changes[0]) self.assertEqual(changes[0]['changeid'], 14) self.assertEqual(len(changes), 1) @defer.inlineCallbacks def test_getChangesOtherOrder(self): resultSpec = resultspec.ResultSpec(limit=1, order=('-when_time_stamp',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) # limit not implemented for other order self.assertEqual(len(changes), 2) @defer.inlineCallbacks def test_getChangesOtherOffset(self): resultSpec = resultspec.ResultSpec( limit=1, offset=1, order=('-changeid',)) changes = yield self.callGet(('changes',), resultSpec=resultSpec) # limit not implemented for other offset 
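        # As with the ordering case above, the limit is not applied at this
        # layer when an offset is supplied, so both fixture changes come back
        # unpaged.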
self.assertEqual(len(changes), 2) class Change(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): changeEvent = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': '', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': '', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = changes.Change(self.master) def test_signature_addChange(self): @self.assertArgSpecMatches( self.master.data.updates.addChange, # fake self.rtype.addChange) # real def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): pass @defer.inlineCallbacks def do_test_addChange(self, kwargs, expectedRoutingKey, expectedMessage, expectedRow, expectedChangeUsers=None): if expectedChangeUsers is None: expectedChangeUsers = [] self.reactor.advance(10000000) changeid = yield self.rtype.addChange(**kwargs) self.assertEqual(changeid, 500) # check the correct message was received self.master.mq.assertProductions([ (expectedRoutingKey, expectedMessage), ]) # and that the correct data was inserted into the db self.master.db.changes.assertChange(500, expectedRow) self.master.db.changes.assertChangeUsers(500, expectedChangeUsers) def test_addChange(self): # src and codebase are default here kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') expectedMessage = self.changeEvent expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) @defer.inlineCallbacks def test_addChange_src_codebase(self): createUserObject = mock.Mock(spec=users.createUserObject) createUserObject.return_value = defer.succeed(123) self.patch(users, 'createUserObject', createUserObject) kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, properties={'foo': 20}, src='git', codebase='cb') expectedRoutingKey = ('changes', '500', 'new') expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': 'cb', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 
'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': 'cb', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='cb', project='Buildbot', sourcestampid=100, ) yield self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow, expectedChangeUsers=[123]) createUserObject.assert_called_once_with(self.master, 'warner', 'git') def test_addChange_src_codebaseGenerator(self): def preChangeGenerator(**kwargs): return kwargs self.master.config = mock.Mock(name='master.config') self.master.config.preChangeGenerator = preChangeGenerator self.master.config.codebaseGenerator = \ lambda change: 'cb-%s' % change['category'] kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': 'cb-devel', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': 'cb-devel', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='http://warner/0e92a098b', when_timestamp=256738404, category='devel', repository='git://warner', codebase='cb-devel', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) def test_addChange_repository_revision(self): self.master.config = mock.Mock(name='master.config') self.master.config.revlink = lambda rev, repo: 'foo%sbar%sbaz' % (repo, rev) # revlink is default here kwargs = dict(author='warner', committer='david', branch='warnerdb', category='devel', comments='fix whitespace', files=['master/buildbot/__init__.py'], project='Buildbot', repository='git://warner', codebase='', revision='0e92a098b', when_timestamp=256738404, properties={'foo': 20}) expectedRoutingKey = ('changes', '500', 'new') # When no revlink is passed to addChange, but a repository and revision is # passed, the revlink should be constructed by calling the revlink callable # in the config. 
We thus expect a revlink of 'foogit://warnerbar0e92a098bbaz' expectedMessage = { 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'codebase': '', 'comments': 'fix whitespace', 'changeid': 500, 'files': ['master/buildbot/__init__.py'], 'parent_changeids': [], 'project': 'Buildbot', 'properties': {'foo': (20, 'Change')}, 'repository': 'git://warner', 'revision': '0e92a098b', 'revlink': 'foogit://warnerbar0e92a098bbaz', 'when_timestamp': 256738404, 'sourcestamp': { 'branch': 'warnerdb', 'codebase': '', 'patch': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '0e92a098b', 'created_at': epoch2datetime(10000000), 'ssid': 100, }, # uid } expectedRow = fakedb.Change( changeid=500, author='warner', committer='david', comments='fix whitespace', branch='warnerdb', revision='0e92a098b', revlink='foogit://warnerbar0e92a098bbaz', when_timestamp=256738404, category='devel', repository='git://warner', codebase='', project='Buildbot', sourcestampid=100, ) return self.do_test_addChange(kwargs, expectedRoutingKey, expectedMessage, expectedRow) buildbot-2.6.0/master/buildbot/test/unit/test_data_changesources.py000066400000000000000000000204621361162603000256150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
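# --- Editor's note: illustrative sketch, not part of the original Buildbot
# sources.  The ChangeSource tests in this module exercise a claim pattern:
# trying to attach a changesource to a master returns False when another
# master already owns it, while unexpected errors propagate.  try_claim,
# set_master and AlreadyOwnedError below are placeholder names sketching that
# shape, not Buildbot APIs.
from twisted.internet import defer


class AlreadyOwnedError(Exception):
    pass


@defer.inlineCallbacks
def try_claim(set_master, changesourceid, masterid):
    # True on success, False if already claimed; other exceptions bubble up.
    try:
        yield set_master(changesourceid, masterid)
    except AlreadyOwnedError:
        return False
    return True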
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot.data import changesources from buildbot.db.changesources import ChangeSourceAlreadyClaimedError from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class ChangeSourceEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changesources.ChangeSourceEndpoint resourceTypeClass = changesources.ChangeSource def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=None), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), fakedb.ChangeSource(id=15, name='another:changesource'), fakedb.ChangeSourceMaster(changesourceid=15, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): """get an existing changesource by id""" changesource = yield self.callGet(('changesources', 14)) self.validateData(changesource) self.assertEqual(changesource['name'], 'other:changesource') @defer.inlineCallbacks def test_get_no_master(self): """get a changesource with no master""" changesource = yield self.callGet(('changesources', 13)) self.validateData(changesource) self.assertEqual(changesource['master'], None), @defer.inlineCallbacks def test_get_masterid_existing(self): """get an existing changesource by id on certain master""" changesource = yield self.callGet(('masters', 22, 'changesources', 14)) self.validateData(changesource) self.assertEqual(changesource['name'], 'other:changesource') @defer.inlineCallbacks def test_get_masterid_no_match(self): """get an existing changesource by id on the wrong master""" changesource = yield self.callGet(('masters', 33, 'changesources', 13)) self.assertEqual(changesource, None) @defer.inlineCallbacks def test_get_masterid_missing(self): """get an existing changesource by id on an invalid master""" changesource = yield self.callGet(('masters', 25, 'changesources', 13)) self.assertEqual(changesource, None) @defer.inlineCallbacks def test_get_missing(self): """get an invalid changesource by id""" changesource = yield self.callGet(('changesources', 99)) self.assertEqual(changesource, None) class ChangeSourcesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = changesources.ChangeSourcesEndpoint resourceTypeClass = changesources.ChangeSource def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=None), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), fakedb.ChangeSource(id=15, name='another:changesource'), fakedb.ChangeSourceMaster(changesourceid=15, masterid=33), fakedb.ChangeSource(id=16, name='wholenother:changesource'), fakedb.ChangeSourceMaster(changesourceid=16, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): changesources = yield self.callGet(('changesources',)) [self.validateData(cs) for cs in changesources] 
self.assertEqual(sorted([m['changesourceid'] for m in changesources]), [13, 14, 15, 16]) @defer.inlineCallbacks def test_get_masterid(self): changesources = yield self.callGet(('masters', 33, 'changesources')) [self.validateData(cs) for cs in changesources] self.assertEqual(sorted([m['changesourceid'] for m in changesources]), [15, 16]) @defer.inlineCallbacks def test_get_masterid_missing(self): changesources = yield self.callGet(('masters', 23, 'changesources')) self.assertEqual(changesources, []) class ChangeSource(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = changesources.ChangeSource(self.master) def test_signature_findChangeSourceId(self): @self.assertArgSpecMatches( self.master.data.updates.findChangeSourceId, # fake self.rtype.findChangeSourceId) # real def findChangeSourceId(self, name): pass @defer.inlineCallbacks def test_findChangeSourceId(self): self.master.db.changesources.findChangeSourceId = mock.Mock( return_value=defer.succeed(10)) self.assertEqual((yield self.rtype.findChangeSourceId('cs')), 10) self.master.db.changesources.findChangeSourceId.assert_called_with( 'cs') def test_signature_trySetChangeSourceMaster(self): @self.assertArgSpecMatches( self.master.data.updates.trySetChangeSourceMaster, # fake self.rtype.trySetChangeSourceMaster) # real def trySetChangeSourceMaster(self, changesourceid, masterid): pass @defer.inlineCallbacks def test_trySetChangeSourceMaster_succeeds(self): self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=defer.succeed(None)) yield self.rtype.trySetChangeSourceMaster(10, 20) self.master.db.changesources.setChangeSourceMaster.assert_called_with( 10, 20) @defer.inlineCallbacks def test_trySetChangeSourceMaster_fails(self): d = defer.fail(failure.Failure( ChangeSourceAlreadyClaimedError('oh noes'))) self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=d) result = yield self.rtype.trySetChangeSourceMaster(10, 20) self.assertFalse(result) @defer.inlineCallbacks def test_trySetChangeSourceMaster_raisesOddException(self): d = defer.fail(failure.Failure(RuntimeError('oh noes'))) self.master.db.changesources.setChangeSourceMaster = mock.Mock( return_value=d) try: yield self.rtype.trySetChangeSourceMaster(10, 20) except RuntimeError: pass else: self.fail("The RuntimeError did not propagate") @defer.inlineCallbacks def test__masterDeactivated(self): yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.ChangeSource(id=13, name='some:changesource'), fakedb.ChangeSourceMaster(changesourceid=13, masterid=22), fakedb.ChangeSource(id=14, name='other:changesource'), fakedb.ChangeSourceMaster(changesourceid=14, masterid=22), ]) yield self.rtype._masterDeactivated(22) self.master.db.changesources.assertChangeSourceMaster(13, None) self.master.db.changesources.assertChangeSourceMaster(14, None) buildbot-2.6.0/master/buildbot/test/unit/test_data_connector.py000066400000000000000000000216211361162603000247540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import reflect from twisted.trial import unittest from buildbot.data import base from buildbot.data import connector from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.data import types from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class Tests(interfaces.InterfaceTests): def setUp(self): raise NotImplementedError def test_signature_get(self): @self.assertArgSpecMatches(self.data.get) def get(self, path, filters=None, fields=None, order=None, limit=None, offset=None): pass def test_signature_getEndpoint(self): @self.assertArgSpecMatches(self.data.getEndpoint) def getEndpoint(self, path): pass def test_signature_control(self): @self.assertArgSpecMatches(self.data.control) def control(self, action, args, path): pass def test_signature_updates_addChange(self): @self.assertArgSpecMatches(self.data.updates.addChange) def addChange(self, files=None, comments=None, author=None, committer=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase=None, project='', src=None): pass def test_signature_updates_masterActive(self): @self.assertArgSpecMatches(self.data.updates.masterActive) def masterActive(self, name, masterid): pass def test_signature_updates_masterStopped(self): @self.assertArgSpecMatches(self.data.updates.masterStopped) def masterStopped(self, name, masterid): pass def test_signature_updates_addBuildset(self): @self.assertArgSpecMatches(self.data.updates.addBuildset) def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='', properties=None, builderids=None, external_idstring=None, parent_buildid=None, parent_relationship=None): pass def test_signature_updates_maybeBuildsetComplete(self): @self.assertArgSpecMatches(self.data.updates.maybeBuildsetComplete) def maybeBuildsetComplete(self, bsid): pass def test_signature_updates_updateBuilderList(self): @self.assertArgSpecMatches(self.data.updates.updateBuilderList) def updateBuilderList(self, masterid, builderNames): pass class TestFakeData(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) self.data = self.master.data class TestDataConnector(TestReactorMixin, unittest.TestCase, Tests): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True) self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) class DataConnector(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) # don't load by default self.patch(connector.DataConnector, 'submodules', []) self.data = connector.DataConnector() yield self.data.setServiceParent(self.master) def patchFooPattern(self): cls = type('FooEndpoint', (base.Endpoint,), {}) ep = cls(None, self.master) ep.get = mock.Mock(name='FooEndpoint.get') ep.get.return_value = defer.succeed({'val': 9999}) self.data.matcher[('foo', 'n:fooid', 'bar')] = ep 
return ep def patchFooListPattern(self): cls = type('FoosEndpoint', (base.Endpoint,), {}) ep = cls(None, self.master) ep.get = mock.Mock(name='FoosEndpoint.get') ep.get.return_value = defer.succeed( [{'val': v} for v in range(900, 920)]) self.data.matcher[('foo',)] = ep return ep # tests def test_sets_master(self): self.assertIdentical(self.master, self.data.master) def test_scanModule(self): # use this module as a test mod = reflect.namedModule('buildbot.test.unit.test_data_connector') self.data._scanModule(mod) # check that it discovered MyResourceType and updated endpoints match = self.data.matcher[('test', '10')] self.assertIsInstance(match[0], TestEndpoint) self.assertEqual(match[1], dict(testid=10)) match = self.data.matcher[('test', '10', 'p1')] self.assertIsInstance(match[0], TestEndpoint) match = self.data.matcher[('test', '10', 'p2')] self.assertIsInstance(match[0], TestEndpoint) match = self.data.matcher[('test',)] self.assertIsInstance(match[0], TestsEndpoint) self.assertEqual(match[1], dict()) match = self.data.matcher[('test', 'foo')] self.assertIsInstance(match[0], TestsEndpointSubclass) self.assertEqual(match[1], dict()) # and that it found the update method self.assertEqual(self.data.updates.testUpdate(), "testUpdate return") # and that it added the single root link self.assertEqual(self.data.rootLinks, [{'name': 'tests'}]) # and that it added an attribute self.assertIsInstance(self.data.rtypes.test, TestResourceType) def test_getEndpoint(self): ep = self.patchFooPattern() got = self.data.getEndpoint(('foo', '10', 'bar')) self.assertEqual(got, (ep, {'fooid': 10})) def test_getEndpoint_missing(self): with self.assertRaises(exceptions.InvalidPathError): self.data.getEndpoint(('xyz',)) @defer.inlineCallbacks def test_get(self): ep = self.patchFooPattern() gotten = yield self.data.get(('foo', '10', 'bar')) self.assertEqual(gotten, {'val': 9999}) ep.get.assert_called_once_with(mock.ANY, {'fooid': 10}) @defer.inlineCallbacks def test_get_filters(self): ep = self.patchFooListPattern() gotten = yield self.data.get(('foo',), filters=[resultspec.Filter('val', 'lt', [902])]) self.assertEqual(gotten, base.ListResult( [{'val': 900}, {'val': 901}], total=2)) ep.get.assert_called_once_with(mock.ANY, {}) @defer.inlineCallbacks def test_get_resultSpec_args(self): ep = self.patchFooListPattern() f = resultspec.Filter('val', 'gt', [909]) gotten = yield self.data.get(('foo',), filters=[f], fields=['val'], order=['-val'], limit=2) self.assertEqual(gotten, base.ListResult( [{'val': 919}, {'val': 918}], total=10, limit=2)) ep.get.assert_called_once_with(mock.ANY, {}) @defer.inlineCallbacks def test_control(self): ep = self.patchFooPattern() ep.control = mock.Mock(name='MyEndpoint.control') ep.control.return_value = defer.succeed('controlled') gotten = yield self.data.control('foo!', {'arg': 2}, ('foo', '10', 'bar')) self.assertEqual(gotten, 'controlled') ep.control.assert_called_once_with('foo!', {'arg': 2}, {'fooid': 10}) # classes discovered by test_scanModule, above class TestsEndpoint(base.Endpoint): pathPatterns = "/test" rootLinkName = 'tests' class TestsEndpointParentClass(base.Endpoint): rootLinkName = 'shouldnt-see-this' class TestsEndpointSubclass(TestsEndpointParentClass): pathPatterns = "/test/foo" class TestEndpoint(base.Endpoint): pathPatterns = """ /test/n:testid /test/n:testid/p1 /test/n:testid/p2 """ class TestResourceType(base.ResourceType): name = 'test' endpoints = [TestsEndpoint, TestEndpoint, TestsEndpointSubclass] keyFields = ('testid', ) class EntityType(types.Entity): 
testid = types.Integer() entityType = EntityType(name) @base.updateMethod def testUpdate(self): return "testUpdate return" buildbot-2.6.0/master/buildbot/test/unit/test_data_forceschedulers.py000066400000000000000000000166711361162603000261530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import forceschedulers from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test.util import endpoint expected_default = { 'all_fields': [{'columns': 1, 'autopopulate': None, 'default': '', 'fields': [{'default': '', 'autopopulate': None, 'fullName': 'username', 'hide': False, 'label': 'Your name:', 'maxsize': None, 'multiple': False, 'name': 'username', 'need_email': True, 'regex': None, 'required': False, 'size': 30, 'tablabel': 'Your name:', 'type': 'username'}, {'default': 'force build', 'autopopulate': None, 'fullName': 'reason', 'hide': False, 'label': 'reason', 'maxsize': None, 'multiple': False, 'name': 'reason', 'regex': None, 'required': False, 'size': 20, 'tablabel': 'reason', 'type': 'text'}], 'fullName': None, 'hide': False, 'label': '', 'layout': 'vertical', 'maxsize': None, 'multiple': False, 'name': '', 'regex': None, 'required': False, 'tablabel': '', 'type': 'nested'}, {'columns': 2, 'default': '', 'fields': [{'default': '', 'autopopulate': None, 'fullName': 'branch', 'hide': False, 'label': 'Branch:', 'multiple': False, 'maxsize': None, 'name': 'branch', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Branch:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'project', 'hide': False, 'label': 'Project:', 'maxsize': None, 'multiple': False, 'name': 'project', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Project:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'repository', 'hide': False, 'label': 'Repository:', 'maxsize': None, 'multiple': False, 'name': 'repository', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Repository:', 'type': 'text'}, {'default': '', 'autopopulate': None, 'fullName': 'revision', 'hide': False, 'label': 'Revision:', 'maxsize': None, 'multiple': False, 'name': 'revision', 'regex': None, 'required': False, 'size': 10, 'tablabel': 'Revision:', 'type': 'text'}], 'autopopulate': None, 'fullName': None, 'hide': False, 'label': '', 'layout': 'vertical', 'maxsize': None, 'multiple': False, 'name': '', 'regex': None, 'required': False, 'tablabel': '', 'type': 'nested'}], 'builder_names': ['builder'], 'button_name': 'defaultforce', 'label': 'defaultforce', 'name': 'defaultforce', 'enabled': True} class ForceschedulerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = forceschedulers.ForceSchedulerEndpoint resourceTypeClass = forceschedulers.ForceScheduler maxDiff = None def setUp(self): 
self.setUpEndpoint() scheds = [ForceScheduler( name="defaultforce", builderNames=["builder"])] self.master.allSchedulers = lambda: scheds def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): res = yield self.callGet(('forceschedulers', "defaultforce")) self.validateData(res) self.assertEqual(res, expected_default) @defer.inlineCallbacks def test_get_missing(self): res = yield self.callGet(('forceschedulers', 'foo')) self.assertEqual(res, None) class ForceSchedulersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = forceschedulers.ForceSchedulersEndpoint resourceTypeClass = forceschedulers.ForceScheduler maxDiff = None def setUp(self): self.setUpEndpoint() scheds = [ForceScheduler( name="defaultforce", builderNames=["builder"])] self.master.allSchedulers = lambda: scheds def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): res = yield self.callGet(('forceschedulers', )) self.assertEqual(res, [expected_default]) buildbot-2.6.0/master/buildbot/test/unit/test_data_logchunks.py000066400000000000000000000174201361162603000247610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.data import logchunks from buildbot.data import resultspec from buildbot.test.fake import fakedb from buildbot.test.util import endpoint class LogChunkEndpointBase(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logchunks.LogChunkEndpoint resourceTypeClass = logchunks.LogChunk endpointname = "contents" log60Lines = ['line zero', 'line 1', 'line TWO', 'line 3', 'line 2**2', 'another line', 'yet another line'] log61Lines = ['%08d' % i for i in range(100)] def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77), fakedb.Worker(id=13, name='wrk'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=9, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', slug='stdio', type='s', num_lines=7), fakedb.LogChunk(logid=60, first_line=0, last_line=1, compressed=0, content=textwrap.dedent("""\ line zero line 1""")), fakedb.LogChunk(logid=60, first_line=2, last_line=4, compressed=0, content=textwrap.dedent("""\ line TWO line 3 line 2**2""")), fakedb.LogChunk(logid=60, first_line=5, last_line=5, compressed=0, content="another line"), fakedb.LogChunk(logid=60, first_line=6, last_line=6, compressed=0, content="yet another line"), fakedb.Log(id=61, stepid=50, name='errors', slug='errors', type='t', num_lines=100), ] + [ fakedb.LogChunk(logid=61, first_line=i, last_line=i, compressed=0, content="%08d" % i) for i in range(100) ] + [ fakedb.Log(id=62, stepid=50, name='notes', slug='notes', type='t', num_lines=0), # logid 62 is empty ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def do_test_chunks(self, path, logid, expLines): # get the whole thing in one go logchunk = yield self.callGet(path) self.validateData(logchunk) expContent = '\n'.join(expLines) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': 0, 'content': expContent}) # line-by-line for i, expLine in enumerate(expLines): logchunk = yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=i, limit=1)) self.validateData(logchunk) self.assertEqual(logchunk, {'logid': logid, 'firstline': i, 'content': expLines[i] + '\n'}) # half and half mid = int(len(expLines) / 2) for f, length in (0, mid), (mid, len(expLines) - 1): logchunk = yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=f, limit=length - f + 1)) self.validateData(logchunk) expContent = '\n'.join(expLines[f:length + 1]) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': f, 'content': expContent}) # truncated at EOF f, length = len(expLines) - 2, len(expLines) + 10 logchunk = yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=f, limit=length - f + 1)) self.validateData(logchunk) expContent = '\n'.join(expLines[-2:]) + '\n' self.assertEqual(logchunk, {'logid': logid, 'firstline': f, 'content': expContent}) # some illegal stuff self.assertEqual( (yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=-1))), None) self.assertEqual( (yield self.callGet(path, resultSpec=resultspec.ResultSpec(offset=10, limit=-1))), None) def test_get_logid_60(self): return self.do_test_chunks(('logs', 60, self.endpointname), 60, self.log60Lines) def test_get_logid_61(self): return self.do_test_chunks(('logs', 61, self.endpointname), 61, self.log61Lines) class 
LogChunkEndpoint(LogChunkEndpointBase): @defer.inlineCallbacks def test_get_missing(self): logchunk = yield self.callGet(('logs', 99, self.endpointname)) self.assertEqual(logchunk, None) @defer.inlineCallbacks def test_get_empty(self): logchunk = yield self.callGet(('logs', 62, self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['content'], '') @defer.inlineCallbacks def test_get_by_stepid(self): logchunk = yield self.callGet( ('steps', 50, 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) @defer.inlineCallbacks def test_get_by_buildid(self): logchunk = yield self.callGet( ('builds', 13, 'steps', 9, 'logs', 'stdio', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 60) @defer.inlineCallbacks def test_get_by_builder(self): logchunk = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 9, 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) @defer.inlineCallbacks def test_get_by_builder_step_name(self): logchunk = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 'make', 'logs', 'errors', self.endpointname)) self.validateData(logchunk) self.assertEqual(logchunk['logid'], 61) class RawLogChunkEndpoint(LogChunkEndpointBase): endpointClass = logchunks.RawLogChunkEndpoint endpointname = "raw" def validateData(self, data): self.assertIsInstance(data['raw'], str) self.assertIsInstance(data['mime-type'], str) self.assertIsInstance(data['filename'], str) @defer.inlineCallbacks def do_test_chunks(self, path, logid, expLines): # get the whole thing in one go logchunk = yield self.callGet(path) self.validateData(logchunk) if logid == 60: expContent = '\n'.join([line[1:] for line in expLines]) expFilename = "stdio" else: expContent = '\n'.join(expLines) + '\n' expFilename = "errors" self.assertEqual(logchunk, {'filename': expFilename, 'mime-type': "text/plain", 'raw': expContent}) buildbot-2.6.0/master/buildbot/test/unit/test_data_logs.py000066400000000000000000000227201361162603000237270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import logs from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class LogEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logs.LogEndpoint resourceTypeClass = logs.Log def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=5, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', slug='stdio', type='s'), fakedb.Log(id=61, stepid=50, name='errors', slug='errors', type='t'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): log = yield self.callGet(('logs', 60)) self.validateData(log) self.assertEqual(log, { 'logid': 60, 'name': 'stdio', 'slug': 'stdio', 'stepid': 50, 'complete': False, 'num_lines': 0, 'type': 's'}) @defer.inlineCallbacks def test_get_missing(self): log = yield self.callGet(('logs', 62)) self.assertEqual(log, None) @defer.inlineCallbacks def test_get_by_stepid(self): log = yield self.callGet(('steps', 50, 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_buildid(self): log = yield self.callGet(('builds', 13, 'steps', 5, 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_builder(self): log = yield self.callGet( ('builders', '77', 'builds', 3, 'steps', 5, 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_builder_step_name(self): log = yield self.callGet( ('builders', '77', 'builds', 3, 'steps', 'make', 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') @defer.inlineCallbacks def test_get_by_buildername_step_name(self): log = yield self.callGet( ('builders', 'builder77', 'builds', 3, 'steps', 'make', 'logs', 'errors')) self.validateData(log) self.assertEqual(log['name'], 'errors') class LogsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = logs.LogsEndpoint resourceTypeClass = logs.Log def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Builder(id=77), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Step(id=50, buildid=13, number=9, name='make'), fakedb.Log(id=60, stepid=50, name='stdio', type='s'), fakedb.Log(id=61, stepid=50, name='errors', type='t'), fakedb.Step(id=51, buildid=13, number=10, name='make_install'), fakedb.Log(id=70, stepid=51, name='stdio', type='s'), fakedb.Log(id=71, stepid=51, name='results_html', type='h'), fakedb.Step(id=52, buildid=13, number=11, name='nothing'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_stepid(self): logs = yield self.callGet(('steps', 50, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['errors', 'stdio']) @defer.inlineCallbacks def 
test_get_stepid_empty(self): logs = yield self.callGet(('steps', 52, 'logs')) self.assertEqual(logs, []) @defer.inlineCallbacks def test_get_stepid_missing(self): logs = yield self.callGet(('steps', 99, 'logs')) self.assertEqual(logs, []) @defer.inlineCallbacks def test_get_buildid_step_name(self): logs = yield self.callGet( ('builds', 13, 'steps', 'make_install', 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) @defer.inlineCallbacks def test_get_buildid_step_number(self): logs = yield self.callGet(('builds', 13, 'steps', 10, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) @defer.inlineCallbacks def test_get_builder_build_number_step_name(self): logs = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 'make', 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['errors', 'stdio']) @defer.inlineCallbacks def test_get_builder_build_number_step_number(self): logs = yield self.callGet( ('builders', 77, 'builds', 3, 'steps', 10, 'logs')) [self.validateData(log) for log in logs] self.assertEqual(sorted([b['name'] for b in logs]), ['results_html', 'stdio']) class Log(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = logs.Log(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.logs, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with(*(exp_args or args), **(exp_kwargs or kwargs)) def test_signature_newLog(self): @self.assertArgSpecMatches( self.master.data.updates.addLog, # fake self.rtype.addLog) # real def newLog(self, stepid, name, type): pass @defer.inlineCallbacks def test_newLog_uniquify(self): tries = [] @self.assertArgSpecMatches(self.master.db.logs.addLog) def addLog(stepid, name, slug, type): tries.append((stepid, name, slug, type)) if len(tries) < 3: return defer.fail(KeyError()) return defer.succeed(23) self.patch(self.master.db.logs, 'addLog', addLog) logid = yield self.rtype.addLog( stepid=13, name='foo', type='s') self.assertEqual(logid, 23) self.assertEqual(tries, [ (13, 'foo', 'foo', 's'), (13, 'foo', 'foo_2', 's'), (13, 'foo', 'foo_3', 's'), ]) def test_signature_finishLog(self): @self.assertArgSpecMatches( self.master.data.updates.finishLog, # fake self.rtype.finishLog) # real def finishLog(self, logid): pass def test_finishLog(self): self.do_test_callthrough('finishLog', self.rtype.finishLog, logid=10) def test_signature_compressLog(self): @self.assertArgSpecMatches( self.master.data.updates.compressLog, # fake self.rtype.compressLog) # real def compressLog(self, logid): pass def test_compressLog(self): self.do_test_callthrough('compressLog', self.rtype.compressLog, logid=10) def test_signature_appendLog(self): @self.assertArgSpecMatches( self.master.data.updates.appendLog, # fake self.rtype.appendLog) # real def appendLog(self, logid, content): pass def test_appendLog(self): self.do_test_callthrough('appendLog', self.rtype.appendLog, logid=10, content='foo\nbar\n') 
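# Illustrative sketch, not part of the buildbot-2.6.0 sources reproduced here:
# test_newLog_uniquify above drives a retry loop that keeps proposing new slugs
# ('foo', 'foo_2', 'foo_3', ...) until db.logs.addLog stops failing with KeyError
# for a duplicate.  A minimal stand-alone version of that pattern follows, with
# hypothetical names (add_log_with_unique_slug, db_addLog); the real data-layer
# implementation may differ in details such as slug length limits.

from twisted.internet import defer


@defer.inlineCallbacks
def add_log_with_unique_slug(db_addLog, stepid, name, type):
    """Try slug 'name', then 'name_2', 'name_3', ... until db_addLog accepts one."""
    for attempt in range(1, 11):  # give up after ten colliding slugs
        slug = name if attempt == 1 else '%s_%d' % (name, attempt)
        try:
            # db_addLog is expected to return a Deferred that fails with
            # KeyError when the slug is already taken for this step
            logid = yield db_addLog(stepid=stepid, name=name, slug=slug, type=type)
        except KeyError:
            continue
        return logid
    raise KeyError('no unique slug found for %r' % name)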
buildbot-2.6.0/master/buildbot/test/unit/test_data_masters.py000066400000000000000000000261211361162603000244400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import masters from buildbot.process.results import RETRY from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime SOMETIME = 1349016870 OTHERTIME = 1249016870 class MasterEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = masters.MasterEndpoint resourceTypeClass = masters.Master def setUp(self): self.setUpEndpoint() self.master.name = "myname" self.db.insertTestData([ fakedb.Master(id=13, name='some:master', active=False, last_active=SOMETIME), fakedb.Master(id=14, name='other:master', active=False, last_active=SOMETIME), fakedb.Builder(id=23, name='bldr1'), fakedb.BuilderMaster(builderid=23, masterid=13), fakedb.Builder(id=24, name='bldr2'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): master = yield self.callGet(('masters', 14)) self.validateData(master) self.assertEqual(master['name'], 'other:master') @defer.inlineCallbacks def test_get_builderid_existing(self): master = yield self.callGet(('builders', 23, 'masters', 13)) self.validateData(master) self.assertEqual(master['name'], 'some:master') @defer.inlineCallbacks def test_get_builderid_no_match(self): master = yield self.callGet(('builders', 24, 'masters', 13)) self.assertEqual(master, None) @defer.inlineCallbacks def test_get_builderid_missing(self): master = yield self.callGet(('builders', 25, 'masters', 13)) self.assertEqual(master, None) @defer.inlineCallbacks def test_get_missing(self): master = yield self.callGet(('masters', 99)) self.assertEqual(master, None) class MastersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = masters.MastersEndpoint resourceTypeClass = masters.Master def setUp(self): self.setUpEndpoint() self.master.name = "myname" self.db.insertTestData([ fakedb.Master(id=13, name='some:master', active=False, last_active=SOMETIME), fakedb.Master(id=14, name='other:master', active=True, last_active=OTHERTIME), fakedb.Builder(id=22), fakedb.BuilderMaster(masterid=13, builderid=22), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): masters = yield self.callGet(('masters',)) [self.validateData(m) for m in masters] self.assertEqual(sorted([m['masterid'] for m in masters]), [13, 14]) @defer.inlineCallbacks def test_get_builderid(self): masters = yield self.callGet(('builders', 22, 'masters')) [self.validateData(m) for m in masters] 
self.assertEqual(sorted([m['masterid'] for m in masters]), [13]) @defer.inlineCallbacks def test_get_builderid_missing(self): masters = yield self.callGet(('builders', 23, 'masters')) self.assertEqual(masters, []) class Master(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = masters.Master(self.master) def test_signature_masterActive(self): @self.assertArgSpecMatches( self.master.data.updates.masterActive, # fake self.rtype.masterActive) # real def masterActive(self, name, masterid): pass @defer.inlineCallbacks def test_masterActive(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='myname', active=0, last_active=0), fakedb.Master(id=14, name='other', active=1, last_active=0), fakedb.Master(id=15, name='other2', active=1, last_active=0), ]) # initial checkin yield self.rtype.masterActive(name='myname', masterid=13) master = yield self.master.db.masters.getMaster(13) self.assertEqual(master, dict(id=13, name='myname', active=True, last_active=epoch2datetime(60))) self.assertEqual(self.master.mq.productions, [ (('masters', '13', 'started'), dict(masterid=13, name='myname', active=True)), ]) self.master.mq.productions = [] # updated checkin time, re-activation self.reactor.advance(60) yield self.master.db.masters.markMasterInactive(13) yield self.rtype.masterActive('myname', masterid=13) master = yield self.master.db.masters.getMaster(13) self.assertEqual(master, dict(id=13, name='myname', active=True, last_active=epoch2datetime(120))) self.assertEqual(self.master.mq.productions, [ (('masters', '13', 'started'), dict(masterid=13, name='myname', active=True)), ]) self.master.mq.productions = [] def test_signature_masterStopped(self): @self.assertArgSpecMatches( self.master.data.updates.masterStopped, # fake self.rtype.masterStopped) # real def masterStopped(self, name, masterid): pass @defer.inlineCallbacks def test_masterStopped(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='aname', active=1, last_active=self.reactor.seconds()), ]) self.rtype._masterDeactivated = mock.Mock() yield self.rtype.masterStopped(name='aname', masterid=13) self.rtype._masterDeactivated. 
\ assert_called_with(13, 'aname') @defer.inlineCallbacks def test_masterStopped_already(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=13, name='aname', active=0, last_active=0), ]) self.rtype._masterDeactivated = mock.Mock() yield self.rtype.masterStopped(name='aname', masterid=13) self.rtype._masterDeactivated.assert_not_called() def test_signature_expireMasters(self): @self.assertArgSpecMatches( self.master.data.updates.expireMasters, # fake self.rtype.expireMasters) # real def expireMasters(self, forceHouseKeeping=False): pass @defer.inlineCallbacks def test_expireMasters(self): self.reactor.advance(60) self.master.db.insertTestData([ fakedb.Master(id=14, name='other', active=1, last_active=0), fakedb.Master(id=15, name='other', active=1, last_active=0), ]) self.rtype._masterDeactivated = mock.Mock() # check after 10 minutes, and see #14 deactivated; #15 gets deactivated # by another master, so it's not included here self.reactor.advance(600) yield self.master.db.masters.markMasterInactive(15) yield self.rtype.expireMasters() master = yield self.master.db.masters.getMaster(14) self.assertEqual(master, dict(id=14, name='other', active=False, last_active=epoch2datetime(0))) self.rtype._masterDeactivated. \ assert_called_with(14, 'other') @defer.inlineCallbacks def test_masterDeactivated(self): self.master.db.insertTestData([ fakedb.Master(id=14, name='other', active=0, last_active=0), # set up a running build with some steps fakedb.Builder(id=77, name='b1'), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, builderid=77, buildsetid=8822), fakedb.BuildRequestClaim(brid=82, masterid=14, claimed_at=SOMETIME), fakedb.Build(id=13, builderid=77, masterid=14, workerid=13, buildrequestid=82, number=3, results=None), fakedb.Step(id=200, buildid=13), fakedb.Log(id=2000, stepid=200, num_lines=2), fakedb.LogChunk(logid=2000, first_line=1, last_line=2, content='ab\ncd') ]) # mock out the _masterDeactivated methods this will call for rtype in 'builder', 'scheduler', 'changesource': rtype_obj = getattr(self.master.data.rtypes, rtype) m = mock.Mock(name='%s._masterDeactivated' % rtype, spec=rtype_obj._masterDeactivated) m.side_effect = lambda masterid: defer.succeed(None) rtype_obj._masterDeactivated = m # and the update methods.. for meth in 'finishBuild', 'finishStep', 'finishLog': m = mock.create_autospec(getattr(self.master.data.updates, meth)) m.side_effect = lambda *args, **kwargs: defer.succeed(None) setattr(self.master.data.updates, meth, m) yield self.rtype._masterDeactivated(14, 'other') self.master.data.rtypes.builder._masterDeactivated. \ assert_called_with(masterid=14) self.master.data.rtypes.scheduler._masterDeactivated. \ assert_called_with(masterid=14) self.master.data.rtypes.changesource._masterDeactivated. \ assert_called_with(masterid=14) # see that we finished off that build and its steps and logs updates = self.master.data.updates updates.finishLog.assert_called_with(logid=2000) updates.finishStep.assert_called_with( stepid=200, results=RETRY, hidden=False) updates.finishBuild.assert_called_with(buildid=13, results=RETRY) self.assertEqual(self.master.mq.productions, [ (('masters', '14', 'stopped'), dict(masterid=14, name='other', active=False)), ]) buildbot-2.6.0/master/buildbot/test/unit/test_data_patches.py000066400000000000000000000022661361162603000244150ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.data import patches from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class Patch(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = patches.Patch(self.master) # no update methods -> nothing to test buildbot-2.6.0/master/buildbot/test/unit/test_data_properties.py000066400000000000000000000143241361162603000251600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import properties from buildbot.process.properties import Properties as processProperties from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class BuildsetPropertiesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = properties.BuildsetPropertiesEndpoint resourceTypeClass = properties.Properties def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=13, reason='because I said so'), fakedb.SourceStamp(id=92), fakedb.SourceStamp(id=93), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=93), fakedb.Buildset(id=14, reason='no sourcestamps'), fakedb.BuildsetProperty(buildsetid=14) ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_properties(self): props = yield self.callGet(('buildsets', 14, 'properties')) self.assertEqual(props, {'prop': (22, 'fakedb')}) class BuildPropertiesEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = properties.BuildPropertiesEndpoint resourceTypeClass = properties.Properties def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Buildset(id=28), fakedb.BuildRequest(id=5, buildsetid=28), fakedb.Master(id=3), fakedb.Worker(id=42, name="Friday"), fakedb.Build(id=786, buildrequestid=5, masterid=3, workerid=42), fakedb.BuildProperty( buildid=786, name="year", value=1651, source="Wikipedia"), fakedb.BuildProperty( buildid=786, name="island_name", value="despair", source="Book"), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_properties(self): props = yield self.callGet(('builds', 786, 'properties')) self.assertEqual(props, {'year': (1651, 'Wikipedia'), 'island_name': ("despair", 'Book')}) class Properties(interfaces.InterfaceTests, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=False, wantDb=True, wantData=True) self.rtype = properties.Properties(self.master) @defer.inlineCallbacks def do_test_callthrough(self, dbMethodName, method, exp_args=None, exp_kwargs=None, *args, **kwargs): rv = (1, 2) m = mock.Mock(return_value=defer.succeed(rv)) setattr(self.master.db.builds, dbMethodName, m) res = yield method(*args, **kwargs) self.assertIdentical(res, rv) m.assert_called_with( *(exp_args or args), **((exp_kwargs is None) and kwargs or exp_kwargs)) def test_signature_setBuildProperty(self): @self.assertArgSpecMatches( self.master.data.updates.setBuildProperty, # fake self.rtype.setBuildProperty) # real def setBuildProperty(self, buildid, name, value, source): pass def test_setBuildProperty(self): return self.do_test_callthrough('setBuildProperty', self.rtype.setBuildProperty, buildid=1234, name='property', value=[42, 45], source='testsuite', exp_args=(1234, 'property', [42, 45], 'testsuite'), exp_kwargs={}) @defer.inlineCallbacks def test_setBuildProperties(self): self.master.db.insertTestData([ fakedb.Buildset(id=28), fakedb.BuildRequest(id=5, buildsetid=28), fakedb.Master(id=3), fakedb.Worker(id=42, name="Friday"), fakedb.Build(id=1234, buildrequestid=5, masterid=3, workerid=42), ]) self.master.db.builds.setBuildProperty = mock.Mock( wraps=self.master.db.builds.setBuildProperty) props = 
processProperties.fromDict( dict(a=(1, 't'), b=(['abc', 9], 't'))) yield self.rtype.setBuildProperties(1234, props) setBuildPropertiesCalls = sorted(self.master.db.builds.setBuildProperty.mock_calls) self.assertEqual(setBuildPropertiesCalls, [ mock.call(1234, 'a', 1, 't'), mock.call(1234, 'b', ['abc', 9], 't')]) self.master.mq.assertProductions([ (('builds', '1234', 'properties', 'update'), {'a': (1, 't'), 'b': (['abc', 9], 't')}), ]) # sync without changes: no db write self.master.db.builds.setBuildProperty.reset_mock() self.master.mq.clearProductions() yield self.rtype.setBuildProperties(1234, props) self.master.db.builds.setBuildProperty.assert_not_called() self.master.mq.assertProductions([]) # sync with one changes: one db write props.setProperty('b', 2, 'step') self.master.db.builds.setBuildProperty.reset_mock() yield self.rtype.setBuildProperties(1234, props) self.master.db.builds.setBuildProperty.assert_called_with( 1234, 'b', 2, 'step') self.master.mq.assertProductions([ (('builds', '1234', 'properties', 'update'), {'b': (2, 'step')}) ]) buildbot-2.6.0/master/buildbot/test/unit/test_data_resultspec.py000066400000000000000000000345151361162603000251610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import random from twisted.trial import unittest from buildbot.data import base from buildbot.data import resultspec from buildbot.data.resultspec import NoneComparator from buildbot.data.resultspec import ReverseComparator def mklist(fld, *values): if isinstance(fld, tuple): return [dict(zip(fld, val)) for val in values] return [{fld: val} for val in values] class Filter(unittest.TestCase): def test_eq(self): f = resultspec.Filter('num', 'eq', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10))), mklist('num', 10)) def test_eq_plural(self): f = resultspec.Filter('num', 'eq', [10, 15, 20]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 10, 15)) def test_ne(self): f = resultspec.Filter('num', 'ne', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10))), mklist('num', 5)) def test_ne_plural(self): f = resultspec.Filter('num', 'ne', [10, 15, 20]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5)) def test_lt(self): f = resultspec.Filter('num', 'lt', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5)) def test_le(self): f = resultspec.Filter('num', 'le', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 5, 10)) def test_gt(self): f = resultspec.Filter('num', 'gt', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 15)) def test_ge(self): f = resultspec.Filter('num', 'ge', [10]) self.assertEqual(list(f.apply(mklist('num', 5, 10, 15))), mklist('num', 10, 15)) def test_contains(self): f = resultspec.Filter('num', 'contains', [10]) self.assertEqual(list(f.apply(mklist('num', [5, 1], [10, 1], [15, 1]))), mklist('num', [10, 1])) def test_contains_plural(self): f = resultspec.Filter('num', 'contains', [10, 5]) self.assertEqual(list(f.apply(mklist('num', [5, 1], [10, 1], [15, 1]))), mklist('num', [5, 1], [10, 1])) class ResultSpec(unittest.TestCase): def assertListResultEqual(self, a, b): self.assertIsInstance(a, base.ListResult) self.assertIsInstance(b, base.ListResult) self.assertEqual(a, b) def test_apply_None(self): self.assertEqual(resultspec.ResultSpec().apply(None), None) def test_apply_details_fields(self): data = dict(name="clyde", id=14, favcolor="red") self.assertEqual( resultspec.ResultSpec(fields=['name']).apply(data), dict(name="clyde")) self.assertEqual( resultspec.ResultSpec(fields=['name', 'id']).apply(data), dict(name="clyde", id=14)) def test_apply_collection_fields(self): data = mklist(('a', 'b', 'c'), (1, 11, 111), (2, 22, 222)) self.assertEqual( resultspec.ResultSpec(fields=['a']).apply(data), mklist('a', 1, 2)) self.assertEqual( resultspec.ResultSpec(fields=['a', 'c']).apply(data), mklist(('a', 'c'), (1, 111), (2, 222))) def test_apply_ordering(self): data = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') exp = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') random.shuffle(data) self.assertEqual( resultspec.ResultSpec(order=['name']).apply(data), exp) self.assertEqual( resultspec.ResultSpec(order=['-name']).apply(data), list(reversed(exp))) def test_apply_ordering_multi(self): data = mklist(('fn', 'ln'), ('cedric', 'willis'), ('albert', 'engelbert'), ('bruce', 'willis'), ('dwayne', 'montague')) exp = base.ListResult(mklist(('fn', 'ln'), ('albert', 'engelbert'), ('dwayne', 'montague'), ('bruce', 'willis'), ('cedric', 'willis')), total=4) random.shuffle(data) self.assertListResultEqual( resultspec.ResultSpec(order=['ln', 'fn']).apply(data), exp) exp = 
base.ListResult(mklist(('fn', 'ln'), ('cedric', 'willis'), ('bruce', 'willis'), ('dwayne', 'montague'), ('albert', 'engelbert')), total=4) self.assertListResultEqual( resultspec.ResultSpec(order=['-ln', '-fn']).apply(data), exp) def test_apply_filter(self): data = mklist('name', 'albert', 'bruce', 'cedric', 'dwayne') f = resultspec.Filter(field='name', op='gt', values=['bruce']) self.assertListResultEqual( resultspec.ResultSpec(filters=[f]).apply(data), base.ListResult(mklist('name', 'cedric', 'dwayne'), total=2)) f2 = resultspec.Filter(field='name', op='le', values=['cedric']) self.assertListResultEqual( resultspec.ResultSpec(filters=[f, f2]).apply(data), base.ListResult(mklist('name', 'cedric'), total=1)) def test_apply_missing_fields(self): data = mklist(('fn', 'ln'), ('cedric', 'willis'), ('albert', 'engelbert'), ('bruce', 'willis'), ('dwayne', 'montague')) # note that the REST interface catches this with a nicer error message with self.assertRaises(KeyError): resultspec.ResultSpec(fields=['fn'], order=['ln']).apply(data) def test_sort_null_datetimefields(self): data = mklist(('fn', 'ln'), ('albert', datetime.datetime(1, 1, 1)), ('cedric', None)) exp = mklist(('fn', 'ln'), ('cedric', None), ('albert', datetime.datetime(1, 1, 1))) self.assertListResultEqual( resultspec.ResultSpec(order=['ln']).apply(data), base.ListResult(exp, total=2)) def do_test_pagination(self, bareList): data = mklist('x', *list(range(101, 131))) if not bareList: data = base.ListResult(data) data.offset = None data.total = len(data) data.limit = None self.assertListResultEqual( resultspec.ResultSpec(offset=0).apply(data), base.ListResult(mklist('x', *list(range(101, 131))), offset=0, total=30)) self.assertListResultEqual( resultspec.ResultSpec(offset=10).apply(data), base.ListResult(mklist('x', *list(range(111, 131))), offset=10, total=30)) self.assertListResultEqual( resultspec.ResultSpec(offset=10, limit=10).apply(data), base.ListResult(mklist('x', *list(range(111, 121))), offset=10, total=30, limit=10)) self.assertListResultEqual( resultspec.ResultSpec(offset=20, limit=15).apply(data), base.ListResult(mklist('x', *list(range(121, 131))), offset=20, total=30, limit=15)) # off the end def test_pagination_bare_list(self): return self.do_test_pagination(bareList=True) def test_pagination_ListResult(self): return self.do_test_pagination(bareList=False) def test_pagination_prepaginated(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.offset = 10 data.total = 30 data.limit = 10 self.assertListResultEqual( # ResultSpec has its offset/limit fields cleared resultspec.ResultSpec().apply(data), base.ListResult(mklist('x', *list(range(10, 20))), offset=10, total=30, limit=10)) def test_pagination_prepaginated_without_clearing_resultspec(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.offset = 10 data.limit = 10 # ResultSpec does not have its offset/limit fields cleared - this is # detected as an assertion failure with self.assertRaises(AssertionError): resultspec.ResultSpec(offset=10, limit=20).apply(data) def test_endpoint_returns_total_without_applying_filters(self): data = base.ListResult(mklist('x', *list(range(10, 20)))) data.total = 99 # apply doesn't want to get a total with filters still outstanding f = resultspec.Filter(field='x', op='gt', values=[23]) with self.assertRaises(AssertionError): resultspec.ResultSpec(filters=[f]).apply(data) def test_popProperties(self): expected = ['prop1', 'prop2'] rs = resultspec.ResultSpec(properties=[ resultspec.Property(b'property', 'eq', 
expected) ]) self.assertEqual(len(rs.properties), 1) self.assertEqual(rs.popProperties(), expected) self.assertEqual(len(rs.properties), 0) def test_popFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', [10]), resultspec.Filter('foo', 'gt', [5]), resultspec.Filter('base', 'ne', [20]), ]) self.assertEqual(rs.popFilter('baz', 'lt'), None) # no match self.assertEqual(rs.popFilter('foo', 'eq'), [10]) self.assertEqual(len(rs.filters), 2) def test_popBooleanFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', [True]), resultspec.Filter('bar', 'ne', [False]), ]) self.assertEqual(rs.popBooleanFilter('foo'), True) self.assertEqual(rs.popBooleanFilter('bar'), True) self.assertEqual(len(rs.filters), 0) def test_popStringFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['foo']), ]) self.assertEqual(rs.popStringFilter('foo'), 'foo') def test_popStringFilterSeveral(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['foo', 'bar']), ]) self.assertEqual(rs.popStringFilter('foo'), None) def test_popIntegerFilter(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['12']), ]) self.assertEqual(rs.popIntegerFilter('foo'), 12) def test_popIntegerFilterSeveral(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['12', '13']), ]) self.assertEqual(rs.popIntegerFilter('foo'), None) def test_popIntegerFilterNotInt(self): rs = resultspec.ResultSpec(filters=[ resultspec.Filter('foo', 'eq', ['bar']), ]) with self.assertRaises(ValueError): rs.popIntegerFilter('foo') def test_removeOrder(self): rs = resultspec.ResultSpec(order=['foo', '-bar']) rs.removeOrder() self.assertEqual(rs.order, None) def test_popField(self): rs = resultspec.ResultSpec(fields=['foo', 'bar']) self.assertTrue(rs.popField('foo')) self.assertEqual(rs.fields, ['bar']) def test_popField_not_present(self): rs = resultspec.ResultSpec(fields=['foo', 'bar']) self.assertFalse(rs.popField('nosuch')) self.assertEqual(rs.fields, ['foo', 'bar']) class Comparator(unittest.TestCase): def test_noneComparator(self): self.assertNotEqual(NoneComparator(None), NoneComparator(datetime.datetime(1, 1, 1))) self.assertNotEqual(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(None)) self.assertLess(NoneComparator(None), NoneComparator(datetime.datetime(1, 1, 1))) self.assertGreater(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(None)) self.assertLess(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(datetime.datetime(1, 1, 2))) self.assertEqual(NoneComparator(datetime.datetime(1, 1, 1)), NoneComparator(datetime.datetime(1, 1, 1))) self.assertGreater(NoneComparator(datetime.datetime(1, 1, 2)), NoneComparator(datetime.datetime(1, 1, 1))) self.assertEqual(NoneComparator(None), NoneComparator(None)) def test_noneComparison(self): noneInList = ["z", None, None, "q", "a", None, "v"] sortedList = sorted(noneInList, key=NoneComparator) self.assertEqual(sortedList, [None, None, None, "a", "q", "v", "z"]) def test_reverseComparator(self): reverse35 = ReverseComparator(35) reverse36 = ReverseComparator(36) self.assertEqual(reverse35, reverse35) self.assertNotEqual(reverse35, reverse36) self.assertLess(reverse36, reverse35) self.assertGreater(reverse35, reverse36) self.assertLess(reverse36, reverse35) def test_reverseComparison(self): nums = [1, 2, 3, 4, 5] nums.sort(key=ReverseComparator) self.assertEqual(nums, [5, 4, 3, 2, 1]) def test_reverseComparisonWithNone(self): noneInList = ["z", 
None, None, "q", "a", None, "v"] sortedList = sorted(noneInList, key=lambda x: ReverseComparator(NoneComparator(x))) self.assertEqual(sortedList, ["z", "v", "q", "a", None, None, None]) buildbot-2.6.0/master/buildbot/test/unit/test_data_root.py000066400000000000000000000071341361162603000237500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import connector from buildbot.data import root from buildbot.test.util import endpoint class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = root.RootEndpoint resourceTypeClass = root.Root def setUp(self): self.setUpEndpoint() self.master.data.rootLinks = [ {'name': 'abc'}, ] def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): rootlinks = yield self.callGet(('',)) [self.validateData(root) for root in rootlinks] self.assertEqual(rootlinks, [ {'name': 'abc'}, ]) class SpecEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = root.SpecEndpoint resourceTypeClass = root.Spec @defer.inlineCallbacks def setUp(self): self.setUpEndpoint() # replace fakeConnector with real DataConnector self.master.data.disownServiceParent() self.master.data = connector.DataConnector() yield self.master.data.setServiceParent(self.master) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): specs = yield self.callGet(('application.spec',)) [self.validateData(s) for s in specs] for s in specs: # only test an endpoint that is reasonably stable if s['path'] != "master": continue self.assertEqual(s, {'path': 'master', 'type': 'master', 'type_spec': {'fields': [{'name': 'active', 'type': 'boolean', 'type_spec': {'name': 'boolean'}}, {'name': 'masterid', 'type': 'integer', 'type_spec': {'name': 'integer'}}, {'name': 'link', 'type': 'link', 'type_spec': {'name': 'link'}}, {'name': 'name', 'type': 'string', 'type_spec': {'name': 'string'}}, {'name': 'last_active', 'type': 'datetime', 'type_spec': {'name': 'datetime'}}], 'type': 'master'}, 'plural': 'masters'}) buildbot-2.6.0/master/buildbot/test/unit/test_data_schedulers.py000066400000000000000000000223551361162603000251300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot.data import schedulers from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime class SchedulerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = schedulers.SchedulerEndpoint resourceTypeClass = schedulers.Scheduler def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=None), fakedb.Scheduler(id=14, name='other:scheduler'), fakedb.SchedulerMaster(schedulerid=14, masterid=22), fakedb.Scheduler(id=15, name='another:scheduler'), fakedb.SchedulerMaster(schedulerid=15, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): scheduler = yield self.callGet(('schedulers', 14)) self.validateData(scheduler) self.assertEqual(scheduler['name'], 'other:scheduler') @defer.inlineCallbacks def test_get_no_master(self): scheduler = yield self.callGet(('schedulers', 13)) self.validateData(scheduler) self.assertEqual(scheduler['master'], None), @defer.inlineCallbacks def test_get_masterid_existing(self): scheduler = yield self.callGet(('masters', 22, 'schedulers', 14)) self.validateData(scheduler) self.assertEqual(scheduler['name'], 'other:scheduler') @defer.inlineCallbacks def test_get_masterid_no_match(self): scheduler = yield self.callGet(('masters', 33, 'schedulers', 13)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_get_masterid_missing(self): scheduler = yield self.callGet(('masters', 99, 'schedulers', 13)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_get_missing(self): scheduler = yield self.callGet(('schedulers', 99)) self.assertEqual(scheduler, None) @defer.inlineCallbacks def test_action_enable(self): r = yield self.callControl("enable", {'enabled': False}, ('schedulers', 13)) self.assertEqual(r, None) class SchedulersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = schedulers.SchedulersEndpoint resourceTypeClass = schedulers.Scheduler def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Master(id=33, active=1), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=None), fakedb.Scheduler(id=14, name='other:scheduler'), fakedb.SchedulerMaster(schedulerid=14, masterid=22), fakedb.Scheduler(id=15, name='another:scheduler'), fakedb.SchedulerMaster(schedulerid=15, masterid=33), fakedb.Scheduler(id=16, name='wholenother:scheduler'), fakedb.SchedulerMaster(schedulerid=16, masterid=33), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): schedulers = yield self.callGet(('schedulers',)) [self.validateData(m) for m in schedulers] self.assertEqual(sorted([m['schedulerid'] for m in schedulers]), [13, 14, 15, 16]) @defer.inlineCallbacks def test_get_masterid(self): schedulers = yield self.callGet(('masters', 33, 
'schedulers')) [self.validateData(m) for m in schedulers] self.assertEqual(sorted([m['schedulerid'] for m in schedulers]), [15, 16]) @defer.inlineCallbacks def test_get_masterid_missing(self): schedulers = yield self.callGet(('masters', 23, 'schedulers')) self.assertEqual(schedulers, []) class Scheduler(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = schedulers.Scheduler(self.master) def test_signature_schedulerEnable(self): @self.assertArgSpecMatches( self.master.data.updates.schedulerEnable, self.rtype.schedulerEnable) def schedulerEnable(self, schedulerid, v): pass @defer.inlineCallbacks def test_schedulerEnable(self): SOMETIME = 1348971992 yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0, last_active=SOMETIME), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=22), ]) yield self.rtype.schedulerEnable(13, False) self.master.mq.assertProductions( [(('schedulers', '13', 'updated'), {'enabled': False, 'master': {'active': False, 'last_active': epoch2datetime(SOMETIME), 'masterid': 22, 'name': 'some:master'}, 'name': 'some:scheduler', 'schedulerid': 13})]) yield self.rtype.schedulerEnable(13, True) self.master.mq.assertProductions( [(('schedulers', '13', 'updated'), {'enabled': True, 'master': {'active': False, 'last_active': epoch2datetime(SOMETIME), 'masterid': 22, 'name': 'some:master'}, 'name': 'some:scheduler', 'schedulerid': 13})]) def test_signature_findSchedulerId(self): @self.assertArgSpecMatches( self.master.data.updates.findSchedulerId, # fake self.rtype.findSchedulerId) # real def findSchedulerId(self, name): pass @defer.inlineCallbacks def test_findSchedulerId(self): self.master.db.schedulers.findSchedulerId = mock.Mock( return_value=defer.succeed(10)) self.assertEqual((yield self.rtype.findSchedulerId('sch')), 10) self.master.db.schedulers.findSchedulerId.assert_called_with('sch') def test_signature_trySetSchedulerMaster(self): @self.assertArgSpecMatches( self.master.data.updates.trySetSchedulerMaster, # fake self.rtype.trySetSchedulerMaster) # real def trySetSchedulerMaster(self, schedulerid, masterid): pass @defer.inlineCallbacks def test_trySetSchedulerMaster_succeeds(self): self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=defer.succeed(None)) result = yield self.rtype.trySetSchedulerMaster(10, 20) self.assertTrue(result) self.master.db.schedulers.setSchedulerMaster.assert_called_with(10, 20) @defer.inlineCallbacks def test_trySetSchedulerMaster_fails(self): d = defer.fail(failure.Failure( schedulers.SchedulerAlreadyClaimedError('oh noes'))) self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=d) result = yield self.rtype.trySetSchedulerMaster(10, 20) self.assertFalse(result) @defer.inlineCallbacks def test_trySetSchedulerMaster_raisesOddException(self): d = defer.fail(failure.Failure(RuntimeError('oh noes'))) self.master.db.schedulers.setSchedulerMaster = mock.Mock( return_value=d) try: yield self.rtype.trySetSchedulerMaster(10, 20) except RuntimeError: pass else: self.fail("The RuntimeError did not propagate") @defer.inlineCallbacks def test__masterDeactivated(self): yield self.master.db.insertTestData([ fakedb.Master(id=22, active=0), fakedb.Scheduler(id=13, name='some:scheduler'), fakedb.SchedulerMaster(schedulerid=13, masterid=22), fakedb.Scheduler(id=14, name='other:scheduler'), 
fakedb.SchedulerMaster(schedulerid=14, masterid=22), ]) yield self.rtype._masterDeactivated(22) self.master.db.schedulers.assertSchedulerMaster(13, None) self.master.db.schedulers.assertSchedulerMaster(14, None) buildbot-2.6.0/master/buildbot/test/unit/test_data_sourcestamps.py000066400000000000000000000061301361162603000255100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import sourcestamps from buildbot.test.fake import fakedb from buildbot.test.util import endpoint class SourceStampEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = sourcestamps.SourceStampEndpoint resourceTypeClass = sourcestamps.SourceStamp def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=13, branch='oak'), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=14, patchid=99, branch='poplar'), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): sourcestamp = yield self.callGet(('sourcestamps', 13)) self.validateData(sourcestamp) self.assertEqual(sourcestamp['branch'], 'oak') self.assertEqual(sourcestamp['patch'], None) @defer.inlineCallbacks def test_get_existing_patch(self): sourcestamp = yield self.callGet(('sourcestamps', 14)) self.validateData(sourcestamp) self.assertEqual(sourcestamp['branch'], 'poplar') self.assertEqual(sourcestamp['patch'], { 'patchid': 99, 'author': 'bar', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'subdir': '/foo', }) @defer.inlineCallbacks def test_get_missing(self): sourcestamp = yield self.callGet(('sourcestamps', 99)) self.assertEqual(sourcestamp, None) class SourceStampsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = sourcestamps.SourceStampsEndpoint resourceTypeClass = sourcestamps.SourceStamp def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.SourceStamp(id=13), fakedb.SourceStamp(id=14), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): sourcestamps = yield self.callGet(('sourcestamps',)) [self.validateData(m) for m in sourcestamps] self.assertEqual(sorted([m['ssid'] for m in sourcestamps]), [13, 14]) class SourceStamp(unittest.TestCase): pass buildbot-2.6.0/master/buildbot/test/unit/test_data_steps.py000066400000000000000000000340121361162603000241160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import steps from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime TIME1 = 2001111 TIME2 = 2002222 TIME3 = 2003333 class StepEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = steps.StepEndpoint resourceTypeClass = steps.Step def setUp(self): self.setUpEndpoint() self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=30, builderid=77, number=7, masterid=88, buildrequestid=82, workerid=47), fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, results=2, urls_json='[{"name":"url","url":"http://url"}]'), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3, hidden=True), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): step = yield self.callGet(('steps', 72)) self.validateData(step) self.assertEqual(step, { 'buildid': 30, 'complete': False, 'complete_at': None, 'name': 'three', 'number': 2, 'results': None, 'started_at': epoch2datetime(TIME3), 'state_string': '', 'stepid': 72, 'urls': [], 'hidden': True}) @defer.inlineCallbacks def test_get_existing_buildid_name(self): step = yield self.callGet(('builds', 30, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_buildid_number(self): step = yield self.callGet(('builds', 30, 'steps', 1)) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_builder_name(self): step = yield self.callGet(('builders', 77, 'builds', 7, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_buildername_name(self): step = yield self.callGet(('builders', 'builder77', 'builds', 7, 'steps', 'two')) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_existing_builder_number(self): step = yield self.callGet(('builders', 77, 'builds', 7, 'steps', 1)) self.validateData(step) self.assertEqual(step['stepid'], 71) @defer.inlineCallbacks def test_get_missing_buildername_builder_number(self): step = yield self.callGet(('builders', 'builder77_nope', 'builds', 7, 'steps', 1)) self.assertEqual(step, None) @defer.inlineCallbacks def test_get_missing(self): step = yield self.callGet(('steps', 9999)) self.assertEqual(step, None) class StepsEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = steps.StepsEndpoint resourceTypeClass = steps.Step def setUp(self): self.setUpEndpoint() 
self.db.insertTestData([ fakedb.Worker(id=47, name='linux'), fakedb.Builder(id=77, name='builder77'), fakedb.Master(id=88), fakedb.Buildset(id=8822), fakedb.BuildRequest(id=82, buildsetid=8822), fakedb.Build(id=30, builderid=77, number=7, masterid=88, buildrequestid=82, workerid=47), fakedb.Build(id=31, builderid=77, number=8, masterid=88, buildrequestid=82, workerid=47), fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, results=2, urls_json='[{"name":"url","url":"http://url"}]'), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3), fakedb.Step(id=73, number=0, name='otherbuild', buildid=31, started_at=TIME2), ]) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_buildid(self): steps = yield self.callGet(('builds', 30, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) @defer.inlineCallbacks def test_get_builder(self): steps = yield self.callGet(('builders', 77, 'builds', 7, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) @defer.inlineCallbacks def test_get_buildername(self): steps = yield self.callGet(('builders', 'builder77', 'builds', 7, 'steps')) [self.validateData(step) for step in steps] self.assertEqual([s['number'] for s in steps], [0, 1, 2]) class Step(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = steps.Step(self.master) def test_signature_newStep(self): @self.assertArgSpecMatches( self.master.data.updates.addStep, # fake self.rtype.addStep) # real def newStep(self, buildid, name): pass @defer.inlineCallbacks def test_newStep(self): stepid, number, name = yield self.rtype.addStep(buildid=10, name='name') msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': name, 'number': number, 'results': None, 'started_at': None, 'state_string': 'pending', 'stepid': stepid, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(stepid), 'new'), msgBody), (('steps', str(stepid), 'new'), msgBody), ]) step = yield self.master.db.steps.getStep(stepid) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': stepid, 'name': name, 'number': number, 'results': None, 'started_at': None, 'state_string': 'pending', 'urls': [], 'hidden': False, }) @defer.inlineCallbacks def test_fake_newStep(self): self.assertEqual( len((yield self.master.data.updates.addStep(buildid=10, name='ten'))), 3) def test_signature_startStep(self): @self.assertArgSpecMatches( self.master.data.updates.startStep, # fake self.rtype.startStep) # real def newStep(self, stepid): pass @defer.inlineCallbacks def test_startStep(self): self.reactor.advance(TIME1) yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.startStep(stepid=100) msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'started'), msgBody), (('steps', str(100), 'started'), msgBody), ]) step = yield self.master.db.steps.getStep(100) 
self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'urls': [], 'hidden': False, }) def test_signature_setStepStateString(self): @self.assertArgSpecMatches( self.master.data.updates.setStepStateString, # fake self.rtype.setStepStateString) # real def setStepStateString(self, stepid, state_string): pass @defer.inlineCallbacks def test_setStepStateString(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.setStepStateString(stepid=100, state_string='hi') msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'hi', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'updated'), msgBody), (('steps', str(100), 'updated'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'hi', 'urls': [], 'hidden': False, }) def test_signature_finishStep(self): @self.assertArgSpecMatches( self.master.data.updates.finishStep, # fake self.rtype.finishStep) # real def finishStep(self, stepid, results, hidden): pass @defer.inlineCallbacks def test_finishStep(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') self.reactor.advance(TIME1) yield self.rtype.startStep(stepid=100) self.reactor.advance(TIME2 - TIME1) self.master.mq.clearProductions() yield self.rtype.finishStep(stepid=100, results=9, hidden=False) msgBody = { 'buildid': 10, 'complete': True, 'complete_at': epoch2datetime(TIME2), 'name': 'ten', 'number': 0, 'results': 9, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'stepid': 100, 'urls': [], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'finished'), msgBody), (('steps', str(100), 'finished'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': epoch2datetime(TIME2), 'id': 100, 'name': 'ten', 'number': 0, 'results': 9, 'started_at': epoch2datetime(TIME1), 'state_string': 'pending', 'urls': [], 'hidden': False, }) def test_signature_addStepURL(self): @self.assertArgSpecMatches( self.master.data.updates.addStepURL, # fake self.rtype.addStepURL) # real def addStepURL(self, stepid, name, url): pass @defer.inlineCallbacks def test_addStepURL(self): yield self.master.db.steps.addStep(buildid=10, name='ten', state_string='pending') yield self.rtype.addStepURL(stepid=100, name="foo", url="bar") msgBody = { 'buildid': 10, 'complete': False, 'complete_at': None, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'pending', 'stepid': 100, 'urls': [{'name': 'foo', 'url': 'bar'}], 'hidden': False, } self.master.mq.assertProductions([ (('builds', '10', 'steps', str(100), 'updated'), msgBody), (('steps', str(100), 'updated'), msgBody), ]) step = yield self.master.db.steps.getStep(100) self.assertEqual(step, { 'buildid': 10, 'complete_at': None, 'id': 100, 'name': 'ten', 'number': 0, 'results': None, 'started_at': None, 'state_string': 'pending', 'urls': [{'name': 'foo', 'url': 'bar'}], 'hidden': False, }) 
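# The Step tests above pin down the update-side API for steps (addStep,
# startStep, setStepStateString, addStepURL, finishStep).  Below is a minimal,
# hypothetical sketch of that lifecycle, using only the signatures verified by
# the test_signature_* cases above; it assumes `master` is a running
# BuildMaster with its data connector attached, and the function name, step
# name, and URL are illustrative only.

from twisted.internet import defer


@defer.inlineCallbacks
def drive_example_step(master, buildid):
    # create the step and note its identifiers
    stepid, number, name = yield master.data.updates.addStep(
        buildid=buildid, name='compile')
    # mark it started and describe what it is doing
    yield master.data.updates.startStep(stepid=stepid)
    yield master.data.updates.setStepStateString(
        stepid=stepid, state_string='compiling')
    # attach a URL, for example a link to an external log
    yield master.data.updates.addStepURL(
        stepid=stepid, name='log', url='http://example.com/log')
    # finish with results=0 (success), leaving the step visible
    yield master.data.updates.finishStep(
        stepid=stepid, results=0, hidden=False)
    return stepid, number, name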
buildbot-2.6.0/master/buildbot/test/unit/test_data_types.py000066400000000000000000000120341361162603000241240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.data import types class TypeMixin: klass = None good = [] bad = [] stringValues = [] badStringValues = [] cmpResults = [] def setUp(self): self.ty = self.makeInstance() def makeInstance(self): return self.klass() def test_valueFromString(self): for string, expValue in self.stringValues: self.assertEqual(self.ty.valueFromString(string), expValue, "value of string %r" % (string,)) for string in self.badStringValues: with self.assertRaises(Exception): self.ty.valueFromString(string, "expected error for %r" % (string,)) def test_cmp(self): for val, string, expResult in self.cmpResults: self.assertEqual(self.ty.cmp(val, string), expResult, "compare of %r and %r" % (val, string)) def test_validate(self): for o in self.good: errors = list(self.ty.validate(repr(o), o)) self.assertEqual(errors, [], "%s -> %s" % (repr(o), errors)) for o in self.bad: errors = list(self.ty.validate(repr(o), o)) self.assertNotEqual(errors, [], "no error for %s" % (repr(o),)) class NoneOk(TypeMixin, unittest.TestCase): def makeInstance(self): return types.NoneOk(types.Integer()) good = [None, 1] bad = ['abc'] stringValues = [('0', 0), ('-10', -10)] badStringValues = ['one', '', '0x10'] cmpResults = [(10, '9', 1), (-2, '-1', -1)] class Integer(TypeMixin, unittest.TestCase): klass = types.Integer good = [0, -1, 1000, 100 ** 100] bad = [None, '', '0'] stringValues = [('0', 0), ('-10', -10)] badStringValues = ['one', '', '0x10'] cmpResults = [(10, '9', 1), (-2, '-1', -1)] class String(TypeMixin, unittest.TestCase): klass = types.String good = ['', 'hello', '\N{SNOWMAN}'] bad = [None, b'', b'hello', 10] stringValues = [ (b'hello', 'hello'), ('\N{SNOWMAN}'.encode('utf-8'), '\N{SNOWMAN}'), ] badStringValues = ['\xe0\xe0'] cmpResults = [('bbb', 'aaa', 1)] class Binary(TypeMixin, unittest.TestCase): klass = types.Binary good = [b'', b'\x01\x80\xfe', '\N{SNOWMAN}'.encode('utf-8')] bad = [None, 10, 'xyz'] stringValues = [('hello', 'hello')] cmpResults = [('\x00\x80', '\x10\x10', -1)] class Boolean(TypeMixin, unittest.TestCase): klass = types.Boolean good = [True, False] bad = [None, 0, 1] stringValues = [ (b'on', True), (b'true', True), (b'yes', True), (b'1', True), (b'off', False), (b'false', False), (b'no', False), (b'0', False), (b'ON', True), (b'TRUE', True), (b'YES', True), (b'OFF', False), (b'FALSE', False), (b'NO', False), ] cmpResults = [ (False, b'no', 0), (True, b'true', 0), ] class Identifier(TypeMixin, unittest.TestCase): def makeInstance(self): return types.Identifier(len=5) good = ['a', 'abcde', 'a1234'] bad = ['', 'abcdef', b'abcd', '1234', '\N{SNOWMAN}'] stringValues = [ (b'abcd', 'abcd'), ] badStringValues = [ b'', r'\N{SNOWMAN}', b'abcdef' 
] cmpResults = [ ('aaaa', b'bbbb', -1), ] class List(TypeMixin, unittest.TestCase): def makeInstance(self): return types.List(of=types.Integer()) good = [[], [1], [1, 2]] bad = [1, (1,), ['1']] badStringValues = [ '1', '1,2' ] class SourcedProperties(TypeMixin, unittest.TestCase): klass = types.SourcedProperties good = [{'p': (b'["a"]', 's')}] bad = [ None, (), [], {b'not-unicode': ('["a"]', 'unicode')}, {'unicode': ('["a"]', b'not-unicode')}, {'unicode': ('not, json', 'unicode')}, ] class Entity(TypeMixin, unittest.TestCase): class MyEntity(types.Entity): field1 = types.Integer() field2 = types.NoneOk(types.String()) def makeInstance(self): return self.MyEntity('myentity') good = [ {'field1': 1, 'field2': 'f2'}, {'field1': 1, 'field2': None}, ] bad = [ None, [], (), {'field1': 1}, {'field1': 1, 'field2': 'f2', 'field3': 10}, {'field1': 'one', 'field2': 'f2'}, ] buildbot-2.6.0/master/buildbot/test/unit/test_data_workers.py000066400000000000000000000244251361162603000244630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.data import workers from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import endpoint from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin testData = [ fakedb.Builder(id=40, name='b1'), fakedb.Builder(id=41, name='b2'), fakedb.Master(id=13), fakedb.Master(id=14), fakedb.BuilderMaster(id=4013, builderid=40, masterid=13), fakedb.BuilderMaster(id=4014, builderid=40, masterid=14), fakedb.BuilderMaster(id=4113, builderid=41, masterid=13), fakedb.Worker(id=1, name='linux', info={}), fakedb.ConfiguredWorker(id=14013, workerid=1, buildermasterid=4013), fakedb.ConfiguredWorker(id=14014, workerid=1, buildermasterid=4014), fakedb.ConnectedWorker(id=113, masterid=13, workerid=1), fakedb.Worker(id=2, name='windows', info={"a": "b"}), fakedb.ConfiguredWorker(id=24013, workerid=2, buildermasterid=4013), fakedb.ConfiguredWorker(id=24014, workerid=2, buildermasterid=4014), fakedb.ConfiguredWorker(id=24113, workerid=2, buildermasterid=4113), fakedb.ConnectedWorker(id=214, masterid=14, workerid=2), ] def configuredOnKey(worker): return (worker.get('masterid', 0), worker.get('builderid', 0)) def _filt(bs, builderid, masterid): bs['connected_to'] = sorted( [d for d in bs['connected_to'] if not masterid or masterid == d['masterid']]) bs['configured_on'] = sorted( [d for d in bs['configured_on'] if (not masterid or masterid == d['masterid']) and (not builderid or builderid == d['builderid'])], key=configuredOnKey) return bs def w1(builderid=None, masterid=None): return _filt({ 'workerid': 1, 'name': 'linux', 'workerinfo': {}, 'paused': False, 'graceful': False, 
'connected_to': [ {'masterid': 13}, ], 'configured_on': sorted([ {'builderid': 40, 'masterid': 13}, {'builderid': 40, 'masterid': 14}, ], key=configuredOnKey), }, builderid, masterid) def w2(builderid=None, masterid=None): return _filt({ 'workerid': 2, 'name': 'windows', 'workerinfo': {'a': 'b'}, 'paused': False, 'graceful': False, 'connected_to': [ {'masterid': 14}, ], 'configured_on': sorted([ {'builderid': 40, 'masterid': 13}, {'builderid': 41, 'masterid': 13}, {'builderid': 40, 'masterid': 14}, ], key=configuredOnKey), }, builderid, masterid) class WorkerEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = workers.WorkerEndpoint resourceTypeClass = workers.Worker def setUp(self): self.setUpEndpoint() return self.db.insertTestData(testData) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get_existing(self): worker = yield self.callGet(('workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2()) @defer.inlineCallbacks def test_get_existing_name(self): worker = yield self.callGet(('workers', 'linux')) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w1()) @defer.inlineCallbacks def test_get_existing_masterid(self): worker = yield self.callGet(('masters', 14, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(masterid=14)) @defer.inlineCallbacks def test_get_existing_builderid(self): worker = yield self.callGet(('builders', 40, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(builderid=40)) @defer.inlineCallbacks def test_get_existing_masterid_builderid(self): worker = yield self.callGet(('masters', 13, 'builders', 40, 'workers', 2)) self.validateData(worker) worker['configured_on'] = sorted( worker['configured_on'], key=configuredOnKey) self.assertEqual(worker, w2(masterid=13, builderid=40)) @defer.inlineCallbacks def test_get_missing(self): worker = yield self.callGet(('workers', 99)) self.assertEqual(worker, None) @defer.inlineCallbacks def test_setWorkerState(self): yield self.master.data.updates.setWorkerState(2, True, False) worker = yield self.callGet(('workers', 2)) self.validateData(worker) self.assertEqual(worker['paused'], True) @defer.inlineCallbacks def test_actions(self): for action in ("stop", "pause", "unpause", "kill"): yield self.callControl(action, {}, ('masters', 13, 'builders', 40, 'workers', 2)) self.master.mq.assertProductions( [(('control', 'worker', '2', action), {'reason': 'no reason'})]) @defer.inlineCallbacks def test_bad_actions(self): with self.assertRaises(exceptions.InvalidControlException): yield self.callControl("bad_action", {}, ('masters', 13, 'builders', 40, 'workers', 2)) class WorkersEndpoint(endpoint.EndpointMixin, unittest.TestCase): endpointClass = workers.WorkersEndpoint resourceTypeClass = workers.Worker def setUp(self): self.setUpEndpoint() return self.db.insertTestData(testData) def tearDown(self): self.tearDownEndpoint() @defer.inlineCallbacks def test_get(self): workers = yield self.callGet(('workers',)) for b in workers: self.validateData(b) b['configured_on'] = sorted(b['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w1(), w2()], key=configuredOnKey)) @defer.inlineCallbacks def 
test_get_masterid(self): workers = yield self.callGet(('masters', '13', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w1(masterid=13), w2(masterid=13)], key=configuredOnKey)) @defer.inlineCallbacks def test_get_builderid(self): workers = yield self.callGet(('builders', '41', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w2(builderid=41)], key=configuredOnKey)) @defer.inlineCallbacks def test_get_masterid_builderid(self): workers = yield self.callGet(('masters', '13', 'builders', '41', 'workers',)) [self.validateData(b) for b in workers] [sorted(b['configured_on'], key=configuredOnKey) for b in workers] self.assertEqual(sorted(workers, key=configuredOnKey), sorted([w2(masterid=13, builderid=41)], key=configuredOnKey)) @defer.inlineCallbacks def test_setWorkerStateFindByPaused(self): yield self.master.data.updates.setWorkerState(2, True, False) resultSpec = resultspec.OptimisedResultSpec( filters=[resultspec.Filter('paused', 'eq', [True])]) workers = yield self.callGet(('workers',), resultSpec=resultSpec) print(workers) self.assertEqual(len(workers), 1) worker = workers[0] self.validateData(worker) self.assertEqual(worker['paused'], True) class Worker(TestReactorMixin, interfaces.InterfaceTests, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.rtype = workers.Worker(self.master) return self.master.db.insertTestData([ fakedb.Master(id=13), fakedb.Master(id=14), ]) def test_signature_findWorkerId(self): @self.assertArgSpecMatches( self.master.data.updates.findWorkerId, # fake self.rtype.findWorkerId) # real def findWorkerId(self, name): pass def test_signature_workerConfigured(self): @self.assertArgSpecMatches( self.master.data.updates.workerConfigured, # fake self.rtype.workerConfigured) # real def workerConfigured(self, workerid, masterid, builderids): pass def test_findWorkerId(self): # this just passes through to the db method, so test that rv = defer.succeed(None) self.master.db.workers.findWorkerId = \ mock.Mock(return_value=rv) self.assertIdentical(self.rtype.findWorkerId('foo'), rv) def test_findWorkerId_not_id(self): with self.assertRaises(ValueError): self.rtype.findWorkerId(b'foo') with self.assertRaises(ValueError): self.rtype.findWorkerId('123/foo') buildbot-2.6.0/master/buildbot/test/unit/test_db_base.py000066400000000000000000000170411361162603000233510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib import sqlalchemy as sa import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import base from buildbot.test.fake import fakedb from buildbot.test.util import connector_component from buildbot.util import sautils class TestBase(unittest.TestCase): def setUp(self): meta = sa.MetaData() self.tbl = sautils.Table('tbl', meta, sa.Column('str32', sa.String(length=32)), sa.Column('txt', sa.Text)) self.db = mock.Mock() self.db.pool.engine.dialect.name = 'mysql' self.comp = base.DBConnectorComponent(self.db) def test_checkLength_ok(self): self.comp.checkLength(self.tbl.c.str32, "short string") def test_checkLength_long(self): with self.assertRaises(RuntimeError): self.comp.checkLength(self.tbl.c.str32, ("long string" * 5)) def test_ensureLength_ok(self): v = self.comp.ensureLength(self.tbl.c.str32, "short string") self.assertEqual(v, "short string") def test_ensureLength_long(self): v = self.comp.ensureLength(self.tbl.c.str32, "short string" * 5) self.assertEqual(v, "short stringshordacf5a81f8ae3873") self.comp.checkLength(self.tbl.c.str32, v) def test_checkLength_text(self): with self.assertRaises(AssertionError): self.comp.checkLength(self.tbl.c.txt, ("long string" * 5)) def test_checkLength_long_not_mysql(self): self.db.pool.engine.dialect.name = 'sqlite' self.comp.checkLength(self.tbl.c.str32, "long string" * 5) # run that again since the method gets stubbed out self.comp.checkLength(self.tbl.c.str32, "long string" * 5) def _sha1(self, s): return hashlib.sha1(s).hexdigest() def test_hashColumns_single(self): self.assertEqual(self.comp.hashColumns('master'), self._sha1(b'master')) def test_hashColumns_multiple(self): self.assertEqual(self.comp.hashColumns('a', None, 'b', 1), self._sha1(b'a\0\xf5\x00b\x001')) def test_hashColumns_None(self): self.assertEqual(self.comp.hashColumns(None), self._sha1(b'\xf5')) def test_hashColumns_integer(self): self.assertEqual(self.comp.hashColumns(11), self._sha1(b'11')) def test_hashColumns_unicode_ascii_match(self): self.assertEqual(self.comp.hashColumns('master'), self.comp.hashColumns('master')) class TestBaseAsConnectorComponent(unittest.TestCase, connector_component.ConnectorComponentMixin): @defer.inlineCallbacks def setUp(self): # this co-opts the masters table to test findSomethingId yield self.setUpConnectorComponent( table_names=['masters']) self.db.base = base.DBConnectorComponent(self.db) @defer.inlineCallbacks def test_findSomethingId_race(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() def race_thd(conn): conn.execute(tbl.insert(), id=5, name='somemaster', name_hash=hash, active=1, last_active=1) id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1), _race_hook=race_thd) self.assertEqual(id, 5) @defer.inlineCallbacks def test_findSomethingId_new(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1)) self.assertEqual(id, 1) @defer.inlineCallbacks def test_findSomethingId_existing(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() yield self.insertTestData([ fakedb.Master(id=7, name='somemaster', name_hash=hash), ]) id = yield 
self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1)) self.assertEqual(id, 7) @defer.inlineCallbacks def test_findSomethingId_new_noCreate(self): tbl = self.db.model.masters hash = hashlib.sha1(b'somemaster').hexdigest() id = yield self.db.base.findSomethingId( tbl=self.db.model.masters, whereclause=(tbl.c.name_hash == hash), insert_values=dict(name='somemaster', name_hash=hash, active=1, last_active=1), autoCreate=False) self.assertEqual(id, None) class TestCachedDecorator(unittest.TestCase): def setUp(self): # set this to True to check that cache.get isn't called (for # no_cache=1) self.cache_get_raises_exception = False class TestConnectorComponent(base.DBConnectorComponent): invocations = None @base.cached("mycache") def getThing(self, key): if self.invocations is None: self.invocations = [] self.invocations.append(key) return defer.succeed(key * 2) def get_cache(self, cache_name, miss_fn): self.assertEqual(cache_name, "mycache") cache = mock.Mock(name="mycache") if self.cache_get_raises_exception: def ex(key): raise RuntimeError("cache.get called unexpectedly") cache.get = ex else: cache.get = miss_fn return cache # tests @defer.inlineCallbacks def test_cached(self): # attach it to the connector connector = mock.Mock(name="connector") connector.master.caches.get_cache = self.get_cache # build an instance comp = self.TestConnectorComponent(connector) # test it twice (to test an implementation detail) res1 = yield comp.getThing("foo") res2 = yield comp.getThing("bar") self.assertEqual((res1, res2, comp.invocations), ('foofoo', 'barbar', ['foo', 'bar'])) @defer.inlineCallbacks def test_cached_no_cache(self): # attach it to the connector connector = mock.Mock(name="connector") connector.master.caches.get_cache = self.get_cache self.cache_get_raises_exception = True # build an instance comp = self.TestConnectorComponent(connector) yield comp.getThing("foo", no_cache=1) buildbot-2.6.0/master/buildbot/test/unit/test_db_builders.py000066400000000000000000000256761361162603000242650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import builders from buildbot.db import tags from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin def builderKey(builder): return builder['id'] class Tests(interfaces.InterfaceTests): # common sample data builder_row = [ fakedb.Builder(id=7, name="some:builder"), ] # tests def test_signature_findBuilderId(self): @self.assertArgSpecMatches(self.db.builders.findBuilderId) def findBuilderId(self, name, autoCreate=True): pass def test_signature_addBuilderMaster(self): @self.assertArgSpecMatches(self.db.builders.addBuilderMaster) def addBuilderMaster(self, builderid=None, masterid=None): pass def test_signature_removeBuilderMaster(self): @self.assertArgSpecMatches(self.db.builders.removeBuilderMaster) def removeBuilderMaster(self, builderid=None, masterid=None): pass def test_signature_getBuilder(self): @self.assertArgSpecMatches(self.db.builders.getBuilder) def getBuilder(self, builderid): pass def test_signature_getBuilders(self): @self.assertArgSpecMatches(self.db.builders.getBuilders) def getBuilders(self, masterid=None): pass def test_signature_updateBuilderInfo(self): @self.assertArgSpecMatches(self.db.builders.updateBuilderInfo) def updateBuilderInfo(self, builderid, description, tags): pass @defer.inlineCallbacks def test_updateBuilderInfo(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder7'), fakedb.Builder(id=8, name='some:builder8'), ]) yield self.db.builders.updateBuilderInfo(7, 'a string which describe the builder', ['cat1', 'cat2']) yield self.db.builders.updateBuilderInfo(8, 'a string which describe the builder', []) builderdict7 = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict7) builderdict7['tags'].sort() # order is unspecified self.assertEqual(builderdict7, dict(id=7, name='some:builder7', tags=['cat1', 'cat2'], masterids=[], description='a string which describe the builder')) builderdict8 = yield self.db.builders.getBuilder(8) validation.verifyDbDict(self, 'builderdict', builderdict8) self.assertEqual(builderdict8, dict(id=8, name='some:builder8', tags=[], masterids=[], description='a string which describe the builder')) @defer.inlineCallbacks def test_findBuilderId_new(self): id = yield self.db.builders.findBuilderId('some:builder') builderdict = yield self.db.builders.getBuilder(id) self.assertEqual(builderdict, dict(id=id, name='some:builder', tags=[], masterids=[], description=None)) @defer.inlineCallbacks def test_findBuilderId_new_no_autoCreate(self): id = yield self.db.builders.findBuilderId('some:builder', autoCreate=False) self.assertIsNone(id) @defer.inlineCallbacks def test_findBuilderId_exists(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), ]) id = yield self.db.builders.findBuilderId('some:builder') self.assertEqual(id, 7) @defer.inlineCallbacks def test_addBuilderMaster(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='abc'), fakedb.Master(id=10, name='def'), fakedb.BuilderMaster(builderid=7, masterid=10), ]) yield self.db.builders.addBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) 
self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[9, 10], description=None)) @defer.inlineCallbacks def test_addBuilderMaster_already_present(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='abc'), fakedb.Master(id=10, name='def'), fakedb.BuilderMaster(builderid=7, masterid=9), ]) yield self.db.builders.addBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[9], description=None)) @defer.inlineCallbacks def test_removeBuilderMaster(self): yield self.insertTestData([ fakedb.Builder(id=7), fakedb.Master(id=9, name='some:master'), fakedb.Master(id=10, name='other:master'), fakedb.BuilderMaster(builderid=7, masterid=9), fakedb.BuilderMaster(builderid=7, masterid=10), ]) yield self.db.builders.removeBuilderMaster(builderid=7, masterid=9) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[10], description=None)) @defer.inlineCallbacks def test_getBuilder_no_masters(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), ]) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[], description=None)) @defer.inlineCallbacks def test_getBuilder_with_masters(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=7, masterid=4), ]) builderdict = yield self.db.builders.getBuilder(7) validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(builderdict, dict(id=7, name='some:builder', tags=[], masterids=[3, 4], description=None)) @defer.inlineCallbacks def test_getBuilder_missing(self): builderdict = yield self.db.builders.getBuilder(7) self.assertEqual(builderdict, None) @defer.inlineCallbacks def test_getBuilders(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Builder(id=8, name='other:builder'), fakedb.Builder(id=9, name='third:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=4), ]) builderlist = yield self.db.builders.getBuilders() for builderdict in builderlist: validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(sorted(builderlist, key=builderKey), sorted([ dict(id=7, name='some:builder', masterids=[ 3], tags=[], description=None), dict(id=8, name='other:builder', masterids=[ 3, 4], tags=[], description=None), dict(id=9, name='third:builder', masterids=[], tags=[], description=None), ], key=builderKey)) @defer.inlineCallbacks def test_getBuilders_masterid(self): yield self.insertTestData([ fakedb.Builder(id=7, name='some:builder'), fakedb.Builder(id=8, name='other:builder'), fakedb.Builder(id=9, name='third:builder'), fakedb.Master(id=3, name='m1'), fakedb.Master(id=4, name='m2'), fakedb.BuilderMaster(builderid=7, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=3), fakedb.BuilderMaster(builderid=8, masterid=4), ]) builderlist = yield 
self.db.builders.getBuilders(masterid=3) for builderdict in builderlist: validation.verifyDbDict(self, 'builderdict', builderdict) self.assertEqual(sorted(builderlist, key=builderKey), sorted([ dict(id=7, name='some:builder', masterids=[ 3], tags=[], description=None), dict(id=8, name='other:builder', masterids=[ 3, 4], tags=[], description=None), ], key=builderKey)) @defer.inlineCallbacks def test_getBuilders_empty(self): builderlist = yield self.db.builders.getBuilders() self.assertEqual(sorted(builderlist), []) class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['builders', 'masters', 'builder_masters', 'builders_tags', 'tags']) self.db.builders = builders.BuildersConnectorComponent(self.db) self.db.tags = tags.TagsConnectorComponent(self.db) self.master = self.db.master self.master.db = self.db def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_buildrequests.py000066400000000000000000000712651361162603000253420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime from twisted.internet import defer from twisted.trial import unittest from buildbot.db import buildrequests from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin from buildbot.util import UTC from buildbot.util import epoch2datetime class Tests(interfaces.InterfaceTests): # test that the datetime translations are done correctly by specifying # the epoch timestamp and datetime objects explicitly. These should # pass regardless of the local timezone used while running tests! 
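    # As a worked example of the correspondence being tested (assuming
    # buildbot.util.epoch2datetime as imported above):
    #
    #     epoch2datetime(266761875) == datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC)
    #
    # that is, CLAIMED_AT_EPOCH below is 266,761,875 seconds after the Unix
    # epoch (1970-01-01 00:00:00 UTC), which falls on 1978-06-15 12:31:15 UTC.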
CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC) CLAIMED_AT_EPOCH = 266761875 SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC) SUBMITTED_AT_EPOCH = 298297875 COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC) COMPLETE_AT_EPOCH = 329920275 BSID = 567 BLDRID1 = 890 BLDRID2 = 891 BLDRID3 = 893 MASTER_ID = "set in setUp" OTHER_MASTER_ID = "set in setUp" def setUpTests(self): # set up a sourcestamp and buildset for use below self.MASTER_ID = fakedb.FakeBuildRequestsComponent.MASTER_ID self.OTHER_MASTER_ID = self.MASTER_ID + 1111 self.db.master.masterid = self.MASTER_ID return self.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Master(id=self.MASTER_ID, name="fake master"), fakedb.Master(id=self.OTHER_MASTER_ID, name="other"), fakedb.Buildset(id=self.BSID), fakedb.Builder(id=self.BLDRID1, name="builder1"), fakedb.Builder(id=self.BLDRID2, name="builder2"), fakedb.Builder(id=self.BLDRID3, name="builder3"), fakedb.BuildsetSourceStamp(buildsetid=self.BSID, sourcestampid=234), ]) # tests @defer.inlineCallbacks def test_getBuildRequest(self): yield self.insertTestData([ fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=75, priority=7, submitted_at=self.SUBMITTED_AT_EPOCH, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim( brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), ]) brdict = yield self.db.buildrequests.getBuildRequest(44) yield self.assertEqual(brdict, dict(buildrequestid=44, buildsetid=self.BSID, builderid=self.BLDRID1, buildername="builder1", priority=7, claimed=True, claimed_by_masterid=self.MASTER_ID, complete=True, results=75, claimed_at=self.CLAIMED_AT, submitted_at=self.SUBMITTED_AT, complete_at=self.COMPLETE_AT, waited_for=False)) @defer.inlineCallbacks def test_getBuildRequest_missing(self): brdict = yield self.db.buildrequests.getBuildRequest(44) self.assertEqual(brdict, None) @defer.inlineCallbacks def do_test_getBuildRequests_claim_args(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 50: claimed by this master fakedb.BuildRequest( id=50, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=50, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 51: claimed by another master fakedb.BuildRequest( id=51, buildsetid=self.BSID, builderid=self.BLDRID2), fakedb.BuildRequestClaim(brid=51, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 52: unclaimed fakedb.BuildRequest( id=52, buildsetid=self.BSID, builderid=self.BLDRID1), # 53: unclaimed but complete (should not appear for claimed=False) fakedb.BuildRequest( id=53, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_no_claimed_arg(self): return self.do_test_getBuildRequests_claim_args( expected=[50, 51, 52, 53]) def test_getBuildRequests_claimed_mine(self): return self.do_test_getBuildRequests_claim_args( claimed=self.MASTER_ID, expected=[50]) def test_getBuildRequests_claimed_true(self): return self.do_test_getBuildRequests_claim_args( claimed=True, expected=[50, 51]) def test_getBuildRequests_unclaimed(self): return self.do_test_getBuildRequests_claim_args( claimed=False, expected=[52]) @defer.inlineCallbacks def do_test_getBuildRequests_buildername_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 8: 
'bb' fakedb.BuildRequest( id=8, buildsetid=self.BSID, builderid=self.BLDRID1), # 9: 'cc' fakedb.BuildRequest( id=9, buildsetid=self.BSID, builderid=self.BLDRID2), # 10: 'cc' fakedb.BuildRequest( id=10, buildsetid=self.BSID, builderid=self.BLDRID2), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) @defer.inlineCallbacks def do_test_getBuildRequests_complete_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ # 70: incomplete fakedb.BuildRequest(id=70, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), # 80: complete fakedb.BuildRequest(id=80, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=self.COMPLETE_AT_EPOCH), # 81: complete but no complete_at fakedb.BuildRequest(id=81, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=0), # 82: complete_at set but complete is false, so not complete fakedb.BuildRequest(id=82, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=self.COMPLETE_AT_EPOCH), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_complete_none(self): return self.do_test_getBuildRequests_complete_arg( expected=[70, 80, 81, 82]) def test_getBuildRequests_complete_true(self): return self.do_test_getBuildRequests_complete_arg( complete=True, expected=[80, 81]) def test_getBuildRequests_complete_false(self): return self.do_test_getBuildRequests_complete_arg( complete=False, expected=[70, 82]) @defer.inlineCallbacks def test_getBuildRequests_bsid_arg(self): yield self.insertTestData([ # the buildset that we are *not* looking for fakedb.Buildset(id=self.BSID + 1), fakedb.BuildRequest(id=70, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), fakedb.BuildRequest(id=71, buildsetid=self.BSID + 1, builderid=self.BLDRID1, complete=0, complete_at=None), fakedb.BuildRequest(id=72, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=None), ]) brlist = yield self.db.buildrequests.getBuildRequests(bsid=self.BSID) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted([70, 72])) @defer.inlineCallbacks def test_getBuildRequests_combo(self): yield self.insertTestData([ # 44: everything we want fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 45: different builderid fakedb.BuildRequest(id=45, buildsetid=self.BSID, builderid=self.BLDRID2, complete=1, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=45, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 46: incomplete fakedb.BuildRequest(id=46, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, results=92, complete_at=0), fakedb.BuildRequestClaim(brid=46, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 47: unclaimed fakedb.BuildRequest(id=47, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), # 48: claimed by other fakedb.BuildRequest(id=48, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=48, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 49: different bsid 
fakedb.Buildset(id=self.BSID + 1), fakedb.BuildRequest(id=49, buildsetid=self.BSID + 1, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=49, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), ]) brlist = yield self.db.buildrequests.getBuildRequests( builderid=self.BLDRID1, claimed=self.MASTER_ID, complete=True, bsid=self.BSID) self.assertEqual([br['buildrequestid'] for br in brlist], [44]) @defer.inlineCallbacks def do_test_getBuildRequests_branch_arg(self, **kwargs): expected = kwargs.pop('expected') yield self.insertTestData([ fakedb.Buildset(id=self.BSID + 1), fakedb.BuildRequest( id=70, buildsetid=self.BSID + 1, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 1, branch='branch_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 1, sourcestampid=self.BSID + 1), fakedb.Buildset(id=self.BSID + 2), fakedb.BuildRequest( id=80, buildsetid=self.BSID + 2, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 2, repository='repository_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 2, sourcestampid=self.BSID + 2), fakedb.Buildset(id=self.BSID + 3), fakedb.BuildRequest( id=90, buildsetid=self.BSID + 3, builderid=self.BLDRID1), fakedb.SourceStamp(id=self.BSID + 3, branch='branch_A', repository='repository_A'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 3, sourcestampid=self.BSID + 3), # multiple sourcestamps on the same buildset are possible fakedb.SourceStamp(id=self.BSID + 4, branch='branch_B', repository='repository_B'), fakedb.BuildsetSourceStamp(buildsetid=self.BSID + 3, sourcestampid=self.BSID + 4), ]) brlist = yield self.db.buildrequests.getBuildRequests(**kwargs) self.assertEqual(sorted([br['buildrequestid'] for br in brlist]), sorted(expected)) def test_getBuildRequests_branch(self): return self.do_test_getBuildRequests_branch_arg(branch='branch_A', expected=[70, 90]) def test_getBuildRequests_branch_empty(self): return self.do_test_getBuildRequests_branch_arg(branch='absent_branch', expected=[]) def test_getBuildRequests_repository(self): return self.do_test_getBuildRequests_branch_arg( repository='repository_A', expected=[80, 90]) def test_getBuildRequests_repository_empty(self): return self.do_test_getBuildRequests_branch_arg( repository='absent_repository', expected=[]) def test_getBuildRequests_repository_and_branch(self): return self.do_test_getBuildRequests_branch_arg( repository='repository_A', branch='branch_A', expected=[90]) def test_getBuildRequests_no_repository_nor_branch(self): return self.do_test_getBuildRequests_branch_arg(expected=[70, 80, 90]) def failWithExpFailure(self, exc, expfailure=None): if not expfailure: raise exc self.flushLoggedErrors(expfailure) if isinstance(exc, expfailure): return raise exc @defer.inlineCallbacks def do_test_claimBuildRequests(self, rows, now, brids, expected=None, expfailure=None, claimed_at=None): self.reactor.advance(now) try: yield self.insertTestData(rows) yield self.db.buildrequests.claimBuildRequests(brids=brids, claimed_at=claimed_at) results = yield self.db.buildrequests.getBuildRequests() self.assertNotEqual(expected, None, "unexpected success from claimBuildRequests") self.assertEqual( sorted([(r['buildrequestid'], r['claimed_at'], r['claimed_by_masterid']) for r in results]), sorted(expected)) except Exception as e: self.failWithExpFailure(e, expfailure) def test_claimBuildRequests_single(self): return self.do_test_claimBuildRequests([ fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), ], 
            1300305712, [44],
            [(44, epoch2datetime(1300305712), self.MASTER_ID)])

    def test_claimBuildRequests_single_explicit_claimed_at(self):
        return self.do_test_claimBuildRequests([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
        ], 1300305712, [44],
            [(44, epoch2datetime(14000000), self.MASTER_ID)],
            claimed_at=epoch2datetime(14000000))

    def test_claimBuildRequests_multiple(self):
        return self.do_test_claimBuildRequests(
            [
                fakedb.BuildRequest(
                    id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
                fakedb.BuildRequest(
                    id=45, buildsetid=self.BSID, builderid=self.BLDRID1),
                fakedb.BuildRequest(
                    id=46, buildsetid=self.BSID, builderid=self.BLDRID1),
            ],
            1300305712,
            [44, 46],
            [
                (44, epoch2datetime(1300305712), self.MASTER_ID),
                (45, None, None),
                (46, epoch2datetime(1300305712), self.MASTER_ID),
            ])

    def test_claimBuildRequests_stress(self):
        return self.do_test_claimBuildRequests(
            [
                fakedb.BuildRequest(
                    id=id, buildsetid=self.BSID, builderid=self.BLDRID1)
                for id in range(1, 1000)
            ],
            1300305713,
            list(range(1, 1000)),
            [
                (id, epoch2datetime(1300305713), self.MASTER_ID)
                for id in range(1, 1000)
            ]
        )

    def test_claimBuildRequests_other_master_claim(self):
        return self.do_test_claimBuildRequests([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=44,
                                     masterid=self.OTHER_MASTER_ID,
                                     claimed_at=1300103810),
        ], 1300305712, [44],
            expfailure=buildrequests.AlreadyClaimedError)

    @db.skip_for_dialect('mysql')
    @defer.inlineCallbacks
    def test_claimBuildRequests_other_master_claim_stress(self):
        yield self.do_test_claimBuildRequests(
            [fakedb.BuildRequest(id=id, buildsetid=self.BSID,
                                 builderid=self.BLDRID1)
             for id in range(1, 1000)] +
            [
                fakedb.BuildRequest(
                    id=1000, buildsetid=self.BSID, builderid=self.BLDRID1),
                # the fly in the ointment..
                fakedb.BuildRequestClaim(brid=1000,
                                         masterid=self.OTHER_MASTER_ID,
                                         claimed_at=1300103810),
            ], 1300305712, list(range(1, 1001)),
            expfailure=buildrequests.AlreadyClaimedError)
        results = yield self.db.buildrequests.getBuildRequests(claimed=True)
        # check that [1,1000) were not claimed, and 1000 is still claimed
        self.assertEqual([
            (r['buildrequestid'], r['claimed_by_masterid'], r['claimed_at'])
            for r in results
        ][:10], [
            (1000, self.OTHER_MASTER_ID, epoch2datetime(1300103810))
        ])

    @defer.inlineCallbacks
    def test_claimBuildRequests_sequential(self):
        now = 120350934
        self.reactor.advance(now)
        yield self.insertTestData([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequest(
                id=45, buildsetid=self.BSID, builderid=self.BLDRID1),
        ])
        yield self.db.buildrequests.claimBuildRequests(brids=[44])
        yield self.db.buildrequests.claimBuildRequests(brids=[45])
        results = yield self.db.buildrequests.getBuildRequests(claimed=False)
        self.assertEqual(results, [])

    @defer.inlineCallbacks
    def do_test_completeBuildRequests(self, rows, now, expected=None,
                                      expfailure=None, brids=None,
                                      complete_at=None):
        if brids is None:
            brids = [44]
        self.reactor.advance(now)
        try:
            yield self.insertTestData(rows)
            yield self.db.buildrequests.completeBuildRequests(
                brids=brids, results=7, complete_at=complete_at)
            results = yield self.db.buildrequests.getBuildRequests()
            self.assertNotEqual(expected, None,
                                "unexpected success from completeBuildRequests")
            self.assertEqual(sorted(
                (r['buildrequestid'], r['complete'],
                 r['results'], r['complete_at'])
                for r in results
            ), sorted(expected))
        except Exception as e:
            self.failWithExpFailure(e, expfailure)

    def test_completeBuildRequests(self):
        return self.do_test_completeBuildRequests([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID,
                                     claimed_at=1300103810),
        ], 1300305712,
            [(44, True, 7, epoch2datetime(1300305712))])

    def test_completeBuildRequests_explicit_time(self):
        return self.do_test_completeBuildRequests([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID,
                                     claimed_at=1300103810),
        ], 1300305712,
            [(44, True, 7, epoch2datetime(999999))],
            complete_at=epoch2datetime(999999))

    def test_completeBuildRequests_multiple(self):
        return self.do_test_completeBuildRequests([
            fakedb.BuildRequest(
                id=44, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID,
                                     claimed_at=1300103810),
            fakedb.BuildRequest(
                id=45, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=45, masterid=self.OTHER_MASTER_ID,
                                     claimed_at=1300103811),
            fakedb.BuildRequest(
                id=46, buildsetid=self.BSID, builderid=self.BLDRID1),
            fakedb.BuildRequestClaim(brid=46, masterid=self.MASTER_ID,
                                     claimed_at=1300103812),
        ], 1300305712,
            [(44, True, 7, epoch2datetime(1300305712)),
             (45, False, -1, None),
             (46, True, 7, epoch2datetime(1300305712)),
             ], brids=[44, 46])

    def test_completeBuildRequests_stress(self):
        return self.do_test_completeBuildRequests([
            fakedb.BuildRequest(
                id=id, buildsetid=self.BSID, builderid=self.BLDRID1)
            for id in range(1, 280)
        ] + [
            fakedb.BuildRequestClaim(brid=id, masterid=self.MASTER_ID,
                                     claimed_at=1300103810)
            for id in range(1, 280)
        ], 1300305712,
            [(id, True, 7, epoch2datetime(1300305712))
             for id in range(1, 280)
             ], brids=list(range(1, 280)))

    def test_completeBuildRequests_multiple_notmine(self):
        # note that the requests are completed even though they are not mine!
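    # Illustrative sketch (not from the upstream buildbot sources): the claim
    # and complete tests in this class pin down the buildrequests contract --
    # claimBuildRequests() raises AlreadyClaimedError when another master
    # already holds the claim, and completeBuildRequests() raises
    # NotClaimedError for already-complete or unknown requests.  A minimal
    # caller-side sketch, assuming a `db` object exposing the same
    # `buildrequests` component that `self.db` exposes in these tests;
    # `run_one` and `do_build` are hypothetical names used only here:
    #
    #     from twisted.internet import defer
    #     from buildbot.db import buildrequests
    #
    #     @defer.inlineCallbacks
    #     def run_one(db, brid, do_build):
    #         try:
    #             # another master may already hold the claim
    #             yield db.buildrequests.claimBuildRequests(brids=[brid])
    #         except buildrequests.AlreadyClaimedError:
    #             return
    #         try:
    #             results = yield do_build(brid)
    #             # completing an already-complete or unknown request
    #             # raises NotClaimedError
    #             yield db.buildrequests.completeBuildRequests(
    #                 brids=[brid], results=results)
    #         except Exception:
    #             # release the claim so another master can retry
    #             yield db.buildrequests.unclaimBuildRequests([brid])
    #             raise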
return self.do_test_completeBuildRequests([ # two unclaimed requests fakedb.BuildRequest( id=44, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), # and one claimed by another master fakedb.BuildRequest( id=46, buildsetid=self.BSID, builderid=self.BLDRID1), fakedb.BuildRequestClaim(brid=46, masterid=self.OTHER_MASTER_ID, claimed_at=1300103812), ], 1300305712, [(44, True, 7, epoch2datetime(1300305712)), (45, True, 7, epoch2datetime(1300305712)), (46, True, 7, epoch2datetime(1300305712)), ], brids=[44, 45, 46]) def test_completeBuildRequests_already_completed(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, complete_at=1300104190), ], 1300305712, expfailure=buildrequests.NotClaimedError) def test_completeBuildRequests_no_such(self): return self.do_test_completeBuildRequests([ fakedb.BuildRequest( id=45, buildsetid=self.BSID, builderid=self.BLDRID1), ], 1300305712, expfailure=buildrequests.NotClaimedError) @defer.inlineCallbacks def do_test_unclaimMethod(self, method, expected): yield self.insertTestData([ # 44: a complete build (should not be unclaimed) fakedb.BuildRequest(id=44, buildsetid=self.BSID, builderid=self.BLDRID1, complete=1, results=92, complete_at=self.COMPLETE_AT_EPOCH), fakedb.BuildRequestClaim(brid=44, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 45: incomplete build belonging to this incarnation fakedb.BuildRequest(id=45, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=45, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 46: incomplete build belonging to another master fakedb.BuildRequest(id=46, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=46, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH), # 47: unclaimed fakedb.BuildRequest(id=47, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), # 48: claimed by this master, but recently fakedb.BuildRequest(id=48, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=48, masterid=self.MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH - 50), # 49: incomplete old build belonging to another master fakedb.BuildRequest(id=49, buildsetid=self.BSID, builderid=self.BLDRID1, complete=0, complete_at=0), fakedb.BuildRequestClaim(brid=49, masterid=self.OTHER_MASTER_ID, claimed_at=self.CLAIMED_AT_EPOCH - 1000), ]) yield method() # just select the unclaimed requests results = yield self.db.buildrequests.getBuildRequests(claimed=False) self.assertEqual(sorted([r['buildrequestid'] for r in results]), sorted(expected)) def test_unclaimBuildRequests(self): to_unclaim = [ 44, # completed -> should not be unclaimed 45, # incomplete -> unclaimed 46, # from another master -> not unclaimed 47, # unclaimed -> still unclaimed 48, # claimed -> unclaimed 49, # another master -> not unclaimed 50 # no such buildrequest -> no error ] return self.do_test_unclaimMethod( lambda: self.db.buildrequests.unclaimBuildRequests(to_unclaim), [45, 47, 48]) class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): # Compatibility with some checks in the "real" tests. 
class db_engine: class dialect: name = 'buildbot_fake' def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData return self.setUpTests() class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, Tests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['patches', 'changes', 'builders', 'buildsets', 'buildset_properties', 'buildrequests', 'buildset_sourcestamps', 'masters', 'buildrequest_claims', 'sourcestamps', 'sourcestampsets', 'builds', 'workers', ]) self.db.buildrequests = \ buildrequests.BuildRequestsConnectorComponent(self.db) yield self.setUpTests() def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_builds.py000066400000000000000000000536641361162603000237340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.data import resultspec from buildbot.db import builds from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime TIME1 = 1304262222 TIME2 = 1304262223 TIME3 = 1304262224 TIME4 = 1304262235 CREATED_AT = 927845299 class Tests(interfaces.InterfaceTests): # common sample data backgroundData = [ fakedb.Buildset(id=20), fakedb.Builder(id=77, name="b1"), fakedb.Builder(id=88, name="b2"), fakedb.BuildRequest(id=40, buildsetid=20, builderid=77), fakedb.BuildRequest(id=41, buildsetid=20, builderid=77), fakedb.BuildRequest(id=42, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Master(id=89, name="bar"), fakedb.Worker(id=13, name='wrk'), fakedb.Worker(id=12, name='sl2'), ] threeBuilds = [ fakedb.Build(id=50, buildrequestid=42, number=5, masterid=88, builderid=77, workerid=13, state_string="build 5", started_at=TIME1), fakedb.Build(id=51, buildrequestid=41, number=6, masterid=88, builderid=88, workerid=13, state_string="build 6", started_at=TIME2), fakedb.Build(id=52, buildrequestid=42, number=7, masterid=88, builderid=77, workerid=12, state_string="build 7", started_at=TIME3, complete_at=TIME4, results=5), ] threeBdicts = { 50: {'id': 50, 'buildrequestid': 42, 'builderid': 77, 'masterid': 88, 'number': 5, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'build 5', 'results': None}, 51: {'id': 51, 'buildrequestid': 41, 'builderid': 88, 'masterid': 88, 'number': 6, 'workerid': 13, 'started_at': epoch2datetime(TIME2), 'complete_at': None, 'state_string': 'build 6', 'results': None}, 52: 
{'id': 52, 'buildrequestid': 42, 'builderid': 77, 'masterid': 88, 'number': 7, 'workerid': 12, 'started_at': epoch2datetime(TIME3), 'complete_at': epoch2datetime(TIME4), 'state_string': 'build 7', 'results': 5}, } # signature tests def test_signature_getBuild(self): @self.assertArgSpecMatches(self.db.builds.getBuild) def getBuild(self, buildid): pass def test_signature_getBuildByNumber(self): @self.assertArgSpecMatches(self.db.builds.getBuildByNumber) def getBuild(self, builderid, number): pass def test_signature_getBuilds(self): @self.assertArgSpecMatches(self.db.builds.getBuilds) def getBuilds(self, builderid=None, buildrequestid=None, workerid=None, complete=None, resultSpec=None): pass def test_signature_addBuild(self): @self.assertArgSpecMatches(self.db.builds.addBuild) def addBuild(self, builderid, buildrequestid, workerid, masterid, state_string): pass def test_signature_setBuildStateString(self): @self.assertArgSpecMatches(self.db.builds.setBuildStateString) def setBuildStateString(self, buildid, state_string): pass def test_signature_finishBuild(self): @self.assertArgSpecMatches(self.db.builds.finishBuild) def finishBuild(self, buildid, results): pass def test_signature_getBuildProperties(self): @self.assertArgSpecMatches(self.db.builds.getBuildProperties) def getBuildProperties(self, bid): pass def test_signature_setBuildProperty(self): @self.assertArgSpecMatches(self.db.builds.setBuildProperty) def setBuildProperty(self, bid, name, value, source): pass # method tests @defer.inlineCallbacks def test_getBuild(self): yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=None, state_string='build 5', results=None)) @defer.inlineCallbacks def test_getBuild_missing(self): bdict = yield self.db.builds.getBuild(50) self.assertEqual(bdict, None) @defer.inlineCallbacks def test_getBuildByNumber(self): yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) bdict = yield self.db.builds.getBuildByNumber(builderid=77, number=5) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict['id'], 50) @defer.inlineCallbacks def test_getBuilds(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds() for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_builderid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(builderid=88) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[51]]) @defer.inlineCallbacks def test_getBuilds_buildrequestid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(buildrequestid=42) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_workerid(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield 
self.db.builds.getBuilds(workerid=13) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51]]) def test_signature_getBuildsForChange(self): @self.assertArgSpecMatches(self.db.builds.getBuildsForChange) def getBuildsForChange(self, changeid): pass @defer.inlineCallbacks def do_test_getBuildsForChange(self, rows, changeid, expected): yield self.insertTestData(rows) builds = yield self.db.builds.getBuildsForChange(changeid) self.assertEqual(sorted(builds), sorted(expected)) def test_getBuildsForChange_OneCodebase(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, created_at=CREATED_AT, revision="aaa"), fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=1, complete=0, complete_at=None), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222, results=1), ] expected = [{ 'id': 50, 'number': 5, 'builderid': 77, 'buildrequestid': 19, 'workerid': 13, 'masterid': 88, 'started_at': epoch2datetime(1304262222), 'complete_at': None, 'state_string': 'test', 'results': 1}] return self.do_test_getBuildsForChange(rows, 14, expected) @defer.inlineCallbacks def test_getBuilds_complete(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(complete=True) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_addBuild_first(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2') bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_addBuild_existing(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [ fakedb.Build(number=10, buildrequestid=41, builderid=77, masterid=88, workerid=13), ]) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2') bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(number, 11) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_setBuildStateString(self): yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) yield self.db.builds.setBuildStateString(buildid=50, state_string='test test2') bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, 
buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=None, state_string='test test2', results=None)) @defer.inlineCallbacks def test_finishBuild(self): self.reactor.advance(TIME4) yield self.insertTestData(self.backgroundData + [self.threeBuilds[0]]) yield self.db.builds.finishBuild(buildid=50, results=7) bdict = yield self.db.builds.getBuild(50) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(bdict, dict(id=50, number=5, buildrequestid=42, masterid=88, builderid=77, workerid=13, started_at=epoch2datetime(TIME1), complete_at=epoch2datetime(TIME4), state_string='build 5', results=7)) @defer.inlineCallbacks def testgetBuildPropertiesEmpty(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) for buildid in (50, 51, 52): props = yield self.db.builds.getBuildProperties(buildid) self.assertEqual(0, len(props)) @defer.inlineCallbacks def testsetandgetProperties(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) yield self.db.builds.setBuildProperty(50, 'prop', 42, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (42, 'test')}) @defer.inlineCallbacks def testsetgetsetProperties(self): yield self.insertTestData(self.backgroundData + self.threeBuilds) props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {}) yield self.db.builds.setBuildProperty(50, 'prop', 42, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (42, 'test')}) # set a new value yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test')}) # set a new source yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test_source') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test_source')}) # set the same yield self.db.builds.setBuildProperty(50, 'prop', 45, 'test_source') props = yield self.db.builds.getBuildProperties(50) self.assertEqual(props, {'prop': (45, 'test_source')}) class RealTests(Tests): @defer.inlineCallbacks def test_addBuild_existing_race(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) # add new builds at *just* the wrong time, repeatedly numbers = list(range(1, 8)) def raceHook(conn): if not numbers: return conn.execute(self.db.model.builds.insert(), {'number': numbers.pop(0), 'buildrequestid': 41, 'masterid': 88, 'workerid': 13, 'builderid': 77, 'started_at': TIME1, 'state_string': "hi"}) id, number = yield self.db.builds.addBuild(builderid=77, buildrequestid=41, workerid=13, masterid=88, state_string='test test2', _race_hook=raceHook) bdict = yield self.db.builds.getBuild(id) validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(number, 8) self.assertEqual(bdict, {'buildrequestid': 41, 'builderid': 77, 'id': id, 'masterid': 88, 'number': number, 'workerid': 13, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'state_string': 'test test2', 'results': None}) @defer.inlineCallbacks def test_getBuilds_resultSpecFilter(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('complete_at', 'ne', [None])]) rs.fieldMapping = {'complete_at': 'builds.complete_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: 
bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecOrder(self): rs = resultspec.ResultSpec(order=['-started_at']) rs.fieldMapping = {'started_at': 'builds.started_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) # applying the spec in the db layer should have emptied the order in # resultSpec self.assertEqual(rs.order, None) # assert applying the same order at the data layer will give the same # results rs = resultspec.ResultSpec(order=['-started_at']) ordered_bdicts = rs.apply(bdicts) self.assertEqual(ordered_bdicts, bdicts) # assert applying an opposite order at the data layer will give different # results rs = resultspec.ResultSpec(order=['started_at']) ordered_bdicts = rs.apply(bdicts) self.assertNotEqual(ordered_bdicts, bdicts) @defer.inlineCallbacks def test_getBuilds_limit(self): rs = resultspec.ResultSpec(order=['-started_at'], limit=1, offset=2) rs.fieldMapping = {'started_at': 'builds.started_at'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) # applying the spec in the db layer should have emptied the limit and # offset in resultSpec self.assertEqual(rs.limit, None) self.assertEqual(rs.offset, None) # assert applying the same filter at the data layer will give the same # results rs = resultspec.ResultSpec(order=['-started_at'], limit=1, offset=2) bdicts2 = yield self.db.builds.getBuilds() ordered_bdicts = rs.apply(bdicts2) self.assertEqual(ordered_bdicts, bdicts) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterEqTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('number', 'eq', [6, 7])]) rs.fieldMapping = {'number': 'builds.number'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[51], self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterNeTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('number', 'ne', [6, 7])]) rs.fieldMapping = {'number': 'builds.number'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[50]]) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterContainsOneValue(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('state_string', 'contains', ['7'])]) rs.fieldMapping = {'state_string': 'builds.state_string'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, key=lambda bd: bd['id']), [self.threeBdicts[52]]) @defer.inlineCallbacks def test_getBuilds_resultSpecFilterContainsTwoValues(self): rs = resultspec.ResultSpec( filters=[resultspec.Filter('state_string', 'contains', ['build 5', 'build 6'])]) rs.fieldMapping = {'state_string': 'builds.state_string'} yield self.insertTestData(self.backgroundData + self.threeBuilds) bdicts = yield self.db.builds.getBuilds(resultSpec=rs) for bdict in bdicts: validation.verifyDbDict(self, 'dbbuilddict', bdict) self.assertEqual(sorted(bdicts, 
key=lambda bd: bd['id']), [self.threeBdicts[50], self.threeBdicts[51]]) class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers', 'build_properties', 'changes', 'sourcestamps', 'buildset_sourcestamps', 'patches']) self.db.builds = builds.BuildsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_buildsets.py000066400000000000000000000602721361162603000244410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import buildsets from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import UTC from buildbot.util import datetime2epoch from buildbot.util import epoch2datetime class Tests(interfaces.InterfaceTests): def setUpTests(self): self.now = 9272359 self.reactor.advance(self.now) # set up a sourcestamp with id 234 for use below return self.insertTestData([ fakedb.SourceStamp(id=234), fakedb.Builder(id=1, name='bldr1'), fakedb.Builder(id=2, name='bldr2'), ]) def test_signature_addBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.addBuildset) def addBuildset(self, sourcestamps, reason, properties, builderids, waited_for, external_idstring=None, submitted_at=None, parent_buildid=None, parent_relationship=None): pass def test_signature_completeBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.completeBuildset) def completeBuildset(self, bsid, results, complete_at=None): pass def test_signature_getBuildset(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildset) def getBuildset(self, bsid): pass def test_signature_getBuildsets(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildsets) def getBuildsets(self, complete=None, resultSpec=None): pass def test_signature_getRecentBuildsets(self): @self.assertArgSpecMatches(self.db.buildsets.getRecentBuildsets) def getBuildsets(self, count=None, branch=None, repository=None, complete=None): pass def test_signature_getBuildsetProperties(self): @self.assertArgSpecMatches(self.db.buildsets.getBuildsetProperties) def 
getBuildsetProperties(self, key, no_cache=False): pass @defer.inlineCallbacks def test_addBuildset_getBuildset(self): bsid, brids = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', waited_for=False) # TODO: verify buildrequests too bsdict = yield self.db.buildsets.getBuildset(bsid) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='because', sourcestamps=[234], submitted_at=datetime.datetime(1970, 4, 18, 7, 39, 19, tzinfo=UTC), complete=False, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None, bsid=bsid)) @defer.inlineCallbacks def test_addBuildset_getBuildset_explicit_submitted_at(self): bsid_brids = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', submitted_at=epoch2datetime(8888888), waited_for=False) bsdict = yield self.db.buildsets.getBuildset(bsid_brids[0]) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='because', sourcestamps=[234], submitted_at=datetime.datetime(1970, 4, 13, 21, 8, 8, tzinfo=UTC), complete=False, complete_at=None, results=-1, parent_buildid=None, parent_relationship=None, bsid=bsdict['bsid'])) @defer.inlineCallbacks def do_test_getBuildsetProperties(self, buildsetid, rows, expected): yield self.insertTestData(rows) props = yield self.db.buildsets.getBuildsetProperties(buildsetid) self.assertEqual(props, expected) def test_getBuildsetProperties_multiple(self): return self.do_test_getBuildsetProperties(91, [ fakedb.Buildset(id=91, complete=0, results=-1, submitted_at=0), fakedb.BuildsetProperty(buildsetid=91, property_name='prop1', property_value='["one", "fake1"]'), fakedb.BuildsetProperty(buildsetid=91, property_name='prop2', property_value='["two", "fake2"]'), ], dict(prop1=("one", "fake1"), prop2=("two", "fake2"))) def test_getBuildsetProperties_empty(self): return self.do_test_getBuildsetProperties(91, [ fakedb.Buildset(id=91, complete=0, results=-1, submitted_at=0), ], dict()) def test_getBuildsetProperties_nosuch(self): "returns an empty dict even if no such buildset exists" return self.do_test_getBuildsetProperties(91, [], dict()) @defer.inlineCallbacks def test_getBuildset_incomplete_zero(self): yield self.insertTestData([ fakedb.Buildset(id=91, complete=0, complete_at=0, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), ]) bsdict = yield self.db.buildsets.getBuildset(91) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='rsn', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, complete_at=epoch2datetime(0), results=-1, bsid=91, parent_buildid=None, parent_relationship=None)) @defer.inlineCallbacks def test_getBuildset_complete(self): yield self.insertTestData([ fakedb.Buildset(id=91, complete=1, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), ]) bsdict = yield self.db.buildsets.getBuildset(91) validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdict, dict(external_idstring='extid', reason='rsn', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete=True, 
complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), results=-1, bsid=91, parent_buildid=None, parent_relationship=None)) @defer.inlineCallbacks def test_getBuildset_nosuch(self): bsdict = yield self.db.buildsets.getBuildset(91) self.assertEqual(bsdict, None) def insert_test_getBuildsets_data(self): return self.insertTestData([ fakedb.Buildset(id=91, complete=0, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn1'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=234), fakedb.Buildset(id=92, complete=1, complete_at=298297876, results=7, submitted_at=266761876, external_idstring='extid', reason='rsn2'), fakedb.BuildsetSourceStamp(buildsetid=92, sourcestampid=234), ]) @defer.inlineCallbacks def test_getBuildsets_empty(self): bsdictlist = yield self.db.buildsets.getBuildsets() self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getBuildsets_all(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield self.db.buildsets.getBuildsets() def bsdictKey(bsdict): return bsdict['reason'] for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(sorted(bsdictlist, key=bsdictKey), sorted([ dict(external_idstring='extid', reason='rsn1', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), dict(external_idstring='extid', reason='rsn2', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ], key=bsdictKey)) @defer.inlineCallbacks def test_getBuildsets_complete(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield self.db.buildsets.getBuildsets(complete=True) for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn2', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ]) @defer.inlineCallbacks def test_getBuildsets_incomplete(self): yield self.insert_test_getBuildsets_data() bsdictlist = yield self.db.buildsets.getBuildsets(complete=False) for bsdict in bsdictlist: validation.verifyDbDict(self, 'bsdict', bsdict) self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn1', sourcestamps=[234], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), ]) def test_completeBuildset_already_completed(self): d = self.insert_test_getBuildsets_data() d.addCallback(lambda _: self.db.buildsets.completeBuildset(bsid=92, results=6)) return self.assertFailure(d, buildsets.AlreadyCompleteError) def test_completeBuildset_missing(self): d = self.insert_test_getBuildsets_data() d.addCallback(lambda _: self.db.buildsets.completeBuildset(bsid=93, results=6)) return self.assertFailure(d, buildsets.AlreadyCompleteError) @defer.inlineCallbacks def test_completeBuildset(self): yield self.insert_test_getBuildsets_data() yield self.db.buildsets.completeBuildset(bsid=91, 
results=6) bsdicts = yield self.db.buildsets.getBuildsets() bsdicts = [(bsdict['bsid'], bsdict['complete'], datetime2epoch(bsdict['complete_at']), bsdict['results']) for bsdict in bsdicts] self.assertEqual(sorted(bsdicts), sorted([ (91, 1, self.now, 6), (92, 1, 298297876, 7)])) @defer.inlineCallbacks def test_completeBuildset_explicit_complete_at(self): yield self.insert_test_getBuildsets_data() yield self.db.buildsets.completeBuildset(bsid=91, results=6, complete_at=epoch2datetime(72759)) bsdicts = yield self.db.buildsets.getBuildsets() bsdicts = [(bsdict['bsid'], bsdict['complete'], datetime2epoch(bsdict['complete_at']), bsdict['results']) for bsdict in bsdicts] self.assertEqual(sorted(bsdicts), sorted([ (91, 1, 72759, 6), (92, 1, 298297876, 7)])) def insert_test_getRecentBuildsets_data(self): return self.insertTestData([ fakedb.SourceStamp(id=91, branch='branch_a', repository='repo_a'), fakedb.Buildset(id=91, complete=0, complete_at=298297875, results=-1, submitted_at=266761875, external_idstring='extid', reason='rsn1'), fakedb.BuildsetSourceStamp(buildsetid=91, sourcestampid=91), fakedb.Buildset(id=92, complete=1, complete_at=298297876, results=7, submitted_at=266761876, external_idstring='extid', reason='rsn2'), fakedb.BuildsetSourceStamp(buildsetid=92, sourcestampid=91), # buildset unrelated to the change fakedb.Buildset(id=93, complete=1, complete_at=298297877, results=7, submitted_at=266761877, external_idstring='extid', reason='rsn2'), ]) @defer.inlineCallbacks def test_getRecentBuildsets_all(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn1', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC), complete=False, results=-1, bsid=91, parent_buildid=None, parent_relationship=None), dict(external_idstring='extid', reason='rsn2', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None) ]) @defer.inlineCallbacks def test_getRecentBuildsets_one(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(1, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, [ dict(external_idstring='extid', reason='rsn2', sourcestamps=[91], submitted_at=datetime.datetime(1978, 6, 15, 12, 31, 16, tzinfo=UTC), complete_at=datetime.datetime(1979, 6, 15, 12, 31, 16, tzinfo=UTC), complete=True, results=7, bsid=92, parent_buildid=None, parent_relationship=None), ]) @defer.inlineCallbacks def test_getRecentBuildsets_zero(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(0, branch='branch_a', repository='repo_a') self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getRecentBuildsets_noBranchMatch(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='bad_branch', repository='repo_a') self.assertEqual(bsdictlist, []) @defer.inlineCallbacks def test_getRecentBuildsets_noRepoMatch(self): yield self.insert_test_getRecentBuildsets_data() bsdictlist = yield self.db.buildsets.getRecentBuildsets(2, branch='branch_a', repository='bad_repo') 
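        # Illustrative sketch (not from the upstream buildbot sources): the
        # buildset tests in this file show that addBuildset() returns a
        # (bsid, brids) tuple -- brids mapping each builderid to its new
        # buildrequest id -- and that completeBuildset() raises
        # AlreadyCompleteError for an already-complete or missing buildset.
        # A minimal caller-side sketch, assuming a `db` object exposing the
        # same `buildsets` component as `self.db` here; `add_and_complete`
        # is a hypothetical name:
        #
        #     from twisted.internet import defer
        #     from buildbot.db import buildsets
        #
        #     @defer.inlineCallbacks
        #     def add_and_complete(db, ssid, builderid, results):
        #         bsid, brids = yield db.buildsets.addBuildset(
        #             sourcestamps=[ssid], reason='because', properties={},
        #             builderids=[builderid], waited_for=False)
        #         try:
        #             yield db.buildsets.completeBuildset(bsid=bsid,
        #                                                 results=results)
        #         except buildsets.AlreadyCompleteError:
        #             pass  # someone else completed it first
        #         return bsid, brids[builderid]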
self.assertEqual(bsdictlist, []) class RealTests(Tests): @defer.inlineCallbacks def test_addBuildset_simple(self): (bsid, brids) = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties={}, builderids=[2], external_idstring='extid', waited_for=True) def thd(conn): # we should only have one brid self.assertEqual(len(brids), 1) # should see one buildset row r = conn.execute(self.db.model.buildsets.select()) rows = [(row.id, row.external_idstring, row.reason, row.complete, row.complete_at, row.submitted_at, row.results) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 'extid', 'because', 0, None, self.now, -1)]) # one buildrequests row r = conn.execute(self.db.model.buildrequests.select()) self.assertEqual(r.keys(), ['id', 'buildsetid', 'builderid', 'priority', 'complete', 'results', 'submitted_at', 'complete_at', 'waited_for']) self.assertEqual(r.fetchall(), [(bsid, brids[2], 2, 0, 0, -1, self.now, None, 1)]) # one buildset_sourcestamps row r = conn.execute(self.db.model.buildset_sourcestamps.select()) self.assertEqual( list(r.keys()), ['id', 'buildsetid', 'sourcestampid']) self.assertEqual(r.fetchall(), [(1, bsid, 234)]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addBuildset_bigger(self): props = dict(prop=(['list'], 'test')) yield defer.succeed(None) xxx_todo_changeme1 = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', waited_for=False, properties=props, builderids=[1, 2]) (bsid, brids) = xxx_todo_changeme1 def thd(conn): self.assertEqual(len(brids), 2) # should see one buildset row r = conn.execute(self.db.model.buildsets.select()) rows = [(row.id, row.external_idstring, row.reason, row.complete, row.complete_at, row.results) for row in r.fetchall()] self.assertEqual(rows, [(bsid, None, 'because', 0, None, -1)]) # one property row r = conn.execute(self.db.model.buildset_properties.select()) rows = [(row.buildsetid, row.property_name, row.property_value) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 'prop', json.dumps([['list'], 'test']))]) # one buildset_sourcestamps row r = conn.execute(self.db.model.buildset_sourcestamps.select()) rows = [(row.buildsetid, row.sourcestampid) for row in r.fetchall()] self.assertEqual(rows, [(bsid, 234)]) # and two buildrequests rows (and don't re-check the default # columns) r = conn.execute(self.db.model.buildrequests.select()) rows = [(row.buildsetid, row.id, row.builderid) for row in r.fetchall()] # we don't know which of the brids is assigned to which # buildername, but either one will do self.assertEqual(sorted(rows), [(bsid, brids[1], 1), (bsid, brids[2], 2)]) yield self.db.pool.do(thd) class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData return self.setUpTests() @defer.inlineCallbacks def test_addBuildset_bad_waited_for(self): # only the fake db asserts on the type of waited_for d = self.db.buildsets.addBuildset(sourcestamps=[234], reason='because', properties={}, builderids=[1], external_idstring='extid', waited_for='wat') yield self.assertFailure(d, AssertionError) class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['patches', 'buildsets', 'buildset_properties', 'objects', 'buildrequests', 'sourcestamps', 'buildset_sourcestamps', 
'builders', 'builds', 'masters', 'workers']) self.db.buildsets = buildsets.BuildsetsConnectorComponent(self.db) yield self.setUpTests() def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def test_addBuildset_properties_cache(self): """ Test that `addChange` properly seeds the `getChange` cache. """ # Patchup the buildset properties cache so we can verify that # it got called form `addBuildset`. mockedCachePut = mock.Mock() self.patch( self.db.buildsets.getBuildsetProperties.cache, "put", mockedCachePut) # Setup a dummy set of properties to insert with the buildset. props = dict(prop=(['list'], 'test')) # Now, call `addBuildset`, and verify that the above properties # were seed in the `getBuildsetProperties` cache. bsid, _ = yield self.db.buildsets.addBuildset( sourcestamps=[234], reason='because', properties=props, builderids=[1, 2], waited_for=False) mockedCachePut.assert_called_once_with(bsid, props) buildbot-2.6.0/master/buildbot/test/unit/test_db_changes.py000066400000000000000000000734401361162603000240540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.db import builds from buildbot.db import changes from buildbot.db import sourcestamps from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime SOMETIME = 20398573 OTHERTIME = 937239287 class Tests(interfaces.InterfaceTests): # common sample data change13_rows = [ fakedb.SourceStamp(id=92, branch="thirteen"), fakedb.Change(changeid=13, author="dustin", comments="fix spelling", branch="master", revision="deadbeef", committer="justin", when_timestamp=266738400, revlink=None, category=None, repository='', codebase='', project='', sourcestampid=92), fakedb.ChangeFile(changeid=13, filename='master/README.txt'), fakedb.ChangeFile(changeid=13, filename='worker/README.txt'), fakedb.ChangeProperty(changeid=13, property_name='notest', property_value='["no","Change"]'), ] change14_rows = [ fakedb.SourceStamp(id=233, branch="fourteen"), fakedb.Change(changeid=14, author="warner", comments="fix whitespace", branch="warnerdb", revision="0e92a098b", committer="david", when_timestamp=266738404, revlink='http://warner/0e92a098b', category='devel', repository='git://warner', codebase='mainapp', project='Buildbot', sourcestampid=233), fakedb.ChangeFile(changeid=14, filename='master/buildbot/__init__.py'), ] change14_dict = { 'changeid': 14, 'parent_changeids': [], 'author': 'warner', 'committer': 'david', 'branch': 'warnerdb', 'category': 'devel', 'comments': 'fix whitespace', 'files': 
['master/buildbot/__init__.py'], 'project': 'Buildbot', 'properties': {}, 'repository': 'git://warner', 'codebase': 'mainapp', 'revision': '0e92a098b', 'revlink': 'http://warner/0e92a098b', 'when_timestamp': epoch2datetime(266738404), 'sourcestampid': 233, } # tests def test_signature_addChange(self): @self.assertArgSpecMatches(self.db.changes.addChange) def addChange(self, author=None, committer=None, files=None, comments=None, is_dir=None, revision=None, when_timestamp=None, branch=None, category=None, revlink='', properties=None, repository='', codebase='', project='', uid=None): pass def test_signature_getChange(self): @self.assertArgSpecMatches(self.db.changes.getChange) def getChange(self, key, no_cache=False): pass @defer.inlineCallbacks def test_addChange_getChange(self): self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(OTHERTIME), branch='master', category=None, revlink=None, properties={}, repository='repo://', codebase='cb', project='proj') chdict = yield self.db.changes.getChange(changeid) validation.verifyDbDict(self, 'chdict', chdict) chdict = chdict.copy() ss = yield self.db.sourcestamps.getSourceStamp(chdict['sourcestampid']) chdict['sourcestampid'] = ss self.assertEqual(chdict, { 'author': 'dustin', 'committer': 'justin', 'branch': 'master', 'category': None, 'changeid': changeid, 'parent_changeids': [], 'codebase': 'cb', 'comments': 'fix spelling', 'files': [], 'project': 'proj', 'properties': {}, 'repository': 'repo://', 'revision': '2d6caa52', 'revlink': None, 'sourcestampid': { 'branch': 'master', 'codebase': 'cb', 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo://', 'revision': '2d6caa52', 'created_at': epoch2datetime(SOMETIME), 'ssid': ss['ssid'], }, 'when_timestamp': epoch2datetime(OTHERTIME), }) @defer.inlineCallbacks def test_addChange_withParent(self): yield self.insertTestData(self.change14_rows) self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='delanne', committer='melanne', files=[], comments='child of changeid14', revision='50adad56', when_timestamp=epoch2datetime(OTHERTIME), branch='warnerdb', category='devel', revlink=None, properties={}, repository='git://warner', codebase='mainapp', project='Buildbot') chdict = yield self.db.changes.getChange(changeid) validation.verifyDbDict(self, 'chdict', chdict) chdict = chdict.copy() ss = yield self.db.sourcestamps.getSourceStamp(chdict['sourcestampid']) chdict['sourcestampid'] = ss self.assertEqual(chdict, { 'author': 'delanne', 'committer': 'melanne', 'branch': 'warnerdb', 'category': 'devel', 'changeid': changeid, 'parent_changeids': [14], 'codebase': 'mainapp', 'comments': 'child of changeid14', 'files': [], 'project': 'Buildbot', 'properties': {}, 'repository': 'git://warner', 'revision': '50adad56', 'revlink': None, 'sourcestampid': { 'branch': 'warnerdb', 'codebase': 'mainapp', 'created_at': epoch2datetime(SOMETIME), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'Buildbot', 'repository': 'git://warner', 'revision': '50adad56', 'ssid': ss['ssid'] }, 'when_timestamp': epoch2datetime(OTHERTIME), }) @defer.inlineCallbacks def test_getChange_chdict(self): yield self.insertTestData(self.change14_rows) chdict = yield 
self.db.changes.getChange(14)
        validation.verifyDbDict(self, 'chdict', chdict)
        self.assertEqual(chdict, self.change14_dict)

    @defer.inlineCallbacks
    def test_getChange_missing(self):
        chdict = yield self.db.changes.getChange(14)
        self.assertTrue(chdict is None)

    def test_signature_getChangeUids(self):
        @self.assertArgSpecMatches(self.db.changes.getChangeUids)
        def getChangeUids(self, changeid):
            pass

    @defer.inlineCallbacks
    def test_getChangeUids_missing(self):
        res = yield self.db.changes.getChangeUids(1)
        self.assertEqual(res, [])

    @defer.inlineCallbacks
    def test_getChangeUids_found(self):
        yield self.insertTestData(self.change14_rows + [
            fakedb.SourceStamp(id=92),
            fakedb.User(uid=1),
            fakedb.ChangeUser(changeid=14, uid=1),
        ])
        res = yield self.db.changes.getChangeUids(14)
        self.assertEqual(res, [1])

    @defer.inlineCallbacks
    def test_getChangeUids_multi(self):
        yield self.insertTestData(self.change14_rows + self.change13_rows + [
            fakedb.User(uid=1, identifier="one"),
            fakedb.User(uid=2, identifier="two"),
            fakedb.User(uid=99, identifier="nooo"),
            fakedb.ChangeUser(changeid=14, uid=1),
            fakedb.ChangeUser(changeid=14, uid=2),
            fakedb.ChangeUser(changeid=13, uid=99),  # not selected
        ])
        res = yield self.db.changes.getChangeUids(14)
        self.assertEqual(sorted(res), [1, 2])

    def test_signature_getRecentChanges(self):
        @self.assertArgSpecMatches(self.db.changes.getRecentChanges)
        def getRecentChanges(self, count):
            pass

    def test_signature_getChanges(self):
        @self.assertArgSpecMatches(self.db.changes.getChanges)
        def getChanges(self):
            pass

    def insert7Changes(self):
        return self.insertTestData([
            fakedb.SourceStamp(id=922),
            fakedb.Change(changeid=8, sourcestampid=922),
            fakedb.Change(changeid=9, sourcestampid=922),
            fakedb.Change(changeid=10, sourcestampid=922),
            fakedb.Change(changeid=11, sourcestampid=922),
            fakedb.Change(changeid=12, sourcestampid=922),
        ] + self.change13_rows + self.change14_rows)

    @defer.inlineCallbacks
    def test_getRecentChanges_subset(self):
        yield self.insert7Changes()
        changes = yield self.db.changes.getRecentChanges(5)
        changeids = [c['changeid'] for c in changes]
        self.assertEqual(changeids, [10, 11, 12, 13, 14])

    @defer.inlineCallbacks
    def test_getChangesCount(self):
        yield self.insert7Changes()
        n = yield self.db.changes.getChangesCount()
        self.assertEqual(n, 7)

    @defer.inlineCallbacks
    def test_getChangesHugeCount(self):
        yield self.insertTestData([
            fakedb.SourceStamp(id=92),
        ] + [
            fakedb.Change(changeid=i) for i in range(2, 102)])
        n = yield self.db.changes.getChangesCount()
        self.assertEqual(n, 100)

    @defer.inlineCallbacks
    def test_getRecentChanges_empty(self):
        changes = yield self.db.changes.getRecentChanges(5)
        changeids = [c['changeid'] for c in changes]
        self.assertEqual(changeids, [])
        changes = yield self.db.changes.getChanges()
        changeids = [c['changeid'] for c in changes]
        self.assertEqual(changeids, [])

    @defer.inlineCallbacks
    def test_getRecentChanges_missing(self):
        yield self.insertTestData(self.change13_rows + self.change14_rows)

        def check(changes):
            # requested all, but only got 2
            # sort by changeid, since we assert on change 13 at index 0
            changes.sort(key=lambda c: c['changeid'])
            changeids = [c['changeid'] for c in changes]
            self.assertEqual(changeids, [13, 14])
            # double-check that they have .files, etc.
self.assertEqual(sorted(changes[0]['files']), sorted(['master/README.txt', 'worker/README.txt'])) self.assertEqual(changes[0]['properties'], {'notest': ('no', 'Change')}) changes = yield self.db.changes.getRecentChanges(5) check(changes) changes = yield self.db.changes.getChanges() check(changes) def test_signature_getLatestChangeid(self): @self.assertArgSpecMatches(self.db.changes.getLatestChangeid) def getLatestChangeid(self): pass @defer.inlineCallbacks def test_getLatestChangeid(self): yield self.insertTestData(self.change13_rows) changeid = yield self.db.changes.getLatestChangeid() self.assertEqual(changeid, 13) @defer.inlineCallbacks def test_getLatestChangeid_empty(self): changeid = yield self.db.changes.getLatestChangeid() self.assertEqual(changeid, None) def test_signature_getParentChangeIds(self): @self.assertArgSpecMatches(self.db.changes.getParentChangeIds) def getParentChangeIds(self, branch, repository, project, codebase): pass @defer.inlineCallbacks def test_getParentChangeIds(self): yield self.insertTestData(self.change14_rows + self.change13_rows) changeid = yield self.db.changes.getParentChangeIds(branch='warnerdb', repository='git://warner', project='Buildbot', codebase='mainapp') self.assertEqual(changeid, [14]) class RealTests(Tests): # tests that only "real" implementations will pass @defer.inlineCallbacks def test_addChange(self): self.reactor.advance(SOMETIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=['master/LICENSING.txt', 'worker/LICENSING.txt'], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(266738400), branch='master', category=None, revlink=None, properties={'platform': ('linux', 'Change')}, repository='', codebase='cb', project='') # check all of the columns of the four relevant tables def thd_change(conn): self.assertEqual(changeid, 1) r = conn.execute(self.db.model.changes.select()) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].author, 'dustin') self.assertEqual(r[0].committer, 'justin') self.assertEqual(r[0].comments, 'fix spelling') self.assertEqual(r[0].branch, 'master') self.assertEqual(r[0].revision, '2d6caa52') self.assertEqual(r[0].when_timestamp, 266738400) self.assertEqual(r[0].category, None) self.assertEqual(r[0].repository, '') self.assertEqual(r[0].codebase, 'cb') self.assertEqual(r[0].project, '') self.assertEqual(r[0].sourcestampid, 1) yield self.db.pool.do(thd_change) def thd_change_files(conn): query = self.db.model.change_files.select() query.where(self.db.model.change_files.c.changeid == 1) query.order_by(self.db.model.change_files.c.filename) r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 2) self.assertEqual(r[0].filename, 'master/LICENSING.txt') self.assertEqual(r[1].filename, 'worker/LICENSING.txt') yield self.db.pool.do(thd_change_files) def thd_change_properties(conn): query = self.db.model.change_properties.select() query.where(self.db.model.change_properties.c.changeid == 1) query.order_by(self.db.model.change_properties.c.property_name) r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].property_name, 'platform') self.assertEqual(r[0].property_value, '["linux", "Change"]') yield self.db.pool.do(thd_change_properties) def thd_change_users(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_users) def thd_change_sourcestamps(conn): query = 
self.db.model.sourcestamps.select() r = conn.execute(query) self.assertEqual([dict(row) for row in r.fetchall()], [{ 'branch': 'master', 'codebase': 'cb', 'id': 1, 'patchid': None, 'project': '', 'repository': '', 'revision': '2d6caa52', 'created_at': SOMETIME, 'ss_hash': 'b777dbd10d1d4c76651335f6a78e278e88b010d6', }]) yield self.db.pool.do(thd_change_sourcestamps) @defer.inlineCallbacks def test_addChange_when_timestamp_None(self): self.reactor.advance(OTHERTIME) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=None, branch='master', category=None, revlink=None, properties={}, repository='', codebase='', project='') # check all of the columns of the four relevant tables def thd(conn): r = conn.execute(self.db.model.changes.select()) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].when_timestamp, OTHERTIME) yield self.db.pool.do(thd) def thd_change(conn): query = self.db.model.change_files.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change) def thd_change_file(conn): query = self.db.model.change_properties.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_file) def thd_change_properties(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_properties) @defer.inlineCallbacks def test_addChange_with_uid(self): yield self.insertTestData([ fakedb.User(uid=1, identifier="one"), ]) changeid = yield self.db.changes.addChange( author='dustin', committer='justin', files=[], comments='fix spelling', revision='2d6caa52', when_timestamp=epoch2datetime(OTHERTIME), branch='master', category=None, revlink=None, properties={}, repository='', codebase='', project='', uid=1) # check all of the columns of the five relevant tables def thd_change(conn): r = conn.execute(self.db.model.changes.select()) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, changeid) self.assertEqual(r[0].when_timestamp, OTHERTIME) yield self.db.pool.do(thd_change) def thd_change_files(conn): query = self.db.model.change_files.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_files) def thd_change_properties(conn): query = self.db.model.change_properties.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd_change_properties) def thd_change_users(conn): query = self.db.model.change_users.select() r = conn.execute(query) r = r.fetchall() self.assertEqual(len(r), 1) self.assertEqual(r[0].changeid, 1) self.assertEqual(r[0].uid, 1) yield self.db.pool.do(thd_change_users) @defer.inlineCallbacks def test_pruneChanges(self): yield self.insertTestData([ fakedb.Scheduler(id=29), fakedb.SourceStamp(id=234, branch='aa'), fakedb.SourceStamp(id=235, branch='bb'), fakedb.Change(changeid=11), fakedb.Change(changeid=12, sourcestampid=234), fakedb.SchedulerChange(schedulerid=29, changeid=12), ] + self.change13_rows + [ fakedb.SchedulerChange(schedulerid=29, changeid=13), ] + self.change14_rows + [ fakedb.SchedulerChange(schedulerid=29, changeid=14), fakedb.Change(changeid=15, sourcestampid=235), ] ) # pruning with a horizon of 2 should delete changes 11, 12 and 13 yield self.db.changes.pruneChanges(2) def thd(conn): results = {} for 
tbl_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes'): tbl = self.db.model.metadata.tables[tbl_name] res = conn.execute(sa.select([tbl.c.changeid])) results[tbl_name] = sorted( [row[0] for row in res.fetchall()]) self.assertEqual(results, { 'scheduler_changes': [14], 'change_files': [14], 'change_properties': [], 'changes': [14, 15], }) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_pruneChanges_lots(self): yield self.insertTestData([ fakedb.SourceStamp(id=29), ] + [ fakedb.Change(changeid=n, sourcestampid=29) for n in range(1, 151) ]) yield self.db.changes.pruneChanges(1) def thd(conn): results = {} for tbl_name in ('scheduler_changes', 'change_files', 'change_properties', 'changes'): tbl = self.db.model.metadata.tables[tbl_name] res = conn.execute(sa.select([sa.func.count()]).select_from(tbl)) results[tbl_name] = res.fetchone()[0] res.close() self.assertEqual(results, { 'scheduler_changes': 0, 'change_files': 0, 'change_properties': 0, 'changes': 1, }) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_pruneChanges_None(self): yield self.insertTestData(self.change13_rows) yield self.db.changes.pruneChanges(None) def thd(conn): tbl = self.db.model.changes res = conn.execute(tbl.select()) self.assertEqual([row.changeid for row in res.fetchall()], [13]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getChangesForBuild(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A')] lastID = {"changeid": 0, "sourcestampid": 0, "buildsetid": 0, "buildsetSourceStampid": 0, "buildrequestid": 0, "buildid": 0} codebase_ss = {} # shared state between addChange and addBuild def addChange(codebase, revision, author, committer, comments, branch='master', category='cat', project='proj', repository='repo'): lastID["sourcestampid"] += 1 lastID["changeid"] += 1 parent_changeids = codebase_ss.get(codebase, None) codebase_ss[codebase] = lastID["sourcestampid"] changeRows = [fakedb.SourceStamp(id=lastID["sourcestampid"], codebase=codebase, revision=revision), fakedb.Change(changeid=lastID["changeid"], author=author, committer=committer, comments=comments, revision=revision, sourcestampid=lastID["sourcestampid"], parent_changeids=parent_changeids, when_timestamp=SOMETIME + lastID["changeid"], branch=branch, category=category, project=project, repository=repository)] return changeRows def addBuild(codebase_ss, results=0): lastID["buildid"] += 1 lastID["buildsetid"] += 1 lastID["buildrequestid"] += 1 buildRows = [fakedb.Buildset(id=lastID["buildsetid"], reason='foo', submitted_at=1300305012, results=-1)] for cb, ss in codebase_ss.items(): lastID["buildsetSourceStampid"] += 1 buildRows.append( fakedb.BuildsetSourceStamp(id=lastID["buildsetSourceStampid"], sourcestampid=ss, buildsetid=lastID["buildsetid"])) codebase_ss.clear() buildRows.extend([ fakedb.BuildRequest(id=lastID["buildrequestid"], buildsetid=lastID["buildsetid"], builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=lastID["buildid"], buildrequestid=lastID["buildrequestid"], number=lastID["buildid"], masterid=88, builderid=77, state_string="test", workerid=13, started_at=SOMETIME + lastID["buildid"], complete_at=SOMETIME + 2 * lastID["buildid"], results=results)]) return buildRows # Build1 has 1 change per code base rows.extend(addChange('A', 1, 'franck', 'franck', '1st commit')) rows.extend(addChange('B', 1, 'alice', 'alice', '2nd commit')) rows.extend(addChange('C', 1, 'bob', 'bob', '3rd commit')) 
rows.extend(addBuild(codebase_ss)) # Build 2 has only one change for codebase A rows.extend(addChange('A', 2, 'delanne', 'delanne', '4th commit')) rows.extend(addBuild(codebase_ss)) # Build 3 has only one change for codebase B rows.extend(addChange('B', 2, 'bob', 'bob', '6th commit')) rows.extend(addBuild(codebase_ss)) # Build 4 has no change rows.extend(addBuild(codebase_ss)) # Build 5 has 2 changes for codebase A and 1 change for codebase C rows.extend(addChange('A', 3, 'franck', 'franck', '7th commit')) rows.extend(addChange('A', 4, 'alice', 'alice', '8th commit')) rows.extend(addChange('B', 3, 'bob', 'bob', '9th commit')) rows.extend(addBuild(codebase_ss)) # Build 6 has only one change for codebase C rows.extend(addChange('C', 2, 'bob', 'bob', '10th commit')) rows.extend(addBuild(codebase_ss, 2)) # Build 7 has only one change for codebase C rows.extend(addChange('C', 3, 'bob', 'bob', '11th commit')) rows.extend(addBuild(codebase_ss, 2)) yield self.insertTestData(rows) @defer.inlineCallbacks def expect(buildid, commits): got = yield self.db.changes.getChangesForBuild(buildid) got_commits = [c['comments'] for c in got] self.assertEqual(sorted(got_commits), sorted(commits)) yield expect(1, ['2nd commit', '3rd commit', '1st commit']) yield expect(2, ['4th commit']) yield expect(3, ['6th commit']) yield expect(4, []) yield expect(5, ['8th commit', '9th commit', '7th commit']) yield expect(6, ['10th commit']) yield expect(7, ['11th commit']) class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'change_files', 'change_properties', 'scheduler_changes', 'schedulers', 'sourcestampsets', 'sourcestamps', 'patches', 'change_users', 'users', 'buildsets', 'workers', 'builders', 'masters', 'buildrequests', 'builds', 'buildset_sourcestamps', 'workers']) self.db.changes = changes.ChangesConnectorComponent(self.db) self.db.builds = builds.BuildsConnectorComponent(self.db) self.db.sourcestamps = \ sourcestamps.SourceStampsConnectorComponent(self.db) self.master = self.db.master self.master.db = self.db def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_changesources.py000066400000000000000000000273261361162603000252770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import changesources from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin def changeSourceKey(changeSource): return changeSource['id'] class Tests(interfaces.InterfaceTests): # test data cs42 = fakedb.ChangeSource(id=42, name='cool_source') cs87 = fakedb.ChangeSource(id=87, name='lame_source') master13 = fakedb.Master(id=13, name='m1', active=1) cs42master13 = fakedb.ChangeSourceMaster(changesourceid=42, masterid=13) master14 = fakedb.Master(id=14, name='m2', active=0) cs87master14 = fakedb.ChangeSourceMaster(changesourceid=87, masterid=14) # tests def test_signature_findChangeSourceId(self): """The signature of findChangeSourceId is correct""" @self.assertArgSpecMatches(self.db.changesources.findChangeSourceId) def findChangeSourceId(self, name): pass @defer.inlineCallbacks def test_findChangeSourceId_new(self): """findChangeSourceId for a new changesource creates it""" id = yield self.db.changesources.findChangeSourceId('csname') cs = yield self.db.changesources.getChangeSource(id) self.assertEqual(cs['name'], 'csname') @defer.inlineCallbacks def test_findChangeSourceId_existing(self): """findChangeSourceId gives the same answer for the same inputs""" id1 = yield self.db.changesources.findChangeSourceId('csname') id2 = yield self.db.changesources.findChangeSourceId('csname') self.assertEqual(id1, id2) def test_signature_setChangeSourceMaster(self): """setChangeSourceMaster has the right signature""" @self.assertArgSpecMatches(self.db.changesources.setChangeSourceMaster) def setChangeSourceMaster(self, changesourceid, masterid): pass @defer.inlineCallbacks def test_setChangeSourceMaster_fresh(self): """setChangeSourceMaster with a good pair""" yield self.insertTestData([self.cs42, self.master13]) yield self.db.changesources.setChangeSourceMaster(42, 13) cs = yield self.db.changesources.getChangeSource(42) self.assertEqual(cs['masterid'], 13) @defer.inlineCallbacks def test_setChangeSourceMaster_inactive_but_linked(self): """Inactive changesource but already claimed by an active master""" d = self.insertTestData([ self.cs87, self.master13, self.master14, self.cs87master14, ]) d.addCallback(lambda _: self.db.changesources.setChangeSourceMaster(87, 13)) yield self.assertFailure(d, changesources.ChangeSourceAlreadyClaimedError) @defer.inlineCallbacks def test_setChangeSourceMaster_active(self): """Active changesource already claimed by an active master""" d = self.insertTestData([ self.cs42, self.master13, self.cs42master13, ]) d.addCallback(lambda _: self.db.changesources.setChangeSourceMaster(42, 14)) yield self.assertFailure(d, changesources.ChangeSourceAlreadyClaimedError) @defer.inlineCallbacks def test_setChangeSourceMaster_None(self): """A 'None' master disconnects the changesource""" yield self.insertTestData([ self.cs87, self.master14, self.cs87master14, ]) yield self.db.changesources.setChangeSourceMaster(87, None) cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs['masterid'], None) @defer.inlineCallbacks def test_setChangeSourceMaster_None_unowned(self): """A 'None' master for a disconnected changesource""" yield self.insertTestData([self.cs87]) yield 
self.db.changesources.setChangeSourceMaster(87, None) cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs['masterid'], None) def test_signature_getChangeSource(self): """getChangeSource has the right signature""" @self.assertArgSpecMatches(self.db.changesources.getChangeSource) def getChangeSource(self, changesourceid): pass @defer.inlineCallbacks def test_getChangeSource(self): """getChangeSource for a changesource that exists""" yield self.insertTestData([self.cs87]) cs = yield self.db.changesources.getChangeSource(87) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=87, name='lame_source', masterid=None)) @defer.inlineCallbacks def test_getChangeSource_missing(self): """getChangeSource for a changesource that doesn't exist""" cs = yield self.db.changesources.getChangeSource(87) self.assertEqual(cs, None) @defer.inlineCallbacks def test_getChangeSource_active(self): """getChangeSource for a changesource that exists and is active""" yield self.insertTestData([self.cs42, self.master13, self.cs42master13]) cs = yield self.db.changesources.getChangeSource(42) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=42, name='cool_source', masterid=13)) @defer.inlineCallbacks def test_getChangeSource_inactive_but_linked(self): """getChangeSource for a changesource that is assigned but is inactive""" yield self.insertTestData([self.cs87, self.master14, self.cs87master14]) cs = yield self.db.changesources.getChangeSource(87) validation.verifyDbDict(self, 'changesourcedict', cs) self.assertEqual(cs, dict( id=87, name='lame_source', masterid=14)) # row exists, but marked inactive def test_signature_getChangeSources(self): """getChangeSources has right signature""" @self.assertArgSpecMatches(self.db.changesources.getChangeSources) def getChangeSources(self, active=None, masterid=None): pass @defer.inlineCallbacks def test_getChangeSources(self): """getChangeSources returns all changesources""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87, ]) cslist = yield self.db.changesources.getChangeSources() [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist, key=changeSourceKey), sorted([ dict(id=42, name='cool_source', masterid=13), dict(id=87, name='lame_source', masterid=None), ], key=changeSourceKey)) @defer.inlineCallbacks def test_getChangeSources_masterid(self): """getChangeSources returns all changesources for a given master""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87, ]) cslist = yield self.db.changesources.getChangeSources(masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist, key=changeSourceKey), sorted([ dict(id=42, name='cool_source', masterid=13), ], key=changeSourceKey)) @defer.inlineCallbacks def test_getChangeSources_active(self): """getChangeSources for (active changesources, all masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources(active=True) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=42, name='cool_source', masterid=13), ])) @defer.inlineCallbacks def test_getChangeSources_active_masterid(self): """getChangeSources returns (active changesources, given masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, 
self.cs87 ]) cslist = yield self.db.changesources.getChangeSources( active=True, masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=42, name='cool_source', masterid=13), ])) cslist = yield self.db.changesources.getChangeSources( active=True, masterid=14) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) @defer.inlineCallbacks def test_getChangeSources_inactive(self): """getChangeSources returns (inactive changesources, all masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources(active=False) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), sorted([ dict(id=87, name='lame_source', masterid=None), ])) @defer.inlineCallbacks def test_getChangeSources_inactive_masterid(self): """getChangeSources returns (active changesources, given masters)""" yield self.insertTestData([ self.cs42, self.master13, self.cs42master13, self.cs87 ]) cslist = yield self.db.changesources.getChangeSources( active=False, masterid=13) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) cslist = yield self.db.changesources.getChangeSources( active=False, masterid=14) [validation.verifyDbDict(self, 'changesourcedict', cs) for cs in cslist] self.assertEqual(sorted(cslist), []) # always returns [] by spec! class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'changesources', 'masters', 'patches', 'sourcestamps', 'changesource_masters']) self.db.changesources = \ changesources.ChangeSourcesConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_connector.py000066400000000000000000000065611361162603000244360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.db import connector from buildbot.db import exceptions from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util.misc import TestReactorMixin class TestDBConnector(TestReactorMixin, db.RealDatabaseMixin, unittest.TestCase): """ Basic tests of the DBConnector class - all start with an empty DB """ @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpRealDatabase(table_names=[ 'changes', 'change_properties', 'change_files', 'patches', 'sourcestamps', 'buildset_properties', 'buildsets', 'sourcestampsets', 'builds', 'builders', 'masters', 'buildrequests', 'workers']) self.master = fakemaster.make_master(self) self.master.config = config.MasterConfig() self.db = connector.DBConnector(os.path.abspath('basedir')) yield self.db.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): if self.db.running: yield self.db.stopService() yield self.tearDownRealDatabase() @defer.inlineCallbacks def startService(self, check_version=False): self.master.config.db['db_url'] = self.db_url yield self.db.setup(check_version=check_version) self.db.startService() yield self.db.reconfigServiceWithBuildbotConfig(self.master.config) # tests @defer.inlineCallbacks def test_doCleanup_service(self): yield self.startService() self.assertTrue(self.db.cleanup_timer.running) def test_doCleanup_unconfigured(self): self.db.changes.pruneChanges = mock.Mock( return_value=defer.succeed(None)) self.db._doCleanup() self.assertFalse(self.db.changes.pruneChanges.called) @defer.inlineCallbacks def test_doCleanup_configured(self): self.db.changes.pruneChanges = mock.Mock( return_value=defer.succeed(None)) yield self.startService() self.db._doCleanup() self.assertTrue(self.db.changes.pruneChanges.called) def test_setup_check_version_bad(self): if self.db_url == 'sqlite://': raise unittest.SkipTest( 'sqlite in-memory model is always upgraded at connection') d = self.startService(check_version=True) return self.assertFailure(d, exceptions.DatabaseNotReadyError) def test_setup_check_version_good(self): self.db.model.is_current = lambda: defer.succeed(True) return self.startService(check_version=True) buildbot-2.6.0/master/buildbot/test/unit/test_db_dbconfig.py000066400000000000000000000100671361162603000242130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import threads from twisted.trial import unittest from buildbot.db import dbconfig from buildbot.test.util import db class TestDbConfig(db.RealDatabaseMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): # as we will open the db twice, we can't use in memory sqlite yield self.setUpRealDatabase(table_names=['objects', 'object_state'], sqlite_memory=False) yield threads.deferToThread(self.createDbConfig) def createDbConfig(self): self.dbConfig = dbconfig.DbConfig( {"db_url": self.db_url}, self.basedir) def tearDown(self): return self.tearDownRealDatabase() def test_basic(self): def thd(): workersInDB = ['foo', 'bar'] self.dbConfig.set("workers", workersInDB) workers = self.dbConfig.get("workers") self.assertEqual(workers, workersInDB) return threads.deferToThread(thd) def test_default(self): def thd(): workers = self.dbConfig.get("workers", "default") self.assertEqual(workers, "default") return threads.deferToThread(thd) def test_error(self): def thd(): with self.assertRaises(KeyError): self.dbConfig.get("workers") return threads.deferToThread(thd) # supports the 3 different ways to declare db_url in the master.cfg def test_init1(self): obj = dbconfig.DbConfig({"db_url": self.db_url}, self.basedir) self.assertEqual(obj.db_url, self.db_url) def test_init2(self): obj = dbconfig.DbConfig({"db": {"db_url": self.db_url}}, self.basedir) self.assertEqual(obj.db_url, self.db_url) def test_init3(self): obj = dbconfig.DbConfig({}, self.basedir) self.assertEqual(obj.db_url, "sqlite:///state.sqlite") class TestDbConfigNotInitialized(db.RealDatabaseMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): # as we will open the db twice, we can't use in memory sqlite yield self.setUpRealDatabase(table_names=[], sqlite_memory=False) @defer.inlineCallbacks def tearDown(self): yield self.tearDownRealDatabase() def createDbConfig(self, db_url=None): return dbconfig.DbConfig({"db_url": db_url or self.db_url}, self.basedir) def test_default(self): def thd(): db = self.createDbConfig() self.assertEqual("foo", db.get("workers", "foo")) return threads.deferToThread(thd) def test_error(self): def thd(): db = self.createDbConfig() with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url(self): def thd(): db = self.createDbConfig("garbage://") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url2(self): def thd(): db = self.createDbConfig("trash") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) def test_bad_url3(self): def thd(): db = self.createDbConfig("sqlite://bad") with self.assertRaises(KeyError): db.get("workers") return threads.deferToThread(thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_enginestrategy.py000066400000000000000000000174671361162603000255030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from sqlalchemy.engine import url from sqlalchemy.pool import NullPool from twisted.python import runtime from twisted.trial import unittest from buildbot.db import enginestrategy class BuildbotEngineStrategy_special_cases(unittest.TestCase): "Test the special case methods, without actually creating a db" # used several times below mysql_kwargs = dict( basedir='my-base-dir', connect_args=dict(init_command='SET default_storage_engine=MyISAM'), pool_recycle=3600) sqlite_kwargs = dict(basedir='/my-base-dir', poolclass=NullPool) def setUp(self): self.strat = enginestrategy.BuildbotEngineStrategy() # utility def filter_kwargs(self, kwargs): # filter out the listeners list to just include the class name if 'listeners' in kwargs: kwargs['listeners'] = [lstnr.__class__.__name__ for lstnr in kwargs['listeners']] return kwargs # tests def test_sqlite_pct_sub(self): u = url.make_url("sqlite:///%(basedir)s/x/state.sqlite") kwargs = dict(basedir='/my-base-dir') u, kwargs, max_conns = self.strat.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["sqlite:////my-base-dir/x/state.sqlite", 1, self.sqlite_kwargs]) def test_sqlite_relpath(self): url_src = "sqlite:///x/state.sqlite" basedir = "/my-base-dir" expected_url = "sqlite:////my-base-dir/x/state.sqlite" # this looks a whole lot different on windows if runtime.platformType == 'win32': url_src = r'sqlite:///X\STATE.SQLITE' basedir = r'C:\MYBASE~1' expected_url = r'sqlite:///C:\MYBASE~1\X\STATE.SQLITE' exp_kwargs = self.sqlite_kwargs.copy() exp_kwargs['basedir'] = basedir u = url.make_url(url_src) kwargs = dict(basedir=basedir) u, kwargs, max_conns = self.strat.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], [expected_url, 1, exp_kwargs]) def test_sqlite_abspath(self): u = url.make_url("sqlite:////x/state.sqlite") kwargs = dict(basedir='/my-base-dir') u, kwargs, max_conns = self.strat.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["sqlite:////x/state.sqlite", 1, self.sqlite_kwargs]) def test_sqlite_memory(self): u = url.make_url("sqlite://") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_sqlite(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["sqlite://", 1, # only one conn at a time dict(basedir='my-base-dir', connect_args=dict(check_same_thread=False))]) def test_mysql_simple(self): u = url.make_url("mysql://host/dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql://host/dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_userport(self): u = url.make_url("mysql://user:pass@host:1234/dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql://user:pass@host:1234/dbname?" 
"charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_local(self): u = url.make_url("mysql:///dbname") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_args(self): u = url.make_url("mysql:///dbname?foo=bar") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&foo=bar&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_max_idle(self): u = url.make_url("mysql:///dbname?max_idle=1234") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) exp = self.mysql_kwargs.copy() exp['pool_recycle'] = 1234 self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, exp]) def test_mysql_good_charset(self): u = url.make_url("mysql:///dbname?charset=utf8") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_bad_charset(self): u = url.make_url("mysql:///dbname?charset=ebcdic") kwargs = dict(basedir='my-base-dir') with self.assertRaises(TypeError): self.strat.special_case_mysql(u, kwargs) def test_mysql_good_use_unicode(self): u = url.make_url("mysql:///dbname?use_unicode=True") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, self.mysql_kwargs]) def test_mysql_bad_use_unicode(self): u = url.make_url("mysql:///dbname?use_unicode=maybe") kwargs = dict(basedir='my-base-dir') with self.assertRaises(TypeError): self.strat.special_case_mysql(u, kwargs) def test_mysql_storage_engine(self): u = url.make_url("mysql:///dbname?storage_engine=foo") kwargs = dict(basedir='my-base-dir') u, kwargs, max_conns = self.strat.special_case_mysql(u, kwargs) exp = self.mysql_kwargs.copy() exp['connect_args'] = dict( init_command='SET default_storage_engine=foo') self.assertEqual([str(u), max_conns, self.filter_kwargs(kwargs)], ["mysql:///dbname?charset=utf8&use_unicode=True", None, exp]) class BuildbotEngineStrategy(unittest.TestCase): "Test create_engine by creating a sqlite in-memory db" def test_create_engine(self): engine = enginestrategy.create_engine('sqlite://', basedir="/base") self.assertEqual(engine.scalar("SELECT 13 + 14"), 27) buildbot-2.6.0/master/buildbot/test/unit/test_db_logs.py000066400000000000000000000561251361162603000234110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import base64 import bz2 import textwrap import zlib import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.db import logs from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes class Tests(interfaces.InterfaceTests): TIMESTAMP_STEP101 = 100000 TIMESTAMP_STEP102 = 200000 backgroundData = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Step(id=101, buildid=30, number=1, name='one', started_at=TIMESTAMP_STEP101), fakedb.Step(id=102, buildid=30, number=2, name='two', started_at=TIMESTAMP_STEP102), ] testLogLines = [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=7, type='s'), fakedb.LogChunk(logid=201, first_line=0, last_line=1, compressed=0, content=textwrap.dedent("""\ line zero line 1""" + "x" * 200)), fakedb.LogChunk(logid=201, first_line=2, last_line=4, compressed=0, content=textwrap.dedent("""\ line TWO line 2**2""")), fakedb.LogChunk(logid=201, first_line=5, last_line=5, compressed=0, content="another line"), fakedb.LogChunk(logid=201, first_line=6, last_line=6, compressed=0, content="yet another line"), ] bug3101Content = base64.b64decode(""" PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0 9PT09PT09PT09PT09PT09PT09PT09PT09PT09PQpbU0tJUFBFRF0Kbm90IGEgd2luMz IgcGxhdGZvcm0KCmJ1aWxkc2xhdmUudGVzdC51bml0LnRlc3RfcnVucHJvY2Vzcy5UZ XN0UnVuUHJvY2Vzcy50ZXN0UGlwZVN0cmluZwotLS0tLS0tLS0tLS0tLS0tLS0tLS0t LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0 tLS0tLS0tClJhbiAyNjcgdGVzdHMgaW4gNS4zNzhzCgpQQVNTRUQgKHNraXBzPTEsIH N1Y2Nlc3Nlcz0yNjYpCnByb2dyYW0gZmluaXNoZWQgd2l0aCBleGl0IGNvZGUgMAplb GFwc2VkVGltZT04LjI0NTcwMg==""") bug3101Rows = [ fakedb.Log(id=1470, stepid=101, name='problems', slug='problems', complete=1, num_lines=11, type='t'), fakedb.LogChunk(logid=1470, first_line=0, last_line=10, compressed=0, content=bug3101Content), ] @defer.inlineCallbacks def checkTestLogLines(self): expLines = ['line zero', 'line 1' + "x" * 200, 'line TWO', '', 'line 2**2', 'another line', 'yet another line'] for first_line in range(0, 7): for last_line in range(first_line, 7): got_lines = yield self.db.logs.getLogLines( 201, first_line, last_line) self.assertEqual( got_lines, "\n".join(expLines[first_line:last_line + 1] + [""])) # check overflow self.assertEqual((yield self.db.logs.getLogLines(201, 5, 20)), "\n".join(expLines[5:7] + [""])) # signature tests def test_signature_getLog(self): @self.assertArgSpecMatches(self.db.logs.getLog) def getLog(self, logid): pass def test_signature_getLogBySlug(self): @self.assertArgSpecMatches(self.db.logs.getLogBySlug) def getLogBySlug(self, stepid, slug): pass def test_signature_getLogs(self): @self.assertArgSpecMatches(self.db.logs.getLogs) def getLogs(self, stepid=None): pass def 
test_signature_getLogLines(self): @self.assertArgSpecMatches(self.db.logs.getLogLines) def getLogLines(self, logid, first_line, last_line): pass def test_signature_addLog(self): @self.assertArgSpecMatches(self.db.logs.addLog) def addLog(self, stepid, name, slug, type): pass def test_signature_appendLog(self): @self.assertArgSpecMatches(self.db.logs.appendLog) def appendLog(self, logid, content): pass def test_signature_finishLog(self): @self.assertArgSpecMatches(self.db.logs.finishLog) def finishLog(self, logid): pass def test_signature_compressLog(self): @self.assertArgSpecMatches(self.db.logs.compressLog) def compressLog(self, logid, force=False): pass def test_signature_deleteOldLogChunks(self): @self.assertArgSpecMatches(self.db.logs.deleteOldLogChunks) def deleteOldLogChunks(self, older_than_timestamp): pass # method tests @defer.inlineCallbacks def test_getLog(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLog(201) validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict, { 'id': 201, 'stepid': 101, 'name': 'stdio', 'slug': 'stdio', 'complete': False, 'num_lines': 200, 'type': 's', }) @defer.inlineCallbacks def test_getLog_missing(self): logdict = yield self.db.logs.getLog(201) self.assertEqual(logdict, None) @defer.inlineCallbacks def test_getLogBySlug(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), fakedb.Log(id=202, stepid=101, name='dbg.log', slug='dbg_log', complete=1, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLogBySlug(101, 'dbg_log') validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict['id'], 202) @defer.inlineCallbacks def test_getLogBySlug_missing(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdict = yield self.db.logs.getLogBySlug(102, 'stdio') self.assertEqual(logdict, None) @defer.inlineCallbacks def test_getLogs(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), fakedb.Log(id=202, stepid=101, name='dbg.log', slug='dbg_log', complete=1, num_lines=300, type='t'), fakedb.Log(id=203, stepid=102, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) logdicts = yield self.db.logs.getLogs(101) for logdict in logdicts: validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(sorted([ld['id'] for ld in logdicts]), [201, 202]) @defer.inlineCallbacks def test_getLogLines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) yield self.checkTestLogLines() # check line number reversal self.assertEqual((yield self.db.logs.getLogLines(201, 6, 3)), '') @defer.inlineCallbacks def test_getLogLines_empty(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=200, type='s'), ]) self.assertEqual((yield self.db.logs.getLogLines(201, 9, 99)), '') self.assertEqual((yield self.db.logs.getLogLines(999, 9, 99)), '') @defer.inlineCallbacks def test_getLogLines_bug3101(self): # regression test for #3101 content = self.bug3101Content yield self.insertTestData(self.backgroundData + self.bug3101Rows) # overall content is the same, with '\n' padding at the end 
expected = bytes2unicode(self.bug3101Content + b'\n') self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 99)), expected) # try to fetch just one line expected = bytes2unicode(content.split(b'\n')[0] + b'\n') self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 0)), expected) @defer.inlineCallbacks def test_addLog_getLog(self): yield self.insertTestData(self.backgroundData) logid = yield self.db.logs.addLog( stepid=101, name='config.log', slug='config_log', type='t') logdict = yield self.db.logs.getLog(logid) validation.verifyDbDict(self, 'logdict', logdict) self.assertEqual(logdict, { 'id': logid, 'stepid': 101, 'name': 'config.log', 'slug': 'config_log', 'complete': False, 'num_lines': 0, 'type': 't', }) @defer.inlineCallbacks def test_appendLog_getLogLines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) logid = yield self.db.logs.addLog( stepid=102, name='another', slug='another', type='s') self.assertEqual((yield self.db.logs.appendLog(logid, 'xyz\n')), (0, 0)) self.assertEqual((yield self.db.logs.appendLog(201, 'abc\ndef\n')), (7, 8)) self.assertEqual((yield self.db.logs.appendLog(logid, 'XYZ\n')), (1, 1)) self.assertEqual((yield self.db.logs.getLogLines(201, 6, 7)), "yet another line\nabc\n") self.assertEqual((yield self.db.logs.getLogLines(201, 7, 8)), "abc\ndef\n") self.assertEqual((yield self.db.logs.getLogLines(201, 8, 8)), "def\n") self.assertEqual((yield self.db.logs.getLogLines(logid, 0, 1)), "xyz\nXYZ\n") self.assertEqual((yield self.db.logs.getLog(logid)), { 'complete': False, 'id': logid, 'name': 'another', 'slug': 'another', 'num_lines': 2, 'stepid': 102, 'type': 's', }) @defer.inlineCallbacks def test_compressLog(self): yield self.insertTestData(self.backgroundData + self.testLogLines) yield self.db.logs.compressLog(201) # test log lines should still be readable just the same yield self.checkTestLogLines() @defer.inlineCallbacks def test_addLogLines_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\n' * 20000)), # 80k (7, 20006)) lines = yield self.db.logs.getLogLines(201, 7, 50000) self.assertEqual(len(lines), 80000) self.assertEqual(lines, ('abc\n' * 20000)) @defer.inlineCallbacks def test_addLogLines_big_chunk_big_lines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'x' * 33000 + '\n' self.assertEqual((yield self.db.logs.appendLog(201, line * 3)), (7, 9)) # three long lines, all truncated lines = yield self.db.logs.getLogLines(201, 7, 100) self.assertEqual(len(lines), 99003) self.assertEqual(lines, (line * 3)) class RealTests(Tests): @defer.inlineCallbacks def test_addLogLines_db(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\ndef\nghi\njkl\n')), (7, 10)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 10, 'content': b'abc\ndef\nghi\njkl', 'compressed': 0}) @defer.inlineCallbacks def test_addLogLines_huge_lines(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 70000 + '\n' yield self.db.logs.appendLog(201, line * 3) for lineno in 7, 8, 9: line = yield self.db.logs.getLogLines(201, lineno, lineno) self.assertEqual(len(line), 65537) def 
test_splitBigChunk_unicode_misalignment(self): unaligned = ('a ' + '\N{SNOWMAN}' * 30000 + '\n').encode('utf-8') # the first 65536 bytes of that line are not valid utf-8 with self.assertRaises(UnicodeDecodeError): unaligned[:65536].decode('utf-8') chunk, remainder = self.db.logs._splitBigChunk(unaligned, 1) # see that it was truncated by two bytes, and now properly decodes self.assertEqual(len(chunk), 65534) chunk.decode('utf-8') @defer.inlineCallbacks def test_no_compress_small_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) self.db.master.config.logCompressionMethod = "gz" self.assertEqual( (yield self.db.logs.appendLog(201, 'abc\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': b'abc', 'compressed': 0}) @defer.inlineCallbacks def test_raw_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "raw" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': unicode2bytes(line), 'compressed': 0}) @defer.inlineCallbacks def test_gz_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "gz" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': zlib.compress(unicode2bytes(line), 9), 'compressed': 1}) @defer.inlineCallbacks def test_bz2_compress_big_chunk(self): yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "bz2" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 7, 'last_line': 7, 'content': bz2.compress(unicode2bytes(line), 9), 'compressed': 2}) @defer.inlineCallbacks def test_lz4_compress_big_chunk(self): try: import lz4 # noqa pylint: disable=unused-import,import-outside-toplevel except ImportError: raise unittest.SkipTest("lz4 not installed, skip the test") yield self.insertTestData(self.backgroundData + self.testLogLines) line = 'xy' * 10000 self.db.master.config.logCompressionMethod = "lz4" self.assertEqual( (yield self.db.logs.appendLog(201, line + '\n')), (7, 7)) def thd(conn): res = conn.execute(self.db.model.logchunks.select( whereclause=self.db.model.logchunks.c.first_line > 6)) row = res.fetchone() res.close() return dict(row) newRow = yield self.db.pool.do(thd) self.assertEqual(newRow, { 'logid': 201, 'first_line': 
7, 'last_line': 7, 'content': logs.dumps_lz4(line.encode('utf-8')), 'compressed': 3}) @defer.inlineCallbacks def do_addLogLines_huge_log(self, NUM_CHUNKS=3000, chunk=('xy' * 70 + '\n') * 3): if chunk.endswith("\n"): chunk = chunk[:-1] linesperchunk = chunk.count("\n") + 1 test_data = [ fakedb.LogChunk(logid=201, first_line=i * linesperchunk, last_line=i * linesperchunk + linesperchunk - 1, compressed=0, content=chunk) for i in range(NUM_CHUNKS) ] yield self.insertTestData( self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=0, num_lines=NUM_CHUNKS * 3, type='s')] + test_data) wholeLog = yield self.db.logs.getLogLines(201, 0, NUM_CHUNKS * 3) for i in range(10): yield self.db.logs.compressLog(201) wholeLog2 = yield self.db.logs.getLogLines(201, 0, NUM_CHUNKS * 3) self.assertEqual(wholeLog, wholeLog2) self.assertEqual(wholeLog, wholeLog2) def countChunk(conn): tbl = self.db.model.logchunks q = sa.select([sa.func.count(tbl.c.content)]) q = q.where(tbl.c.logid == 201) return conn.execute(q).fetchone()[0] chunks = yield self.db.pool.do(countChunk) # make sure MAX_CHUNK_LINES is taken in account self.assertGreaterEqual( chunks, NUM_CHUNKS * linesperchunk / logs.LogsConnectorComponent.MAX_CHUNK_LINES) def test_addLogLines_huge_log(self): return self.do_addLogLines_huge_log() def test_addLogLines_huge_log_lots_line(self): return self.do_addLogLines_huge_log(NUM_CHUNKS=3000, chunk='x\n' * 50) def test_addLogLines_huge_log_lots_snowmans(self): return self.do_addLogLines_huge_log(NUM_CHUNKS=3000, chunk='\N{SNOWMAN}\n' * 50) @defer.inlineCallbacks def test_compressLog_non_existing_log(self): yield self.db.logs.compressLog(201) logdict = yield self.db.logs.getLog(201) self.assertEqual(logdict, None) @defer.inlineCallbacks def test_compressLog_empty_log(self): yield self.insertTestData(self.backgroundData + [ fakedb.Log(id=201, stepid=101, name='stdio', slug='stdio', complete=1, num_lines=0, type='s'), ]) yield self.db.logs.compressLog(201) logdict = yield self.db.logs.getLog(201) self.assertEqual(logdict, { 'stepid': 101, 'num_lines': 0, 'name': 'stdio', 'id': 201, 'type': 's', 'slug': 'stdio', 'complete': True}) @defer.inlineCallbacks def test_deleteOldLogChunks_basic(self): yield self.insertTestData(self.backgroundData) logids = [] for stepid in (101, 102): for i in range(stepid): logid = yield self.db.logs.addLog( stepid=stepid, name='another' + str(i), slug='another' + str(i), type='s') yield self.db.logs.appendLog(logid, 'xyz\n') logids.append(logid) deleted_chunks = yield self.db.logs.deleteOldLogChunks( (self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) / 2) self.assertEqual(deleted_chunks, 101) deleted_chunks = yield self.db.logs.deleteOldLogChunks( self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) self.assertEqual(deleted_chunks, 102) deleted_chunks = yield self.db.logs.deleteOldLogChunks( self.TIMESTAMP_STEP102 + self.TIMESTAMP_STEP101) self.assertEqual(deleted_chunks, 0) deleted_chunks = yield self.db.logs.deleteOldLogChunks(0) self.assertEqual(deleted_chunks, 0) for logid in logids: logdict = yield self.db.logs.getLog(logid) self.assertEqual(logdict['type'], 'd') # we make sure we can still getLogLines, it will just return empty value lines = yield self.db.logs.getLogLines(logid, 0, logdict['num_lines']) self.assertEqual(lines, '') class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True 
self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['logs', 'logchunks', 'steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers']) self.db.logs = logs.LogsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_masters.py000066400000000000000000000204621361162603000241160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import masters from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime SOMETIME = 1348971992 SOMETIME_DT = epoch2datetime(SOMETIME) OTHERTIME = 1008971992 OTHERTIME_DT = epoch2datetime(OTHERTIME) class Tests(interfaces.InterfaceTests): # common sample data master_row = [ fakedb.Master(id=7, name="some:master", active=1, last_active=SOMETIME), ] # tests def test_signature_findMasterId(self): @self.assertArgSpecMatches(self.db.masters.findMasterId) def findMasterId(self, name): pass def test_signature_setMasterState(self): @self.assertArgSpecMatches(self.db.masters.setMasterState) def setMasterState(self, masterid, active): pass def test_signature_getMaster(self): @self.assertArgSpecMatches(self.db.masters.getMaster) def getMaster(self, masterid): pass def test_signature_getMasters(self): @self.assertArgSpecMatches(self.db.masters.getMasters) def getMasters(self): pass @defer.inlineCallbacks def test_findMasterId_new(self): id = yield self.db.masters.findMasterId('some:master') masterdict = yield self.db.masters.getMaster(id) self.assertEqual(masterdict, dict(id=id, name='some:master', active=False, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_findMasterId_exists(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master'), ]) id = yield self.db.masters.findMasterId('some:master') self.assertEqual(id, 7) @defer.inlineCallbacks def test_setMasterState_when_missing(self): activated = \ yield self.db.masters.setMasterState(masterid=7, active=True) self.assertFalse(activated) @defer.inlineCallbacks def test_setMasterState_true_when_active(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), ]) activated = yield self.db.masters.setMasterState( masterid=7, active=True) self.assertFalse(activated) # it was already active masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, 
name='some:master', active=True, last_active=SOMETIME_DT)) # timestamp updated @defer.inlineCallbacks def test_setMasterState_true_when_inactive(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=OTHERTIME), ]) activated = yield self.db.masters.setMasterState( masterid=7, active=True) self.assertTrue(activated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=True, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_setMasterState_false_when_active(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertTrue(deactivated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=OTHERTIME_DT)) @defer.inlineCallbacks def test_setMasterState_false_when_inactive(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=OTHERTIME), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertFalse(deactivated) masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=OTHERTIME_DT)) @defer.inlineCallbacks def test_getMaster(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), ]) masterdict = yield self.db.masters.getMaster(7) validation.verifyDbDict(self, 'masterdict', masterdict) self.assertEqual(masterdict, dict(id=7, name='some:master', active=False, last_active=SOMETIME_DT)) @defer.inlineCallbacks def test_getMaster_missing(self): masterdict = yield self.db.masters.getMaster(7) self.assertEqual(masterdict, None) @defer.inlineCallbacks def test_getMasters(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=0, last_active=SOMETIME), fakedb.Master(id=8, name='other:master', active=1, last_active=OTHERTIME), ]) masterlist = yield self.db.masters.getMasters() for masterdict in masterlist: validation.verifyDbDict(self, 'masterdict', masterdict) def masterKey(master): return master['id'] expected = sorted([ dict(id=7, name='some:master', active=0, last_active=SOMETIME_DT), dict(id=8, name='other:master', active=1, last_active=OTHERTIME_DT), ], key=masterKey) self.assertEqual(sorted(masterlist, key=masterKey), expected) class RealTests(Tests): # tests that only "real" implementations will pass @defer.inlineCallbacks def test_setMasterState_false_deletes_links(self): yield self.insertTestData([ fakedb.Master(id=7, name='some:master', active=1, last_active=OTHERTIME), fakedb.Scheduler(id=21), fakedb.SchedulerMaster(schedulerid=21, masterid=7), ]) deactivated = yield self.db.masters.setMasterState( masterid=7, active=False) self.assertTrue(deactivated) # check that the scheduler_masters row was deleted def thd(conn): tbl = self.db.model.scheduler_masters self.assertEqual(conn.execute(tbl.select()).fetchall(), []) yield self.db.pool.do(thd) class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.reactor.advance(SOMETIME) self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def 
setUp(self): yield self.setUpConnectorComponent( table_names=['masters', 'schedulers', 'scheduler_masters']) self.reactor.advance(SOMETIME) self.db.masters = masters.MastersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_040_add_builder_tags.py000066400000000000000000000045261361162603000317420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), # builder's name sa.Column('name', sa.Text, nullable=False), sa.Column('description', sa.Text, nullable=True), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) builders.create() conn.execute(builders.insert(), [ dict(name='bname', name_hash='dontcare')]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn builders = sautils.Table('builders', metadata, autoload=True) self.assertIsInstance(builders.c.tags.type, sa.Text) q = sa.select([builders.c.name, builders.c.tags]) num_rows = 0 for row in conn.execute(q): # verify that the default value was set correctly self.assertEqual(row.tags, None) num_rows += 1 self.assertEqual(num_rows, 1) return self.do_test_migration(39, 40, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_041_add_N_N_tagsbuilders.py000066400000000000000000000056061361162603000325210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), # builder's name sa.Column('name', sa.Text, nullable=False), sa.Column('tags', sa.Text), sa.Column('description', sa.Text, nullable=True), # sha1 of name; used for a unique index sa.Column('name_hash', sa.String(40), nullable=False), ) builders.create() conn.execute(builders.insert(), [ dict(name='bname', tags='tag', description='description', name_hash='dontcare')]) def test_migration(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn builders = sautils.Table('builders', metadata, autoload=True) q = sa.select([builders]) num_rows = 0 for row in conn.execute(q): self.assertEqual( row, (1, 'bname', 'description', 'dontcare')) num_rows += 1 self.assertEqual(num_rows, 1) tags = sautils.Table('tags', metadata, autoload=True) builders_tags = sautils.Table('builders_tags', metadata, autoload=True) q = sa.select([tags.c.id, tags.c.name, tags.c.name_hash]) self.assertEqual(conn.execute(q).fetchall(), []) q = sa.select([builders_tags.c.id, builders_tags.c.builderid, builders_tags.c.tagid]) self.assertEqual(conn.execute(q).fetchall(), []) return self.do_test_migration(40, 41, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_042_add_build_properties_table.py000066400000000000000000000035451361162603000340220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def test_migration(self): def setup_thd(conn): metadata = sa.MetaData() metadata.bind = conn sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), # .. 
).create() def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn build_properties = sautils.Table( 'build_properties', metadata, autoload=True) q = sa.select([build_properties.c.buildid, build_properties.c.name, build_properties.c.value, build_properties.c.source]) self.assertEqual(conn.execute(q).fetchall(), []) return self.do_test_migration(41, 42, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_043_add_changes_parent.py000066400000000000000000000074561361162603000322670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn patches = sautils.Table( 'patches', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('patchlevel', sa.Integer, nullable=False), sa.Column('patch_base64', sa.Text, nullable=False), sa.Column('patch_author', sa.Text, nullable=False), sa.Column('patch_comment', sa.Text, nullable=False), sa.Column('subdir', sa.Text), ) sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('ss_hash', sa.String(40), nullable=False), sa.Column('branch', sa.String(256)), sa.Column('revision', sa.String(256)), sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')), sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), sa.Column('project', sa.String(length=512), nullable=False, server_default=''), sa.Column('created_at', sa.Integer, nullable=False), ) changes = sautils.Table( 'changes', metadata, sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('author', sa.String(256), nullable=False), sa.Column('comments', sa.Text, nullable=False), sa.Column('branch', sa.String(256)), sa.Column('revision', sa.String(256)), sa.Column('revlink', sa.String(256)), sa.Column('when_timestamp', sa.Integer, nullable=False), sa.Column('category', sa.String(256)), sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), sa.Column('project', sa.String(length=512), nullable=False, server_default=''), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id')), ) patches.create() sourcestamps.create() changes.create() def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn changes = 
sautils.Table('changes', metadata, autoload=True) self.assertIsInstance(changes.c.parent_changeids.type, sa.Integer) return self.do_test_migration(42, 43, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_044_add_step_hidden.py000066400000000000000000000050301361162603000315570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('name', sa.String(50), nullable=False), sa.Column('buildid', sa.Integer), sa.Column('started_at', sa.Integer), sa.Column('complete_at', sa.Integer), sa.Column( 'state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), sa.Column('urls_json', sa.Text, nullable=False), ) steps.create() conn.execute(steps.insert(), [ dict(number=3, name='echo', urls_json='[]', state_string='')]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn steps = sautils.Table('steps', metadata, autoload=True) self.assertIsInstance(steps.c.hidden.type, sa.SmallInteger) q = sa.select([steps.c.name, steps.c.hidden]) num_rows = 0 for row in conn.execute(q): # verify that the default value was set correctly self.assertEqual(row.hidden, 0) num_rows += 1 self.assertEqual(num_rows, 1) return self.do_test_migration(43, 44, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_045_worker_transition.py000066400000000000000000000342721361162603000322570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from sqlalchemy.engine.reflection import Inspector from twisted.internet import defer from twisted.trial import unittest from buildbot.db.types.json import JsonObject from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def _define_old_tables(self, metadata): self.buildrequests = sautils.Table( 'buildrequests', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id'), nullable=False), # ... ) self.buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('parent_buildid', sa.Integer, sa.ForeignKey('builds.id', use_alter=True, name='parent_buildid')), # ... ) self.builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), # ... ) self.builder_masters = sautils.Table( 'builder_masters', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), # ... ) self.masters = sautils.Table( "masters", metadata, sa.Column('id', sa.Integer, primary_key=True), # ... ) self.builds = sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id')), sa.Column('buildrequestid', sa.Integer, sa.ForeignKey('buildrequests.id'), nullable=False), sa.Column('buildslaveid', sa.Integer), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), sa.Column('started_at', sa.Integer, nullable=False), sa.Column('complete_at', sa.Integer), sa.Column( 'state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), ) self.buildslaves = sautils.Table( "buildslaves", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("name", sa.String(50), nullable=False), sa.Column("info", JsonObject, nullable=False), ) self.configured_buildslaves = sautils.Table( 'configured_buildslaves', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('buildermasterid', sa.Integer, sa.ForeignKey('builder_masters.id'), nullable=False), sa.Column('buildslaveid', sa.Integer, sa.ForeignKey('buildslaves.id'), nullable=False), ) self.connected_buildslaves = sautils.Table( 'connected_buildslaves', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), sa.Column('buildslaveid', sa.Integer, sa.ForeignKey('buildslaves.id'), nullable=False), ) def _create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn self._define_old_tables(metadata) metadata.create_all() sa.Index( 'builds_buildrequestid', self.builds.c.buildrequestid).create() sa.Index('builds_number', self.builds.c.builderid, self.builds.c.number, unique=True).create() sa.Index('builds_buildslaveid', self.builds.c.buildslaveid).create() sa.Index('builds_masterid', self.builds.c.masterid).create() sa.Index( 'buildslaves_name', self.buildslaves.c.name, unique=True).create() sa.Index('configured_slaves_buildmasterid', self.configured_buildslaves.c.buildermasterid).create() sa.Index('configured_slaves_slaves', self.configured_buildslaves.c.buildslaveid).create() sa.Index('configured_slaves_identity', self.configured_buildslaves.c.buildermasterid, 
self.configured_buildslaves.c.buildslaveid, unique=True).create() sa.Index('connected_slaves_masterid', self.connected_buildslaves.c.masterid).create() sa.Index('connected_slaves_slaves', self.connected_buildslaves.c.buildslaveid).create() sa.Index('connected_slaves_identity', self.connected_buildslaves.c.masterid, self.connected_buildslaves.c.buildslaveid, unique=True).create() @defer.inlineCallbacks def test_update_inconsistent_builds_buildslaves(self): def setup_thd(conn): self._create_tables_thd(conn) conn.execute(self.masters.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(self.buildsets.insert(), [dict(id=5)]) conn.execute(self.buildrequests.insert(), [ dict(id=3, buildsetid=5), dict(id=4, buildsetid=5), ]) conn.execute(self.buildslaves.insert(), [ dict(id=30, name='worker-1', info={}), dict(id=31, name='worker-2', info={"a": 1}), ]) conn.execute(self.builds.insert(), [ dict(id=10, number=2, buildrequestid=3, buildslaveid=123, masterid=1, started_at=0, state_string='state'), dict(id=11, number=1, buildrequestid=4, buildslaveid=31, masterid=2, started_at=1000, state_string='state2'), ]) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn # Verify database contents. # 'workers' table contents. workers = sautils.Table('workers', metadata, autoload=True) c = workers.c q = sa.select( [c.id, c.name, c.info] ).order_by(c.id) self.assertEqual( q.execute().fetchall(), [ (30, 'worker-1', '{}'), (31, 'worker-2', '{"a": 1}'), ]) # 'builds' table contents. builds = sautils.Table('builds', metadata, autoload=True) c = builds.c q = sa.select( [c.id, c.number, c.builderid, c.buildrequestid, c.workerid, c.masterid, c.started_at, c.complete_at, c.state_string, c.results] ).order_by(c.id) # Check that build with invalid reference to buildslaves now # have no reference to it. self.assertEqual( q.execute().fetchall(), [ (10, 2, None, 3, None, 1, 0, None, 'state', None), (11, 1, None, 4, 31, 2, 1000, None, 'state2', None), ]) yield self.do_test_migration(44, 45, setup_thd, verify_thd) def test_update(self): def setup_thd(conn): self._create_tables_thd(conn) conn.execute(self.masters.insert(), [ dict(id=10), dict(id=11), ]) conn.execute(self.buildsets.insert(), [ dict(id=90), dict(id=91), ]) conn.execute(self.buildrequests.insert(), [ dict(id=20, buildsetid=90), dict(id=21, buildsetid=91), ]) conn.execute(self.builders.insert(), [ dict(id=50) ]) conn.execute(self.buildslaves.insert(), [ dict(id=30, name='worker-1', info={}), dict(id=31, name='worker-2', info={"a": 1}), ]) conn.execute(self.builds.insert(), [ dict(id=40, number=1, buildrequestid=20, buildslaveid=30, masterid=10, started_at=1000, state_string='state'), ]) conn.execute(self.builds.insert(), [ dict(id=41, number=2, builderid=50, buildrequestid=21, masterid=11, started_at=2000, complete_at=3000, state_string='state 2', results=9), ]) conn.execute(self.builder_masters.insert(), [ dict(id=70), dict(id=71), ]) conn.execute(self.configured_buildslaves.insert(), [ dict(id=60, buildermasterid=70, buildslaveid=30), dict(id=61, buildermasterid=71, buildslaveid=31), ]) conn.execute(self.connected_buildslaves.insert(), [ dict(id=80, masterid=10, buildslaveid=30), dict(id=81, masterid=11, buildslaveid=31), ]) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn # Verify database contents. # 'workers' table contents. 
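# The 44->45 upgrade replaces the buildslaves table with workers (and
# builds.buildslaveid with builds.workerid); the checks below expect that a
# build whose buildslaveid matched no existing buildslave (123 above) ends up
# with workerid NULL instead of a dangling reference.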
workers = sautils.Table('workers', metadata, autoload=True) c = workers.c q = sa.select( [c.id, c.name, c.info] ).order_by(c.id) self.assertEqual( q.execute().fetchall(), [ (30, 'worker-1', '{}'), (31, 'worker-2', '{"a": 1}'), ]) # 'builds' table contents. builds = sautils.Table('builds', metadata, autoload=True) c = builds.c q = sa.select( [c.id, c.number, c.builderid, c.buildrequestid, c.workerid, c.masterid, c.started_at, c.complete_at, c.state_string, c.results] ).order_by(c.id) self.assertEqual( q.execute().fetchall(), [ (40, 1, None, 20, 30, 10, 1000, None, 'state', None), (41, 2, 50, 21, None, 11, 2000, 3000, 'state 2', 9), ]) # 'configured_workers' table contents. configured_workers = sautils.Table( 'configured_workers', metadata, autoload=True) c = configured_workers.c q = sa.select( [c.id, c.buildermasterid, c.workerid] ).order_by(c.id) self.assertEqual( q.execute().fetchall(), [ (60, 70, 30), (61, 71, 31), ]) # 'connected_workers' table contents. connected_workers = sautils.Table( 'connected_workers', metadata, autoload=True) c = connected_workers.c q = sa.select( [c.id, c.masterid, c.workerid] ).order_by(c.id) self.assertEqual( q.execute().fetchall(), [ (80, 10, 30), (81, 11, 31), ]) # Verify that there is no "slave"-named items in schema. inspector = Inspector(conn) def check_name(name, table_name, item_type): if not name: return self.assertTrue( "slave" not in name.lower(), msg="'slave'-named {type} in table '{table}': " "'{name}'".format( type=item_type, table=table_name, name=name)) # Check every table. for table_name in inspector.get_table_names(): # Check table name. check_name(table_name, table_name, "table name") # Check column names. for column_info in inspector.get_columns(table_name): check_name(column_info['name'], table_name, "column") # Check foreign key names. for fk_info in inspector.get_foreign_keys(table_name): check_name(fk_info['name'], table_name, "foreign key") # Check indexes names. for index_info in inspector.get_indexes(table_name): check_name(index_info['name'], table_name, "index") # Check primary keys constraints names. pk_info = inspector.get_pk_constraint(table_name) check_name(pk_info.get('name'), table_name, "primary key") # Test that no "slave"-named items present in schema for name in inspector.get_schema_names(): self.assertTrue("slave" not in name.lower()) return self.do_test_migration(44, 45, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_046_mysql_innodb_compatibility.py000066400000000000000000000224771361162603000341300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): table_columns = { 'changes': ['author', 'branch', 'revision', 'category'], 'object_state': ['name'], 'users': ['identifier'] } def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def _define_old_tables(self, conn): metadata = sa.MetaData() metadata.bind = conn self.changes = sautils.Table( 'changes', metadata, # ... sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('author', sa.String(256), nullable=False), sa.Column('branch', sa.String(256)), sa.Column('revision', sa.String(256)), # CVS uses NULL sa.Column('category', sa.String(256))) self.object_state = sautils.Table( "object_state", metadata, # ... sa.Column("objectid", sa.Integer, # commented not to add objects table # sa.ForeignKey('objects.id'), nullable=False), sa.Column("name", sa.String(length=256), nullable=False)) self.users = sautils.Table( "users", metadata, # ... sa.Column("uid", sa.Integer, primary_key=True), sa.Column("identifier", sa.String(256), nullable=False), ) def create_tables_thd(self, conn): self._define_old_tables(conn) self.changes.create() self.object_state.create() self.users.create() def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn conn.execute(self.changes.insert(), [ dict(changeid=1, author="a" * 255, branch="a", revision="a", category="a")]) conn.execute(self.object_state.insert(), [ dict(objectid=1, name="a" * 255)]) conn.execute(self.users.insert(), [ dict(uid=1, identifier="a" * 255)]) # Verify that the columns have been updated to sa.Strint(255) for table, columns in self.table_columns.items(): tbl = sautils.Table(table, metadata, autoload=True) for column in columns: self.assertIsInstance( getattr(tbl.c, column).type, sa.String) self.assertEqual(getattr(tbl.c, column).type.length, 255) return self.do_test_migration(45, 46, setup_thd, verify_thd) @defer.inlineCallbacks def assertExpectedMessage(self, d, expected_msg): exception = None try: yield d except Exception as e: exception = e self.flushLoggedErrors() self.assertEqual(str(exception), expected_msg) def do_invalid_test(self, table, value, expected_msg): def setup_thd(conn): self.create_tables_thd(conn) metadata = sa.MetaData() metadata.bind = conn conn.execute(getattr(self, table).insert(), [value]) return self.assertExpectedMessage(self.do_test_migration(45, 46, setup_thd, None), expected_msg) def test_invalid_author_in_changes(self): return self.do_invalid_test('changes', dict(changeid=1, author="a" * 256, branch="a", revision="a", category="a"), "\n".join(["", "- 'changes' table has invalid data:", " changes.change=1 has author, branch, revision or category longer than 255"])) def test_invalid_branch_in_changes(self): return self.do_invalid_test('changes', dict(changeid=1, author="a", branch="a" * 256, revision="a", category="a"), "\n".join(["", "- 'changes' table has invalid data:", " changes.change=1 has author, branch, revision or category longer than 255"])) def test_invalid_revision_in_changes(self): return self.do_invalid_test('changes', dict(changeid=1, author="a", branch="a", revision="a" * 256, category="a"), "\n".join(["", "- 'changes' table has invalid data:", " changes.change=1 has author, branch, revision or category 
longer than 255"])) def test_invalid_category_in_changes(self): return self.do_invalid_test('changes', dict(changeid=1, author="a", branch="a", revision="a", category="a" * 256), "\n".join(["", "- 'changes' table has invalid data:", " changes.change=1 has author, branch, revision or category longer than 255"])) def test_invalid_name_in_object_state(self): return self.do_invalid_test('object_state', dict(objectid=1, name="a" * 256), "\n".join(["", "- 'object_state' table has invalid data:", " object_state.objectid=1 has name longer than 255"])) def test_invalid_identifier_in_users(self): return self.do_invalid_test('users', dict(uid=1, identifier="a" * 256), "\n".join(["", "- 'users_state' table has invalid data:", " users.uid=1 has identifier longer than 255"])) @defer.inlineCallbacks def test_multiple_invalid_values(self): def setup_thd(conn): self.create_tables_thd(conn) metadata = sa.MetaData() metadata.bind = conn conn.execute(self.users.insert(), [dict(uid=1, identifier="a" * 256)]) conn.execute(self.changes.insert(), [dict(changeid=1, author="a", branch="a", revision="a", category="a" * 256), dict(changeid=2, author="a" * 256, branch="a", revision="a", category="a")]) yield self.assertExpectedMessage(self.do_test_migration(45, 46, setup_thd, None), "\n".join(["", "- 'changes' table has invalid data:", " changes.change=1 has author, branch, revision or category longer than 255", " changes.change=2 has author, branch, revision or category longer than 255", "- 'users_state' table has invalid data:", " users.uid=1 has identifier longer than 255"])) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_047_cascading_deleletes.py000066400000000000000000000104611361162603000324320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('name', sa.String(50), nullable=False), ) builders.create() masters = sautils.Table( 'masters', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('name', sa.String(50), nullable=False), ) masters.create() workers = sautils.Table( 'workers', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('name', sa.String(50), nullable=False), ) workers.create() builder_masters = sautils.Table( 'builder_masters', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), ) builder_masters.create() configured_workers = sautils.Table( 'configured_workers', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id'), nullable=False), sa.Column('buildermasterid', sa.Integer, sa.ForeignKey('builder_masters.id'), nullable=False), ) configured_workers.create() conn.execute(builders.insert(), [ dict(id=3, name='echo'), dict(id=4, name='tests'), ]) conn.execute(masters.insert(), [ dict(id=1, name='bm1'), dict(id=2, name='bm2'), ]) conn.execute(builder_masters.insert(), [ dict(id=1, builderid=4, masterid=1), dict(id=2, builderid=3, masterid=2), ]) conn.execute(workers.insert(), [ dict(id=1, name="powerful"), dict(id=2, name="limited"), ]) conn.execute(configured_workers.insert(), [ dict(id=1, buildermasterid=1, workerid=2), ]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): """Can't verify much under SQLite Even with PRAGMA foreign_keys=ON, the cascading deletes are actually ignored with SQLite, so we can't really test the behaviour in that environment. On the other hand, SQLite's FKs apparently don't prevent removals. The cascading behaviour is really needed for other DBs right now, and only in reconfigs. """ metadata = sa.MetaData() metadata.bind = conn masters = sautils.Table('masters', metadata, autoload=True) conn.execute(masters.delete().where(masters.c.name == 'bm1')) q = sa.select([masters.c.id, masters.c.name]) self.assertEqual(conn.execute(q).fetchall(), [(2, 'bm2')]) return self.do_test_migration(46, 47, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_048_change_properties_to_text.py000066400000000000000000000052501361162603000337400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from random import choice from string import ascii_lowercase import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_table_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn change_properties = sautils.Table( 'change_properties', metadata, sa.Column('changeid', sa.Integer, nullable=False), sa.Column('property_name', sa.String(256), nullable=False), sa.Column('property_value', sa.String(1024), nullable=False), ) change_properties.create() def test_update(self): def setup_thd(conn): self.create_table_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn random_length = 65535 random_string = ''.join(choice(ascii_lowercase) for byte in range(random_length)) # Verify column type is text change_properties = sautils.Table( 'change_properties', metadata, autoload=True) self.assertIsInstance( change_properties.c.property_value.type, sa.Text) # Test write and read random string conn.execute(change_properties.insert(), [dict( changeid=1, property_name="test_change_properties_property_value_length", property_value=random_string, )]) q = conn.execute(sa.select( [change_properties.c.property_value]).where(change_properties.c.changeid == 1)) [self.assertEqual(q_string[0], random_string) for q_string in q] return self.do_test_migration(47, 48, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_049_add_schedulers_enabled.py000066400000000000000000000046341361162603000331220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn schedulers = sautils.Table( 'schedulers', metadata, sa.Column("id", sa.Integer, primary_key=True), # name for this scheduler, as given in the configuration, plus a hash # of that name used for a unique index sa.Column('name', sa.Text, nullable=False), sa.Column('name_hash', sa.String(40), nullable=False, server_default='') ) schedulers.create() conn.execute(schedulers.insert(), [ dict(number=3, name='echo', urls_json='[]')]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn schedulers = sautils.Table('schedulers', metadata, autoload=True) self.assertIsInstance(schedulers.c.enabled.type, sa.SmallInteger) q = sa.select([schedulers.c.name, schedulers.c.enabled]) num_rows = 0 for row in conn.execute(q): # verify that the default value was set correctly self.assertEqual(row.enabled, True) num_rows += 1 self.assertEqual(num_rows, 1) return self.do_test_migration(48, 49, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_050_cascading_deletes_all.py000066400000000000000000000376731361162603000327510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_and_insert_data(self, conn): metadata = sa.MetaData() metadata.bind = conn # create tables (prior to schema migration) masters = sautils.Table( "masters", metadata, sa.Column('id', sa.Integer, primary_key=True), ) masters.create() users = sautils.Table( "users", metadata, sa.Column("uid", sa.Integer, primary_key=True), ) users.create() workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), ) workers.create() sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), ) sourcestamps.create() schedulers = sautils.Table( 'schedulers', metadata, sa.Column("id", sa.Integer, primary_key=True), ) schedulers.create() buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, primary_key=True), ) buildsets.create() builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), ) builders.create() tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), ) tags.create() changesources = sautils.Table( 'changesources', metadata, sa.Column("id", sa.Integer, primary_key=True), ) changesources.create() buildrequests = sautils.Table( 'buildrequests', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey("buildsets.id"), nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'), nullable=False), ) buildrequests.create() builds = sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id')), sa.Column('buildrequestid', sa.Integer, sa.ForeignKey( 'buildrequests.id', use_alter=True, name='buildrequestid'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id')), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), ) builds.create() buildrequest_claims = sautils.Table( 'buildrequest_claims', metadata, sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), index=True, nullable=True), ) buildrequest_claims.create() build_properties = sautils.Table( 'build_properties', metadata, sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id'), nullable=False), sa.Column('name', sa.String(256), nullable=False), ) build_properties.create() steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id')), ) steps.create() logs = sautils.Table( 'logs', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id')), ) logs.create() logchunks = sautils.Table( 'logchunks', metadata, sa.Column('logid', sa.Integer, sa.ForeignKey('logs.id')), sa.Column('first_line', sa.Integer, nullable=False), sa.Column('last_line', sa.Integer, nullable=False), ) logchunks.create() buildset_properties = sautils.Table( 'buildset_properties', metadata, sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), ) 
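# The tables created above and below only carry the primary-key and
# foreign-key columns that matter for the 49->50 schema change; the real
# pre-migration tables have additional columns (names, timestamps, and so on)
# that are irrelevant to this test (a deliberate simplification, not the full schema).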
buildset_properties.create() changesource_masters = sautils.Table( 'changesource_masters', metadata, sa.Column('changesourceid', sa.Integer, sa.ForeignKey('changesources.id'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), ) changesource_masters.create() buildset_sourcestamps = sautils.Table( 'buildset_sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id'), nullable=False), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id'), nullable=False), ) buildset_sourcestamps.create() connected_workers = sautils.Table( 'connected_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id'), nullable=False), ) connected_workers.create() changes = sautils.Table( 'changes', metadata, sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id')), sa.Column('parent_changeids', sa.Integer, sa.ForeignKey( 'changes.changeid'), nullable=True), ) changes.create() change_files = sautils.Table( 'change_files', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid'), nullable=False), sa.Column('filename', sa.String(1024), nullable=False), ) change_files.create() change_properties = sautils.Table( 'change_properties', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), ) change_properties.create() change_users = sautils.Table( "change_users", metadata, sa.Column("changeid", sa.Integer, sa.ForeignKey('changes.changeid'), nullable=False), sa.Column("uid", sa.Integer, sa.ForeignKey('users.uid'), nullable=False), ) change_users.create() scheduler_masters = sautils.Table( 'scheduler_masters', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id'), nullable=False), ) scheduler_masters.create() scheduler_changes = sautils.Table( 'scheduler_changes', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id')), sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid')), ) scheduler_changes.create() builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id'), nullable=False), ) builders_tags.create() objects = sautils.Table( "objects", metadata, sa.Column("id", sa.Integer, primary_key=True), ) objects.create() object_state = sautils.Table( "object_state", metadata, sa.Column("objectid", sa.Integer, sa.ForeignKey('objects.id'), nullable=False), sa.Column("name", sa.String(length=255), nullable=False), ) object_state.create() users_info = sautils.Table( "users_info", metadata, sa.Column("uid", sa.Integer, sa.ForeignKey('users.uid'), nullable=False), sa.Column("attr_type", sa.String(128), nullable=False), ) users_info.create() # insert data conn.execute(masters.insert(), [ dict(id=1), ]) conn.execute(objects.insert(), [ dict(id=1), ]) conn.execute(object_state.insert(), [ dict(objectid=1, name='size'), ]) conn.execute(users.insert(), [ dict(uid=1), dict(uid=2), ]) 
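# The rows inserted above and below form one small object graph touching
# essentially every foreign-key relationship declared in this fixture, so the
# 49->50 migration is exercised against populated tables rather than an empty schema.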
conn.execute(users_info.insert(), [ dict(uid=1, attr_type='first_name'), dict(uid=1, attr_type='last_name'), dict(uid=2, attr_type='first_name'), dict(uid=2, attr_type='last_name'), ]) conn.execute(builders.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(tags.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(builders_tags.insert(), [ dict(id=1, builderid=1, tagid=1), dict(id=2, builderid=2, tagid=1), dict(id=3, builderid=2, tagid=2), ]) conn.execute(workers.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(connected_workers.insert(), [ dict(id=1, masterid=1, workerid=1), dict(id=2, masterid=1, workerid=2), ]) conn.execute(changesources.insert(), [ dict(id=1), ]) conn.execute(changesource_masters.insert(), [ dict(changesourceid=1, masterid=1), ]) conn.execute(sourcestamps.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(changes.insert(), [ dict(changeid=1, sourcestampid=1), dict(changeid=2, sourcestampid=2), ]) conn.execute(change_users.insert(), [ dict(changeid=1, uid=1), dict(changeid=2, uid=2), ]) conn.execute(change_properties.insert(), [ dict(changeid=1, property_name='release_lvl'), dict(changeid=2, property_name='release_lvl'), ]) conn.execute(change_files.insert(), [ dict(changeid=1, filename='README'), dict(changeid=2, filename='setup.py'), ]) conn.execute(schedulers.insert(), [ dict(changeid=1), ]) conn.execute(scheduler_masters.insert(), [ dict(schedulerid=1, masterid=1), ]) conn.execute(scheduler_changes.insert(), [ dict(schedulerid=1, changeid=1), dict(schedulerid=1, changeid=2), ]) conn.execute(buildsets.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(buildset_properties.insert(), [ dict(buildsetid=1, property_name='color'), dict(buildsetid=2, property_name='smell'), ]) conn.execute(buildset_sourcestamps.insert(), [ dict(id=1, buildsetid=1, sourcestampid=1), dict(id=2, buildsetid=2, sourcestampid=2), ]) conn.execute(buildrequests.insert(), [ dict(id=1, buildsetid=1, builderid=1), dict(id=2, buildsetid=1, builderid=2), dict(id=3, buildsetid=2, builderid=1), dict(id=4, buildsetid=2, builderid=2), ]) conn.execute(buildrequest_claims.insert(), [ dict(brid=1, masterid=1), dict(brid=2, masterid=1), dict(brid=3, masterid=1), dict(brid=4, masterid=1), ]) conn.execute(builds.insert(), [ dict(id=1, builderid=1, buildrequestid=1, workerid=2, masterid=1), dict(id=2, builderid=2, buildrequestid=2, workerid=1, masterid=1), dict(id=3, builderid=1, buildrequestid=3, workerid=1, masterid=1), dict(id=4, builderid=2, buildrequestid=4, workerid=2, masterid=1), ]) conn.execute(build_properties.insert(), [ dict(buildid=1, name='buildername'), dict(buildid=2, name='buildername'), dict(buildid=3, name='buildername'), dict(buildid=4, name='buildername'), ]) conn.execute(steps.insert(), [ dict(id=1, buildid=1), dict(id=2, buildid=1), dict(id=3, buildid=2), dict(id=4, buildid=2), dict(id=5, buildid=1), dict(id=6, buildid=1), dict(id=7, buildid=2), dict(id=8, buildid=2), ]) conn.execute(logs.insert(), [ dict(id=1, stepid=1), dict(id=2, stepid=2), dict(id=3, stepid=3), dict(id=4, stepid=4), dict(id=5, stepid=5), dict(id=6, stepid=6), dict(id=7, stepid=7), dict(id=8, stepid=8), ]) conn.execute(logchunks.insert(), [ dict(logid=1, first_line=0, last_line=100), dict(logid=2, first_line=0, last_line=100), dict(logid=3, first_line=0, last_line=100), dict(logid=4, first_line=0, last_line=100), dict(logid=5, first_line=0, last_line=100), dict(logid=6, first_line=0, last_line=100), dict(logid=7, first_line=0, last_line=100), dict(logid=8, first_line=0, last_line=100), ]) def test_update(self): def 
setup_thd(conn): self.create_tables_and_insert_data(conn) def verify_thd(conn): """Can't verify much under SQLite Even with PRAGMA foreign_keys=ON, the cascading deletes are actually ignored with SQLite, so we can't really test the behaviour in that environment. On the other hand, SQLite's FKs apparently don't prevent removals. The cascading behaviour is really needed for other DBs right now, and only in reconfigs. """ metadata = sa.MetaData() metadata.bind = conn sourcestamps = sautils.Table('sourcestamps', metadata, autoload=True) conn.execute(sourcestamps.delete().where(sourcestamps.c.id == 1)) q = sa.select([sourcestamps.c.id]) self.assertEqual(conn.execute(q).fetchall(), [(2,)]) return self.do_test_migration(49, 50, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_051_add_workers_status.py000066400000000000000000000046531361162603000324000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.db.types.json import JsonObject from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn # workers workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("name", sa.String(50), nullable=False), sa.Column("info", JsonObject, nullable=False), ) workers.create() conn.execute(workers.insert(), [ dict(id=3, name='foo', info='{}')]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn workers = sautils.Table('workers', metadata, autoload=True) self.assertIsInstance(workers.c.paused.type, sa.SmallInteger) self.assertIsInstance(workers.c.graceful.type, sa.SmallInteger) q = sa.select( [workers.c.name, workers.c.paused, workers.c.graceful]) num_rows = 0 for row in conn.execute(q): # verify that the default value was set correctly self.assertEqual(row.paused, False) self.assertEqual(row.graceful, False) num_rows += 1 self.assertEqual(num_rows, 1) return self.do_test_migration(50, 51, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_052_cascading_set_null.py000066400000000000000000000447251361162603000323170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa import sqlalchemy.exc as saexc from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_and_insert_data(self, conn): metadata = sa.MetaData() metadata.bind = conn # create tables (prior to schema migration) masters = sautils.Table( "masters", metadata, sa.Column('id', sa.Integer, primary_key=True), ) masters.create() users = sautils.Table( "users", metadata, sa.Column("uid", sa.Integer, primary_key=True), ) users.create() workers = sautils.Table( "workers", metadata, sa.Column("id", sa.Integer, primary_key=True), ) workers.create() sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), ) sourcestamps.create() schedulers = sautils.Table( 'schedulers', metadata, sa.Column("id", sa.Integer, primary_key=True), ) schedulers.create() buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, primary_key=True), ) buildsets.create() builders = sautils.Table( 'builders', metadata, sa.Column('id', sa.Integer, primary_key=True), ) builders.create() tags = sautils.Table( 'tags', metadata, sa.Column('id', sa.Integer, primary_key=True), ) tags.create() changesources = sautils.Table( 'changesources', metadata, sa.Column("id", sa.Integer, primary_key=True), ) changesources.create() buildrequests = sautils.Table( 'buildrequests', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey("buildsets.id", ondelete='CASCADE'), nullable=False), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), ) buildrequests.create() builds = sautils.Table( 'builds', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE')), sa.Column('buildrequestid', sa.Integer, sa.ForeignKey( 'buildrequests.id', use_alter=True, name='buildrequestid', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE')), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) builds.create() buildrequest_claims = sautils.Table( 'buildrequest_claims', metadata, sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id', ondelete='CASCADE'), nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), index=True, nullable=True), ) buildrequest_claims.create() build_properties = sautils.Table( 'build_properties', metadata, sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE'), nullable=False), sa.Column('name', sa.String(256), nullable=False), ) build_properties.create() steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), 
sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id', ondelete='CASCADE')), ) steps.create() logs = sautils.Table( 'logs', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id', ondelete='CASCADE')), ) logs.create() logchunks = sautils.Table( 'logchunks', metadata, sa.Column('logid', sa.Integer, sa.ForeignKey('logs.id', ondelete='CASCADE')), sa.Column('first_line', sa.Integer, nullable=False), sa.Column('last_line', sa.Integer, nullable=False), ) logchunks.create() buildset_properties = sautils.Table( 'buildset_properties', metadata, sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), ) buildset_properties.create() changesource_masters = sautils.Table( 'changesource_masters', metadata, sa.Column('changesourceid', sa.Integer, sa.ForeignKey('changesources.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) changesource_masters.create() buildset_sourcestamps = sautils.Table( 'buildset_sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id', ondelete='CASCADE'), nullable=False), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), ) buildset_sourcestamps.create() connected_workers = sautils.Table( 'connected_workers', metadata, sa.Column('id', sa.Integer, primary_key=True, nullable=False), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), sa.Column('workerid', sa.Integer, sa.ForeignKey('workers.id', ondelete='CASCADE'), nullable=False), ) connected_workers.create() changes = sautils.Table( 'changes', metadata, sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE')), sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=True), ) changes.create() change_files = sautils.Table( 'change_files', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('filename', sa.String(1024), nullable=False), ) change_files.create() change_properties = sautils.Table( 'change_properties', metadata, sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), ) change_properties.create() change_users = sautils.Table( "change_users", metadata, sa.Column("changeid", sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE'), nullable=False), sa.Column("uid", sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), ) change_users.create() scheduler_masters = sautils.Table( 'scheduler_masters', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE'), nullable=False, primary_key=True), sa.Column('masterid', sa.Integer, sa.ForeignKey('masters.id', ondelete='CASCADE'), nullable=False), ) scheduler_masters.create() scheduler_changes = sautils.Table( 'scheduler_changes', metadata, sa.Column('schedulerid', sa.Integer, sa.ForeignKey('schedulers.id', ondelete='CASCADE')), sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='CASCADE')), ) 
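        # --- illustrative sketch (added commentary, not part of the original test) ---
        # Migration 052 ("cascading_set_null") is exercised below by checking that
        # deleting a worker leaves builds.workerid as NULL rather than deleting the
        # build. The helper here is a hypothetical example of how such a nullable
        # reference is declared with SQLAlchemy using ON DELETE SET NULL (compare
        # parent_changeids in the 053 test further down); it is defined only for
        # illustration and is never called.
        def _example_set_null_fk(example_metadata):
            return sautils.Table(
                'example_builds', example_metadata,
                sa.Column('id', sa.Integer, primary_key=True),
                sa.Column('workerid', sa.Integer,
                          sa.ForeignKey('workers.id', ondelete='SET NULL'),
                          nullable=True),
            )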
scheduler_changes.create() builders_tags = sautils.Table( 'builders_tags', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id', ondelete='CASCADE'), nullable=False), sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id', ondelete='CASCADE'), nullable=False), ) builders_tags.create() objects = sautils.Table( "objects", metadata, sa.Column("id", sa.Integer, primary_key=True), ) objects.create() object_state = sautils.Table( "object_state", metadata, sa.Column("objectid", sa.Integer, sa.ForeignKey('objects.id', ondelete='CASCADE'), nullable=False), sa.Column("name", sa.String(length=255), nullable=False), ) object_state.create() users_info = sautils.Table( "users_info", metadata, sa.Column("uid", sa.Integer, sa.ForeignKey('users.uid', ondelete='CASCADE'), nullable=False), sa.Column("attr_type", sa.String(128), nullable=False), ) users_info.create() # insert data conn.execute(masters.insert(), [ dict(id=1), ]) conn.execute(objects.insert(), [ dict(id=1), ]) conn.execute(object_state.insert(), [ dict(objectid=1, name='size'), ]) conn.execute(users.insert(), [ dict(uid=1), dict(uid=2), ]) conn.execute(users_info.insert(), [ dict(uid=1, attr_type='first_name'), dict(uid=1, attr_type='last_name'), dict(uid=2, attr_type='first_name'), dict(uid=2, attr_type='last_name'), ]) conn.execute(builders.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(tags.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(builders_tags.insert(), [ dict(id=1, builderid=1, tagid=1), dict(id=2, builderid=2, tagid=1), dict(id=3, builderid=2, tagid=2), ]) conn.execute(workers.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(connected_workers.insert(), [ dict(id=1, masterid=1, workerid=1), dict(id=2, masterid=1, workerid=2), ]) conn.execute(changesources.insert(), [ dict(id=1), ]) conn.execute(changesource_masters.insert(), [ dict(changesourceid=1, masterid=1), ]) conn.execute(sourcestamps.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(changes.insert(), [ dict(changeid=1, sourcestampid=1), dict(changeid=2, sourcestampid=2, parent_changeids=1), ]) conn.execute(change_users.insert(), [ dict(changeid=1, uid=1), dict(changeid=2, uid=2), ]) conn.execute(change_properties.insert(), [ dict(changeid=1, property_name='release_lvl'), dict(changeid=2, property_name='release_lvl'), ]) conn.execute(change_files.insert(), [ dict(changeid=1, filename='README'), dict(changeid=2, filename='setup.py'), ]) conn.execute(schedulers.insert(), [ dict(changeid=1), ]) conn.execute(scheduler_masters.insert(), [ dict(schedulerid=1, masterid=1), ]) conn.execute(scheduler_changes.insert(), [ dict(schedulerid=1, changeid=1), dict(schedulerid=1, changeid=2), dict(schedulerid=None, changeid=None), ]) conn.execute(buildsets.insert(), [ dict(id=1), dict(id=2), ]) conn.execute(buildset_properties.insert(), [ dict(buildsetid=1, property_name='color'), dict(buildsetid=2, property_name='smell'), ]) conn.execute(buildset_sourcestamps.insert(), [ dict(id=1, buildsetid=1, sourcestampid=1), dict(id=2, buildsetid=2, sourcestampid=2), ]) conn.execute(buildrequests.insert(), [ dict(id=1, buildsetid=1, builderid=1), dict(id=2, buildsetid=1, builderid=2), dict(id=3, buildsetid=2, builderid=1), dict(id=4, buildsetid=2, builderid=2), ]) conn.execute(buildrequest_claims.insert(), [ dict(brid=1, masterid=1), dict(brid=2, masterid=1), dict(brid=3, masterid=1), dict(brid=4, masterid=1), ]) conn.execute(builds.insert(), [ dict(id=1, builderid=1, buildrequestid=1, workerid=2, masterid=1), dict(id=2, 
builderid=2, buildrequestid=2, workerid=1, masterid=1), dict(id=3, builderid=1, buildrequestid=3, workerid=1, masterid=1), dict(id=4, builderid=2, buildrequestid=4, workerid=2, masterid=1), ]) conn.execute(build_properties.insert(), [ dict(buildid=1, name='buildername'), dict(buildid=2, name='buildername'), dict(buildid=3, name='buildername'), dict(buildid=4, name='buildername'), ]) conn.execute(steps.insert(), [ dict(id=1, buildid=1), dict(id=2, buildid=1), dict(id=3, buildid=2), dict(id=4, buildid=2), dict(id=5, buildid=1), dict(id=6, buildid=1), dict(id=7, buildid=2), dict(id=8, buildid=2), ]) conn.execute(logs.insert(), [ dict(id=1, stepid=1), dict(id=2, stepid=2), dict(id=3, stepid=3), dict(id=4, stepid=4), dict(id=5, stepid=5), dict(id=6, stepid=6), dict(id=7, stepid=7), dict(id=8, stepid=8), dict(id=9, stepid=None), ]) conn.execute(logchunks.insert(), [ dict(logid=1, first_line=0, last_line=100), dict(logid=2, first_line=0, last_line=100), dict(logid=3, first_line=0, last_line=100), dict(logid=4, first_line=0, last_line=100), dict(logid=5, first_line=0, last_line=100), dict(logid=6, first_line=0, last_line=100), dict(logid=7, first_line=0, last_line=100), dict(logid=8, first_line=0, last_line=100), dict(logid=None, first_line=0, last_line=100), ]) @defer.inlineCallbacks def test_update(self): def setup_thd(conn): self.create_tables_and_insert_data(conn) def verify_thd(conn): """Can't verify much under SQLite Even with PRAGMA foreign_keys=ON, the cascading deletes are actually ignored with SQLite, so we can't really test the behaviour in that environment. """ metadata = sa.MetaData() metadata.bind = conn changes = sautils.Table('changes', metadata, autoload=True) builds = sautils.Table('builds', metadata, autoload=True) workers = sautils.Table('workers', metadata, autoload=True) sourcestamps = sautils.Table('sourcestamps', metadata, autoload=True) conn.execute(sourcestamps.delete().where(sourcestamps.c.id == 1)) if conn.dialect.name not in ('mysql', 'sqlite'): q = sa.select([changes.c.changeid]) self.assertEqual(conn.execute(q).fetchall(), [(2,)]) q = sa.select([changes.c.parent_changeids]).where( changes.c.changeid == 2) self.assertEqual(conn.execute(q).fetchall(), [(None,)]) conn.execute(workers.delete().where(workers.c.id == 1)) if conn.dialect.name not in ('mysql', 'sqlite'): q = sa.select([builds.c.id, builds.c.workerid]).order_by( builds.c.id) self.assertEqual(conn.execute(q).fetchall(), [(1, 2), (2, None), (3, None), (4, 2)]) with self.assertRaises(saexc.DatabaseError): conn.execute(builds.insert(), [ dict(id=5, builderid=None, buildrequestid=4, workerid=2, masterid=1), ]) with self.assertWarnsRegex( UserWarning, 'Inconsistent data found in DB: table .+?, deleting invalid rows'): yield self.do_test_migration(51, 52, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_053_add_changes_committer.py000066400000000000000000000072541361162603000327760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), ) sourcestamps.create() changes = sautils.Table( 'changes', metadata, sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('author', sa.String(255), nullable=False), sa.Column('comments', sa.Text, nullable=False), sa.Column('branch', sa.String(255)), sa.Column('revision', sa.String(255)), sa.Column('revlink', sa.String(256)), sa.Column('when_timestamp', sa.Integer, nullable=False), sa.Column('category', sa.String(255)), sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), sa.Column('project', sa.String(length=512), nullable=False, server_default=''), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='SET NULL'), nullable=True), ) changes.create() conn.execute(sourcestamps.insert(), [ dict(id=100), ]) conn.execute(changes.insert(), [ dict( changeid=1, author='warner', comments='fix whitespace', when_timestamp=256738404, repository='git://warner', codebase='core', project='Buildbot', sourcestampid=100), ]) def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn changes = sautils.Table('changes', metadata, autoload=True) self.assertIsInstance(changes.c.committer.type, sa.String) q = sa.select([changes.c.author, changes.c.committer]) num_rows = 0 for row in conn.execute(q): # verify that the default value was set correctly self.assertEqual(row.committer, None) num_rows += 1 self.assertEqual(num_rows, 1) return self.do_test_migration(52, 53, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_054_add_index_to_steps.py000066400000000000000000000044311361162603000323250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_table_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn steps = sautils.Table( 'steps', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('number', sa.Integer, nullable=False), sa.Column('name', sa.String(50), nullable=False), sa.Column('buildid', sa.Integer, nullable=False), sa.Column('started_at', sa.Integer), sa.Column('complete_at', sa.Integer), sa.Column('state_string', sa.Text, nullable=False), sa.Column('results', sa.Integer), sa.Column('urls_json', sa.Text, nullable=False), sa.Column( 'hidden', sa.SmallInteger, nullable=False, server_default='0'), ) steps.create() def test_update(self): def setup_thd(conn): self.create_table_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn insp = sa.inspect(conn) indexes = insp.get_indexes('steps') index_names = [item['name'] for item in indexes] self.assertTrue('steps_started_at' in index_names) return self.do_test_migration(53, 54, setup_thd, verify_thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_migrate_versions_055_add_changes_committer.py000066400000000000000000000063731361162603000330010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import sqlalchemy as sa from twisted.trial import unittest from buildbot.test.util import migration from buildbot.util import sautils class Migration(migration.MigrateTestMixin, unittest.TestCase): PREVIOUS_NULLABLE = True def setUp(self): return self.setUpMigrateTest() def tearDown(self): return self.tearDownMigrateTest() def create_tables_thd(self, conn): metadata = sa.MetaData() metadata.bind = conn sourcestamps = sautils.Table( 'sourcestamps', metadata, sa.Column('id', sa.Integer, primary_key=True), ) sourcestamps.create() changes = sautils.Table( 'changes', metadata, sa.Column('changeid', sa.Integer, primary_key=True), sa.Column('author', sa.String(255), nullable=False), sa.Column('committer', sa.String(255), nullable=self.PREVIOUS_NULLABLE), sa.Column('comments', sa.Text, nullable=False), sa.Column('branch', sa.String(255)), sa.Column('revision', sa.String(255)), sa.Column('revlink', sa.String(256)), sa.Column('when_timestamp', sa.Integer, nullable=False), sa.Column('category', sa.String(255)), sa.Column('repository', sa.String(length=512), nullable=False, server_default=''), sa.Column('codebase', sa.String(256), nullable=False, server_default=sa.DefaultClause("")), sa.Column('project', sa.String(length=512), nullable=False, server_default=''), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'), nullable=False), sa.Column('parent_changeids', sa.Integer, sa.ForeignKey('changes.changeid', ondelete='SET NULL'), nullable=True), ) changes.create() def test_update(self): def setup_thd(conn): self.create_tables_thd(conn) def verify_thd(conn): metadata = sa.MetaData() metadata.bind = conn changes = sautils.Table('changes', metadata, autoload=True) self.assertIsInstance(changes.c.committer.type, sa.String) self.assertTrue(changes.c.committer.nullable) return self.do_test_migration(54, 55, setup_thd, verify_thd) class MigrationNotNullable(Migration): # same test, but in the case the column was previously non nullable (2.4.0 fresh install case) PREVIOUS_NULLABLE = False buildbot-2.6.0/master/buildbot/test/unit/test_db_model.py000066400000000000000000000040241361162603000235340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import enginestrategy from buildbot.db import model from buildbot.test.util import db class DBConnector_Basic(db.RealDatabaseMixin, unittest.TestCase): """ Basic tests of the DBConnector class - all start with an empty DB """ @defer.inlineCallbacks def setUp(self): yield self.setUpRealDatabase() engine = enginestrategy.create_engine(self.db_url, basedir=os.path.abspath('basedir')) # mock out the pool, and set up the model self.db = mock.Mock() self.db.pool.do_with_engine = lambda thd: defer.maybeDeferred( thd, engine) self.db.model = model.Model(self.db) self.db.start() def tearDown(self): self.db.stop() return self.tearDownRealDatabase() @defer.inlineCallbacks def test_is_current_empty(self): res = yield self.db.model.is_current() self.assertFalse(res) @defer.inlineCallbacks def test_is_current_full(self): yield self.db.model.upgrade() res = yield self.db.model.is_current() self.assertTrue(res) # the upgrade method is very well-tested by the integration tests; the # remainder of the object is just tables. buildbot-2.6.0/master/buildbot/test/unit/test_db_pool.py000066400000000000000000000153401361162603000234100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import time import sqlalchemy as sa from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.db import pool from buildbot.test.util import db from buildbot.util import sautils class Basic(unittest.TestCase): # basic tests, just using an in-memory SQL db and one thread def setUp(self): self.engine = sa.create_engine('sqlite://') self.engine.should_retry = lambda _: False self.engine.optimal_thread_pool_size = 1 self.pool = pool.DBThreadPool(self.engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): yield self.pool.shutdown() @defer.inlineCallbacks def test_do(self): def add(conn, addend1, addend2): rp = conn.execute("SELECT %d + %d" % (addend1, addend2)) return rp.scalar() res = yield self.pool.do(add, 10, 11) self.assertEqual(res, 21) @defer.inlineCallbacks def expect_failure(self, d, expected_exception, expect_logged_error=False): exception = None try: yield d except Exception as e: exception = e errors = self.flushLoggedErrors(expected_exception) if expect_logged_error: self.assertEqual(len(errors), 1) self.assertTrue(isinstance(exception, expected_exception)) def test_do_error(self): def fail(conn): rp = conn.execute("EAT COOKIES") return rp.scalar() return self.expect_failure(self.pool.do(fail), sa.exc.OperationalError, expect_logged_error=True) def test_do_exception(self): def raise_something(conn): raise RuntimeError("oh noes") return self.expect_failure(self.pool.do(raise_something), RuntimeError, expect_logged_error=True) @defer.inlineCallbacks def test_do_with_engine(self): def add(engine, addend1, addend2): rp = engine.execute("SELECT %d + %d" % (addend1, addend2)) return rp.scalar() res = yield self.pool.do_with_engine(add, 10, 11) self.assertEqual(res, 21) def test_do_with_engine_exception(self): def fail(engine): rp = engine.execute("EAT COOKIES") return rp.scalar() return self.expect_failure(self.pool.do_with_engine(fail), sa.exc.OperationalError) @defer.inlineCallbacks def test_persistence_across_invocations(self): # NOTE: this assumes that both methods are called with the same # connection; if they run in parallel threads then it is not valid to # assume that the database engine will have finalized the first # transaction (and thus created the table) by the time the second # transaction runs. This is why we set optimal_thread_pool_size in # setUp. 
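        # --- illustrative sketch (added commentary, not part of the original test) ---
        # The note above relies on the pool being limited to a single worker thread
        # so that both do_with_engine() calls reuse the same connection. This helper
        # mirrors the setUp() configuration of this class; it is a hypothetical
        # example only and is never called.
        def _example_single_threaded_pool():
            engine = sa.create_engine('sqlite://')
            engine.optimal_thread_pool_size = 1
            return pool.DBThreadPool(engine, reactor=reactor)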
def create_table(engine): engine.execute("CREATE TABLE tmp ( a integer )") yield self.pool.do_with_engine(create_table) def insert_into_table(engine): engine.execute("INSERT INTO tmp values ( 1 )") yield self.pool.do_with_engine(insert_into_table) class Stress(unittest.TestCase): def setUp(self): setup_engine = sa.create_engine('sqlite:///test.sqlite') setup_engine.execute("pragma journal_mode = wal") setup_engine.execute("CREATE TABLE test (a integer, b integer)") self.engine = sa.create_engine('sqlite:///test.sqlite') self.engine.optimal_thread_pool_size = 2 self.pool = pool.DBThreadPool(self.engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): yield self.pool.shutdown() os.unlink("test.sqlite") @defer.inlineCallbacks def test_inserts(self): def write(conn): trans = conn.begin() conn.execute("INSERT INTO test VALUES (1, 1)") time.sleep(31) trans.commit() d1 = self.pool.do(write) def write2(conn): trans = conn.begin() conn.execute("INSERT INTO test VALUES (1, 1)") trans.commit() d2 = defer.Deferred() d2.addCallback(lambda _: self.pool.do(write2)) reactor.callLater(0.1, d2.callback, None) yield defer.DeferredList([d1, d2]) # don't run this test, since it takes 30s del test_inserts class BasicWithDebug(Basic): # same thing, but with debug=True def setUp(self): pool.debug = True return super().setUp() def tearDown(self): pool.debug = False return super().tearDown() class Native(unittest.TestCase, db.RealDatabaseMixin): # similar tests, but using the BUILDBOT_TEST_DB_URL @defer.inlineCallbacks def setUp(self): yield self.setUpRealDatabase(want_pool=False) self.pool = pool.DBThreadPool(self.db_engine, reactor=reactor) @defer.inlineCallbacks def tearDown(self): # try to delete the 'native_tests' table meta = sa.MetaData() native_tests = sautils.Table("native_tests", meta) def thd(conn): native_tests.drop(bind=self.db_engine, checkfirst=True) yield self.pool.do(thd) # tearDownRealDatabase() won't shutdown the pool as want_pool was false in # setUpRealDatabase call yield self.pool.shutdown() yield self.tearDownRealDatabase() @defer.inlineCallbacks def test_ddl_and_queries(self): meta = sa.MetaData() native_tests = sautils.Table("native_tests", meta, sa.Column('name', sa.String(length=200))) # perform a DDL operation and immediately try to access that table; # this has caused problems in the past, so this is basically a # regression test. def ddl(conn): t = conn.begin() native_tests.create(bind=conn) t.commit() yield self.pool.do(ddl) def access(conn): native_tests.insert(bind=conn).execute([{'name': 'foo'}]) yield self.pool.do(access) buildbot-2.6.0/master/buildbot/test/unit/test_db_schedulers.py000066400000000000000000000411541361162603000246020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import schedulers from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import db from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin class Tests(interfaces.InterfaceTests): # test data ss92 = fakedb.SourceStamp(id=92) change3 = fakedb.Change(changeid=3) change4 = fakedb.Change(changeid=4) change5 = fakedb.Change(changeid=5) change6 = fakedb.Change(changeid=6, branch='sql') scheduler24 = fakedb.Scheduler(id=24, name='schname') master13 = fakedb.Master(id=13, name='m1', active=1) scheduler24master = fakedb.SchedulerMaster(schedulerid=24, masterid=13) scheduler25 = fakedb.Scheduler(id=25, name='schname2') master14 = fakedb.Master(id=14, name='m2', active=0) scheduler25master = fakedb.SchedulerMaster(schedulerid=25, masterid=14) # tests def test_signature_enable(self): @self.assertArgSpecMatches(self.db.schedulers.enable) def enable(self, schedulerid, v): pass @defer.inlineCallbacks def test_enable(self): yield self.insertTestData([self.scheduler24, self.master13, self.scheduler24master]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=13)) yield self.db.schedulers.enable(24, False) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=False, masterid=13)) yield self.db.schedulers.enable(24, True) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=13)) def test_signature_classifyChanges(self): @self.assertArgSpecMatches(self.db.schedulers.classifyChanges) def classifyChanges(self, schedulerid, classifications): pass @defer.inlineCallbacks def test_classifyChanges(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.scheduler24]) yield self.db.schedulers.classifyChanges(24, {3: False, 4: True}) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: False, 4: True}) @defer.inlineCallbacks def test_classifyChanges_again(self): # test reclassifying changes, which may happen during some timing # conditions. 
It's important that this test uses multiple changes, # only one of which already exists yield self.insertTestData([ self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24, fakedb.SchedulerChange(schedulerid=24, changeid=5, important=0), ]) yield self.db.schedulers.classifyChanges( 24, {3: True, 4: False, 5: True, 6: False}) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True, 6: False}) def test_signature_flushChangeClassifications(self): @self.assertArgSpecMatches( self.db.schedulers.flushChangeClassifications) def flushChangeClassifications(self, schedulerid, less_than=None): pass @defer.inlineCallbacks def test_flushChangeClassifications(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1)) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True}) yield self.db.schedulers.flushChangeClassifications(24) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {}) @defer.inlineCallbacks def test_flushChangeClassifications_less_than(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1)) yield self.db.schedulers.flushChangeClassifications(24, less_than=5) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {5: True}) def test_signature_getChangeClassifications(self): @self.assertArgSpecMatches(self.db.schedulers.getChangeClassifications) def getChangeClassifications(self, schedulerid, branch=-1, repository=-1, project=-1, codebase=-1): pass @defer.inlineCallbacks def test_getChangeClassifications(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1), (6, 1)) res = yield self.db.schedulers.getChangeClassifications(24) self.assertEqual(res, {3: True, 4: False, 5: True, 6: True}) @defer.inlineCallbacks def test_getChangeClassifications_branch(self): yield self.insertTestData([self.ss92, self.change3, self.change4, self.change5, self.change6, self.scheduler24]) yield self.addClassifications(24, (3, 1), (4, 0), (5, 1), (6, 1)) res = yield self.db.schedulers.getChangeClassifications(24, branch='sql') self.assertEqual(res, {6: True}) def test_signature_findSchedulerId(self): @self.assertArgSpecMatches(self.db.schedulers.findSchedulerId) def findSchedulerId(self, name): pass @defer.inlineCallbacks def test_findSchedulerId_new(self): id = yield self.db.schedulers.findSchedulerId('schname') sch = yield self.db.schedulers.getScheduler(id) self.assertEqual(sch['name'], 'schname') @defer.inlineCallbacks def test_findSchedulerId_existing(self): id = yield self.db.schedulers.findSchedulerId('schname') id2 = yield self.db.schedulers.findSchedulerId('schname') self.assertEqual(id, id2) def test_signature_setSchedulerMaster(self): @self.assertArgSpecMatches(self.db.schedulers.setSchedulerMaster) def setSchedulerMaster(self, schedulerid, masterid): pass @defer.inlineCallbacks def test_setSchedulerMaster_fresh(self): yield self.insertTestData([self.scheduler24, self.master13]) yield self.db.schedulers.setSchedulerMaster(24, 13) sch = yield self.db.schedulers.getScheduler(24) self.assertEqual(sch['masterid'], 13) @defer.inlineCallbacks def 
test_setSchedulerMaster_inactive_but_linked(self): d = self.insertTestData([ self.master13, self.scheduler25, self.master14, self.scheduler25master, ]) d.addCallback(lambda _: self.db.schedulers.setSchedulerMaster(25, 13)) yield self.assertFailure(d, schedulers.SchedulerAlreadyClaimedError) @defer.inlineCallbacks def test_setSchedulerMaster_inactive_but_linked_to_this_master(self): yield self.insertTestData([ self.scheduler25, self.master14, self.scheduler25master, ]) yield self.db.schedulers.setSchedulerMaster(25, 14) @defer.inlineCallbacks def test_setSchedulerMaster_active(self): d = self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, ]) d.addCallback(lambda _: self.db.schedulers.setSchedulerMaster(24, 14)) yield self.assertFailure(d, schedulers.SchedulerAlreadyClaimedError) @defer.inlineCallbacks def test_setSchedulerMaster_None(self): yield self.insertTestData([ self.scheduler25, self.master14, self.scheduler25master, ]) yield self.db.schedulers.setSchedulerMaster(25, None) sch = yield self.db.schedulers.getScheduler(25) self.assertEqual(sch['masterid'], None) @defer.inlineCallbacks def test_setSchedulerMaster_None_unowned(self): yield self.insertTestData([self.scheduler25]) yield self.db.schedulers.setSchedulerMaster(25, None) sch = yield self.db.schedulers.getScheduler(25) self.assertEqual(sch['masterid'], None) def test_signature_getScheduler(self): @self.assertArgSpecMatches(self.db.schedulers.getScheduler) def getScheduler(self, schedulerid): pass @defer.inlineCallbacks def test_getScheduler(self): yield self.insertTestData([self.scheduler24]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=None)) @defer.inlineCallbacks def test_getScheduler_missing(self): sch = yield self.db.schedulers.getScheduler(24) self.assertEqual(sch, None) @defer.inlineCallbacks def test_getScheduler_active(self): yield self.insertTestData([self.scheduler24, self.master13, self.scheduler24master]) sch = yield self.db.schedulers.getScheduler(24) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=24, name='schname', enabled=True, masterid=13)) @defer.inlineCallbacks def test_getScheduler_inactive_but_linked(self): yield self.insertTestData([self.scheduler25, self.master14, self.scheduler25master]) sch = yield self.db.schedulers.getScheduler(25) validation.verifyDbDict(self, 'schedulerdict', sch) self.assertEqual(sch, dict( id=25, name='schname2', enabled=True, masterid=14)) # row exists, but marked inactive def test_signature_getSchedulers(self): @self.assertArgSpecMatches(self.db.schedulers.getSchedulers) def getSchedulers(self, active=None, masterid=None): pass @defer.inlineCallbacks def test_getSchedulers(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25, ]) def schKey(sch): return sch['id'] schlist = yield self.db.schedulers.getSchedulers() [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist, key=schKey), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), dict(id=25, name='schname2', enabled=True, masterid=None), ], key=schKey)) @defer.inlineCallbacks def test_getSchedulers_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25, ]) schlist = yield self.db.schedulers.getSchedulers(masterid=13) [validation.verifyDbDict(self, 
'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) @defer.inlineCallbacks def test_getSchedulers_active(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers(active=True) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) @defer.inlineCallbacks def test_getSchedulers_active_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers( active=True, masterid=13) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=24, name='schname', enabled=True, masterid=13), ])) schlist = yield self.db.schedulers.getSchedulers( active=True, masterid=14) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) @defer.inlineCallbacks def test_getSchedulers_inactive(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers(active=False) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), sorted([ dict(id=25, name='schname2', enabled=True, masterid=None), ])) @defer.inlineCallbacks def test_getSchedulers_inactive_masterid(self): yield self.insertTestData([ self.scheduler24, self.master13, self.scheduler24master, self.scheduler25 ]) schlist = yield self.db.schedulers.getSchedulers( active=False, masterid=13) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) schlist = yield self.db.schedulers.getSchedulers( active=False, masterid=14) [validation.verifyDbDict(self, 'schedulerdict', sch) for sch in schlist] self.assertEqual(sorted(schlist), []) # always returns [] by spec! class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData def addClassifications(self, schedulerid, *classifications): self.db.schedulers.fakeClassifications(schedulerid, dict(classifications)) return defer.succeed(None) class TestRealDB(db.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['changes', 'schedulers', 'masters', 'sourcestamps', 'patches', 'scheduler_masters', 'scheduler_changes']) self.db.schedulers = \ schedulers.SchedulersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def addClassifications(self, schedulerid, *classifications): def thd(conn): q = self.db.model.scheduler_changes.insert() conn.execute(q, [ dict(changeid=c[0], schedulerid=schedulerid, important=c[1]) for c in classifications]) yield self.db.pool.do(thd) buildbot-2.6.0/master/buildbot/test/unit/test_db_sourcestamps.py000066400000000000000000000412351361162603000251710ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import sourcestamps from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime CREATED_AT = 927845299 def sourceStampKey(sourceStamp): return (sourceStamp['repository'], sourceStamp['branch'], sourceStamp['created_at']) class Tests(interfaces.InterfaceTests): def test_signature_findSourceStampId(self): @self.assertArgSpecMatches(self.db.sourcestamps.findSourceStampId) def findSourceStampId(self, branch=None, revision=None, repository=None, project=None, codebase=None, patch_body=None, patch_level=None, patch_author=None, patch_comment=None, patch_subdir=None): pass def test_signature_getSourceStamp(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStamp) def getSourceStamp(self, key, no_cache=False): pass def test_signature_getSourceStamps(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStamps) def getSourceStamps(self): pass @defer.inlineCallbacks def test_findSourceStampId_simple(self): self.reactor.advance(CREATED_AT) ssid = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper') ssdict = yield self.db.sourcestamps.getSourceStamp(ssid) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'branch': 'production', 'codebase': 'cb', 'patchid': None, 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'stamper', 'repository': 'test://repo', 'revision': 'abdef', 'ssid': ssid, 'created_at': epoch2datetime(CREATED_AT), }) @defer.inlineCallbacks def test_findSourceStampId_simple_unique(self): ssid1 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper') ssid2 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='xxxxx', # different revision repository='test://repo', codebase='cb', project='stamper') ssid3 = yield self.db.sourcestamps.findSourceStampId( # same as ssid1 branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper') self.assertEqual(ssid1, ssid3) self.assertNotEqual(ssid1, ssid2) @defer.inlineCallbacks def test_findSourceStampId_simple_unique_patch(self): ssid1 = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'++ --', patch_level=1, patch_author='me', patch_comment='hi', patch_subdir='.') ssid2 = yield 
self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'++ --', patch_level=1, patch_author='me', patch_comment='hi', patch_subdir='.') # even with the same patch contents, we get different ids self.assertNotEqual(ssid1, ssid2) @defer.inlineCallbacks def test_findSourceStampId_patch(self): self.reactor.advance(CREATED_AT) ssid = yield self.db.sourcestamps.findSourceStampId( branch='production', revision='abdef', repository='test://repo', codebase='cb', project='stamper', patch_body=b'my patch', patch_level=3, patch_subdir='master/', patch_author='me', patch_comment="comment") ssdict = yield self.db.sourcestamps.getSourceStamp(ssid) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'branch': 'production', 'codebase': 'cb', 'patchid': 1, 'patch_author': 'me', 'patch_body': b'my patch', 'patch_comment': 'comment', 'patch_level': 3, 'patch_subdir': 'master/', 'project': 'stamper', 'repository': 'test://repo', 'revision': 'abdef', 'created_at': epoch2datetime(CREATED_AT), 'ssid': ssid, }) @defer.inlineCallbacks def test_getSourceStamp_simple(self): yield self.insertTestData([ fakedb.SourceStamp(id=234, branch='br', revision='rv', repository='rep', codebase='cb', project='prj', created_at=CREATED_AT), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(ssdict, { 'ssid': 234, 'created_at': epoch2datetime(CREATED_AT), 'branch': 'br', 'revision': 'rv', 'repository': 'rep', 'codebase': 'cb', 'project': 'prj', 'patchid': None, 'patch_body': None, 'patch_level': None, 'patch_subdir': None, 'patch_author': None, 'patch_comment': None, }) @defer.inlineCallbacks def test_getSourceStamp_simple_None(self): "check that NULL branch and revision are handled correctly" yield self.insertTestData([ fakedb.SourceStamp(id=234, branch=None, revision=None, repository='rep', codebase='cb', project='prj'), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual((ssdict['branch'], ssdict['revision']), (None, None)) @defer.inlineCallbacks def test_getSourceStamp_patch(self): yield self.insertTestData([ fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=234, patchid=99), ]) ssdict = yield self.db.sourcestamps.getSourceStamp(234) validation.verifyDbDict(self, 'ssdict', ssdict) self.assertEqual(dict((k, v) for k, v in ssdict.items() if k.startswith('patch_')), dict(patch_body=b'hello, world', patch_level=3, patch_author='bar', patch_comment='foo', patch_subdir='/foo')) @defer.inlineCallbacks def test_getSourceStamp_nosuch(self): ssdict = yield self.db.sourcestamps.getSourceStamp(234) self.assertEqual(ssdict, None) @defer.inlineCallbacks def test_getSourceStamps(self): yield self.insertTestData([ fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=234, revision='r', project='p', codebase='c', repository='rep', branch='b', patchid=99, created_at=CREATED_AT), fakedb.SourceStamp(id=235, revision='r2', project='p2', codebase='c2', repository='rep2', branch='b2', patchid=None, created_at=CREATED_AT + 10), ]) sourcestamps = yield self.db.sourcestamps.getSourceStamps() self.assertEqual(sorted(sourcestamps, key=sourceStampKey), sorted([{ 'branch': 'b', 'codebase': 'c', 'patch_author': 
'bar', 'patchid': 99, 'patch_body': b'hello, world', 'patch_comment': 'foo', 'patch_level': 3, 'patch_subdir': '/foo', 'project': 'p', 'repository': 'rep', 'revision': 'r', 'created_at': epoch2datetime(CREATED_AT), 'ssid': 234, }, { 'branch': 'b2', 'codebase': 'c2', 'patchid': None, 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'project': 'p2', 'repository': 'rep2', 'revision': 'r2', 'created_at': epoch2datetime(CREATED_AT + 10), 'ssid': 235, }], key=sourceStampKey)) @defer.inlineCallbacks def test_getSourceStamps_empty(self): sourcestamps = yield self.db.sourcestamps.getSourceStamps() self.assertEqual(sourcestamps, []) def test_signature_getSourceStampsForBuild(self): @self.assertArgSpecMatches(self.db.sourcestamps.getSourceStampsForBuild) def getSourceStampsForBuild(self, buildid): pass @defer.inlineCallbacks def do_test_getSourceStampsForBuild(self, rows, buildid, expected): yield self.insertTestData(rows) sourcestamps = yield self.db.sourcestamps.getSourceStampsForBuild(buildid) self.assertEqual(sorted(sourcestamps, key=sourceStampKey), sorted(expected, key=sourceStampKey)) def test_getSourceStampsForBuild_OneCodeBase(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A', created_at=CREATED_AT, revision="aaa"), # fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222), ] expected = [{ 'branch': 'master', 'codebase': 'A', 'created_at': epoch2datetime(CREATED_AT), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'aaa', 'ssid': 234}] return self.do_test_getSourceStampsForBuild(rows, 50, expected) def test_getSourceStampsForBuild_3CodeBases(self): rows = [fakedb.Master(id=88, name="bar"), fakedb.Worker(id=13, name='one'), fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A', created_at=CREATED_AT, revision="aaa"), fakedb.SourceStamp(id=235, codebase='B', created_at=CREATED_AT + 10, revision="bbb"), fakedb.SourceStamp(id=236, codebase='C', created_at=CREATED_AT + 20, revision="ccc"), # fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.BuildsetSourceStamp(sourcestampid=235, buildsetid=30), fakedb.BuildsetSourceStamp(sourcestampid=236, buildsetid=30), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.Build(id=50, buildrequestid=19, number=5, masterid=88, builderid=77, state_string="test", workerid=13, started_at=1304262222), ] expected = [{'branch': 'master', 'codebase': 'A', 'created_at': epoch2datetime(CREATED_AT), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'aaa', 'ssid': 234}, {'branch': 'master', 'codebase': 'B', 'created_at': epoch2datetime(CREATED_AT + 10), 
'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'bbb', 'ssid': 235}, {'branch': 'master', 'codebase': 'C', 'created_at': epoch2datetime(CREATED_AT + 20), 'patch_author': None, 'patch_body': None, 'patch_comment': None, 'patch_level': None, 'patch_subdir': None, 'patchid': None, 'project': 'proj', 'repository': 'repo', 'revision': 'ccc', 'ssid': 236}] return self.do_test_getSourceStampsForBuild(rows, 50, expected) class RealTests(Tests): pass class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['sourcestamps', 'patches', 'masters', 'workers', 'buildsets', 'builders', 'buildrequests', 'buildset_sourcestamps', 'builds']) self.db.sourcestamps = \ sourcestamps.SourceStampsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_state.py000066400000000000000000000171341361162603000235620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.db import state from buildbot.test.fake import fakedb from buildbot.test.util import connector_component from buildbot.test.util import db class TestStateConnectorComponent( connector_component.ConnectorComponentMixin, db.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['objects', 'object_state']) self.db.state = state.StateConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() @defer.inlineCallbacks def test_getObjectId_new(self): objectid = yield self.db.state.getObjectId('someobj', 'someclass') yield self.assertNotEqual(objectid, None) def thd(conn): q = self.db.model.objects.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.id, r.name, r.class_name) for r in rows], [(objectid, 'someobj', 'someclass')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getObjectId_existing(self): yield self.insertTestData([ fakedb.Object(id=19, name='someobj', class_name='someclass')]) objectid = yield self.db.state.getObjectId('someobj', 'someclass') self.assertEqual(objectid, 19) @defer.inlineCallbacks def test_getObjectId_conflict(self): # set up to insert a row between looking for an existing object # and adding a new one, triggering the fallback to re-running # the select. 
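        # --- illustrative sketch (added commentary, not part of the original test) ---
        # The comment above describes the race this test provokes: look for an
        # existing object, insert one when nothing is found, and re-run the select
        # if the insert collides with a concurrent writer. The helper below is a
        # hypothetical outline of that select/insert/re-select pattern; it is not
        # the real getObjectId() implementation and is never called.
        def _example_find_or_create(conn, objects_tbl, name, class_name):
            q = objects_tbl.select().where(
                (objects_tbl.c.name == name) &
                (objects_tbl.c.class_name == class_name))
            row = conn.execute(q).fetchone()
            if row is not None:
                return row.id
            try:
                res = conn.execute(objects_tbl.insert(),
                                   dict(name=name, class_name=class_name))
                return res.inserted_primary_key[0]
            except Exception:
                # a concurrent insert won the race: fall back to re-selecting
                return conn.execute(q).fetchone().id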
        def hook(conn):
            conn.execute(self.db.model.objects.insert(),
                         id=27, name='someobj', class_name='someclass')
        self.db.state._test_timing_hook = hook

        objectid = yield self.db.state.getObjectId('someobj', 'someclass')
        self.assertEqual(objectid, 27)

    @defer.inlineCallbacks
    def test_getObjectId_new_big_name(self):
        objectid = yield self.db.state.getObjectId('someobj' * 150,
                                                   'someclass')
        expn = 'someobj' * 9 + 's132bf9b89b0cdbc040d1ebc69e0dbee85dff720a'
        self.assertNotEqual(objectid, None)

        def thd(conn):
            q = self.db.model.objects.select()
            rows = conn.execute(q).fetchall()
            self.assertEqual(
                [(r.id, r.name, r.class_name) for r in rows],
                [(objectid, expn, 'someclass')])
        yield self.db.pool.do(thd)

    def test_getState_missing(self):
        d = self.db.state.getState(10, 'nosuch')
        return self.assertFailure(d, KeyError)

    @defer.inlineCallbacks
    def test_getState_missing_default(self):
        val = yield self.db.state.getState(10, 'nosuch', 'abc')
        self.assertEqual(val, 'abc')

    @defer.inlineCallbacks
    def test_getState_missing_default_None(self):
        val = yield self.db.state.getState(10, 'nosuch', None)
        self.assertEqual(val, None)

    @defer.inlineCallbacks
    def test_getState_present(self):
        yield self.insertTestData([
            fakedb.Object(id=10, name='x', class_name='y'),
            fakedb.ObjectState(objectid=10, name='x', value_json='[1,2]'),
        ])
        val = yield self.db.state.getState(10, 'x')
        self.assertEqual(val, [1, 2])

    def test_getState_badjson(self):
        d = self.insertTestData([
            fakedb.Object(id=10, name='x', class_name='y'),
            fakedb.ObjectState(objectid=10, name='x', value_json='ff[1'),
        ])
        d.addCallback(lambda _: self.db.state.getState(10, 'x'))
        return self.assertFailure(d, TypeError)

    @defer.inlineCallbacks
    def test_setState(self):
        yield self.insertTestData([
            fakedb.Object(id=10, name='-', class_name='-'),
        ])
        yield self.db.state.setState(10, 'x', [1, 2])

        def thd(conn):
            q = self.db.model.object_state.select()
            rows = conn.execute(q).fetchall()
            self.assertEqual(
                [(r.objectid, r.name, r.value_json) for r in rows],
                [(10, 'x', '[1, 2]')])
        yield self.db.pool.do(thd)

    def test_setState_badjson(self):
        d = self.insertTestData([
            fakedb.Object(id=10, name='x', class_name='y'),
        ])
        d.addCallback(lambda _: self.db.state.setState(10, 'x', self))  # self is not JSON-able..
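        # State values are stored JSON-encoded in object_state.value_json
        # (the successful test above stores [1, 2] as the string '[1, 2]'),
        # so a value that cannot be JSON-serialized -- here, the TestCase
        # instance itself -- is expected to fail with TypeError.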
return self.assertFailure(d, TypeError) @defer.inlineCallbacks def test_setState_existing(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), fakedb.ObjectState(objectid=10, name='x', value_json='99'), ]) yield self.db.state.setState(10, 'x', [1, 2]) def thd(conn): q = self.db.model.object_state.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.objectid, r.name, r.value_json) for r in rows], [(10, 'x', '[1, 2]')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_setState_conflict(self): def hook(conn): conn.execute(self.db.model.object_state.insert(), objectid=10, name='x', value_json='22') self.db.state._test_timing_hook = hook yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) yield self.db.state.setState(10, 'x', [1, 2]) def thd(conn): q = self.db.model.object_state.select() rows = conn.execute(q).fetchall() self.assertEqual( [(r.objectid, r.name, r.value_json) for r in rows], [(10, 'x', '22')]) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_atomicCreateState(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) res = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2]) self.assertEqual(res, [1, 2]) res = yield self.db.state.getState(10, 'x') self.assertEqual(res, [1, 2]) @defer.inlineCallbacks def test_atomicCreateState_conflict(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) def hook(conn): conn.execute(self.db.model.object_state.insert(), objectid=10, name='x', value_json='22') self.db.state._test_timing_hook = hook res = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2]) self.assertEqual(res, 22) res = yield self.db.state.getState(10, 'x') self.assertEqual(res, 22) @defer.inlineCallbacks def test_atomicCreateState_nojsonable(self): yield self.insertTestData([ fakedb.Object(id=10, name='-', class_name='-'), ]) d = self.db.state.atomicCreateState(10, 'x', object) yield self.assertFailure(d, TypeError) buildbot-2.6.0/master/buildbot/test/unit/test_db_steps.py000066400000000000000000000345461361162603000236060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import defer from twisted.trial import unittest from buildbot.db import steps from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import epoch2datetime TIME1 = 1304262222 TIME2 = 1304262223 TIME3 = 1304262224 TIME4 = 1304262235 class Tests(TestReactorMixin, interfaces.InterfaceTests): # common sample data backgroundData = [ fakedb.Worker(id=47, name='linux'), fakedb.Buildset(id=20), fakedb.Builder(id=88, name='b1'), fakedb.BuildRequest(id=41, buildsetid=20, builderid=88), fakedb.Master(id=88), fakedb.Build(id=30, buildrequestid=41, number=7, masterid=88, builderid=88, workerid=47), fakedb.Build(id=31, buildrequestid=41, number=8, masterid=88, builderid=88, workerid=47), ] stepRows = [ fakedb.Step(id=70, number=0, name='one', buildid=30, started_at=TIME1, complete_at=TIME2, state_string='test', results=0), fakedb.Step(id=71, number=1, name='two', buildid=30, started_at=TIME2, complete_at=TIME3, state_string='test', results=2, urls_json='["http://url"]', hidden=1), fakedb.Step(id=72, number=2, name='three', buildid=30, started_at=TIME3), fakedb.Step(id=73, number=0, name='wrong-build', buildid=31), ] stepDicts = [ {'id': 70, 'buildid': 30, 'number': 0, 'name': 'one', 'results': 0, 'started_at': epoch2datetime(TIME1), 'complete_at': epoch2datetime(TIME2), 'state_string': 'test', 'urls': [], 'hidden': False}, {'id': 71, 'buildid': 30, 'number': 1, 'name': 'two', 'results': 2, 'started_at': epoch2datetime(TIME2), 'complete_at': epoch2datetime(TIME3), 'state_string': 'test', 'urls': ['http://url'], 'hidden': True}, {'id': 72, 'buildid': 30, 'number': 2, 'name': 'three', 'results': None, 'started_at': epoch2datetime(TIME3), 'complete_at': None, 'state_string': '', 'urls': [], 'hidden': False}, ] def setUp(self): self.setUpTestReactor() # signature tests def test_signature_getStep(self): @self.assertArgSpecMatches(self.db.steps.getStep) def getStep(self, stepid=None, buildid=None, number=None, name=None): pass def test_signature_getSteps(self): @self.assertArgSpecMatches(self.db.steps.getSteps) def getSteps(self, buildid): pass def test_signature_addStep(self): @self.assertArgSpecMatches(self.db.steps.addStep) def addStep(self, buildid, name, state_string): pass def test_signature_startStep(self): @self.assertArgSpecMatches(self.db.steps.startStep) def addStep(self, stepid): pass def test_signature_setStepStateString(self): @self.assertArgSpecMatches(self.db.steps.setStepStateString) def setStepStateString(self, stepid, state_string): pass def test_signature_finishStep(self): @self.assertArgSpecMatches(self.db.steps.finishStep) def finishStep(self, stepid, results, hidden): pass # method tests @defer.inlineCallbacks def test_getStep(self): yield self.insertTestData(self.backgroundData + [self.stepRows[0]]) stepdict = yield self.db.steps.getStep(70) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict, self.stepDicts[0]) @defer.inlineCallbacks def test_getStep_missing(self): stepdict = yield self.db.steps.getStep(50) self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_number(self): yield self.insertTestData(self.backgroundData + [self.stepRows[1]]) stepdict = yield self.db.steps.getStep(buildid=30, number=1) validation.verifyDbDict(self, 'stepdict', 
stepdict) self.assertEqual(stepdict['id'], 71) @defer.inlineCallbacks def test_getStep_number_missing(self): yield self.insertTestData(self.backgroundData + [self.stepRows[1]]) stepdict = yield self.db.steps.getStep(buildid=30, number=9) self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_name(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) stepdict = yield self.db.steps.getStep(buildid=30, name='three') validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['id'], 72) @defer.inlineCallbacks def test_getStep_name_missing(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) stepdict = yield self.db.steps.getStep(buildid=30, name='five') self.assertEqual(stepdict, None) @defer.inlineCallbacks def test_getStep_invalid(self): d = self.db.steps.getStep(buildid=30) yield self.assertFailure(d, RuntimeError) @defer.inlineCallbacks def test_getSteps(self): yield self.insertTestData(self.backgroundData + self.stepRows) stepdicts = yield self.db.steps.getSteps(buildid=30) [validation.verifyDbDict(self, 'stepdict', stepdict) for stepdict in stepdicts] self.assertEqual(stepdicts, self.stepDicts[:3]) @defer.inlineCallbacks def test_getSteps_none(self): yield self.insertTestData(self.backgroundData + self.stepRows) stepdicts = yield self.db.steps.getSteps(buildid=33) self.assertEqual(stepdicts, []) @defer.inlineCallbacks def test_addStep_getStep(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData) stepid, number, name = yield self.db.steps.addStep(buildid=30, name='new', state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (0, 'new')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict, { 'id': stepid, 'buildid': 30, 'name': 'new', 'number': 0, 'started_at': epoch2datetime(TIME1), 'complete_at': None, 'results': None, 'state_string': 'new', 'urls': [], 'hidden': False}) @defer.inlineCallbacks def test_addStep_getStep_existing_step(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [self.stepRows[0]]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='new', state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (1, 'new')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) @defer.inlineCallbacks def test_addStep_getStep_name_collisions(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [ fakedb.Step(id=73, number=0, name='new', buildid=30), fakedb.Step(id=74, number=1, name='new_1', buildid=30), fakedb.Step(id=75, number=2, name='new_2', buildid=30), fakedb.Step(id=76, number=3, name='new_step', buildid=30), ]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='new', state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (4, 'new_3')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) @defer.inlineCallbacks def test_setStepStateString(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield self.db.steps.setStepStateString(stepid=72, state_string='aaa') stepdict = yield 
self.db.steps.getStep(stepid=72) self.assertEqual(stepdict['state_string'], 'aaa') @defer.inlineCallbacks def test_addURL(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield self.db.steps.addURL(stepid=72, name='foo', url='bar') stepdict = yield self.db.steps.getStep(stepid=72) self.assertEqual(stepdict['urls'], [{'name': 'foo', 'url': 'bar'}]) @defer.inlineCallbacks def test_addURL_race(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield defer.gatherResults([ # only a tiny sleep is required to see the problem. self.db.steps.addURL(stepid=72, name='foo', url='bar', _racehook=lambda: time.sleep(.01)), self.db.steps.addURL(stepid=72, name='foo2', url='bar2')]) stepdict = yield self.db.steps.getStep(stepid=72) def urlKey(url): return url['name'] # order is not guaranteed though self.assertEqual(sorted(stepdict['urls'], key=urlKey), sorted([{'name': 'foo', 'url': 'bar'}, {'name': 'foo2', 'url': 'bar2'}], key=urlKey)) @defer.inlineCallbacks def test_addURL_no_duplicate(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield defer.gatherResults([ self.db.steps.addURL(stepid=72, name='foo', url='bar'), self.db.steps.addURL(stepid=72, name='foo', url='bar')]) stepdict = yield self.db.steps.getStep(stepid=72) self.assertEqual(stepdict['urls'], [{'name': 'foo', 'url': 'bar'}]) @defer.inlineCallbacks def test_finishStep(self): self.reactor.advance(TIME2) yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield self.db.steps.finishStep(stepid=72, results=11, hidden=False) stepdict = yield self.db.steps.getStep(stepid=72) self.assertEqual(stepdict['results'], 11) self.assertEqual(stepdict['complete_at'], epoch2datetime(TIME2)) self.assertEqual(stepdict['hidden'], False) @defer.inlineCallbacks def test_finishStep_hidden(self): yield self.insertTestData(self.backgroundData + [self.stepRows[2]]) yield self.db.steps.finishStep(stepid=72, results=11, hidden=True) stepdict = yield self.db.steps.getStep(stepid=72) self.assertEqual(stepdict['hidden'], True) class RealTests(Tests): # the fake connector doesn't deal with this edge case @defer.inlineCallbacks def test_addStep_getStep_name_collisions_too_long(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [ fakedb.Step(id=73, number=0, name='a' * 49, buildid=30), fakedb.Step(id=74, number=1, name='a' * 48 + '_1', buildid=30), ]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='a' * 49, state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (2, 'a' * 48 + '_2')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) @defer.inlineCallbacks def test_addStep_getStep_name_collisions_too_long_extra_digits(self): self.reactor.advance(TIME1) yield self.insertTestData(self.backgroundData + [ fakedb.Step(id=73, number=0, name='a' * 50, buildid=30), ] + [fakedb.Step(id=73 + i, number=i, name='a' * 48 + ('_%d' % i), buildid=30) for i in range(1, 10) ] + [fakedb.Step(id=73 + i, number=i, name='a' * 47 + ('_%d' % i), buildid=30) for i in range(10, 100) ]) stepid, number, name = yield self.db.steps.addStep( buildid=30, name='a' * 50, state_string='new') yield self.db.steps.startStep(stepid=stepid) self.assertEqual((number, name), (100, 'a' * 46 + '_100')) stepdict = yield self.db.steps.getStep(stepid=stepid) validation.verifyDbDict(self, 
'stepdict', stepdict) self.assertEqual(stepdict['number'], number) self.assertEqual(stepdict['name'], name) class TestFakeDB(Tests, unittest.TestCase): def setUp(self): super().setUp() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers']) self.db.steps = steps.StepsConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_db_users.py000066400000000000000000000406201361162603000235770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sqlalchemy from twisted.internet import defer from twisted.trial import unittest from buildbot.db import users from buildbot.test.fake import fakedb from buildbot.test.util import connector_component class TestUsersConnectorComponent(connector_component.ConnectorComponentMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['users', 'users_info', 'changes', 'change_users', 'sourcestamps', 'patches']) self.db.users = users.UsersConnectorComponent(self.db) def tearDown(self): return self.tearDownConnectorComponent() # sample user data user1_rows = [ fakedb.User(uid=1, identifier='soap'), fakedb.UserInfo(uid=1, attr_type='IPv9', attr_data='0578cc6.8db024'), ] user2_rows = [ fakedb.User(uid=2, identifier='lye'), fakedb.UserInfo(uid=2, attr_type='git', attr_data='Tyler Durden '), fakedb.UserInfo(uid=2, attr_type='irc', attr_data='durden') ] user3_rows = [ fakedb.User(uid=3, identifier='marla', bb_username='marla', bb_password='cancer') ] user1_dict = { 'uid': 1, 'identifier': 'soap', 'bb_username': None, 'bb_password': None, 'IPv9': '0578cc6.8db024', } user2_dict = { 'uid': 2, 'identifier': 'lye', 'bb_username': None, 'bb_password': None, 'irc': 'durden', 'git': 'Tyler Durden ' } user3_dict = { 'uid': 3, 'identifier': 'marla', 'bb_username': 'marla', 'bb_password': 'cancer', } # tests @defer.inlineCallbacks def test_addUser_new(self): uid = yield self.db.users.findUserByAttr(identifier='soap', attr_type='subspace_net_handle', attr_data='Durden0924') def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'subspace_net_handle') 
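                # findUserByAttr created exactly one row in `users` and one
                # matching row in `users_info`; each users_info row holds a
                # single (attr_type, attr_data) pair keyed by uid, which is
                # what these assertions check.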
self.assertEqual(infos[0].attr_data, 'Durden0924') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_existing(self): yield self.insertTestData(self.user1_rows) uid = yield self.db.users.findUserByAttr( identifier='soapy', attr_type='IPv9', attr_data='0578cc6.8db024') self.assertEqual(uid, 1) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') # not changed! self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'IPv9') self.assertEqual(infos[0].attr_data, '0578cc6.8db024') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_findUser_existing(self): yield self.insertTestData( self.user1_rows + self.user2_rows + self.user3_rows) uid = yield self.db.users.findUserByAttr( identifier='lye', attr_type='git', attr_data='Tyler Durden ') self.assertEqual(uid, 2) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(( sorted([tuple(u) for u in users]), sorted([tuple(i) for i in infos]) ), ( [ (1, 'soap', None, None), (2, 'lye', None, None), (3, 'marla', 'marla', 'cancer'), ], [ (1, 'IPv9', '0578cc6.8db024'), (2, 'git', 'Tyler Durden '), (2, 'irc', 'durden') ])) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_race(self): def race_thd(conn): # note that this assumes that both inserts can happen "at once". # This is the case for DB engines that support transactions, but # not for MySQL. so this test does not detect the potential MySQL # failure, which will generally result in a spurious failure. 
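            # race_thd is handed to findUserByAttr below as _race_hook; the
            # connector calls it with its connection from the db thread, so
            # the rows inserted here stand in for a competing master winning
            # the race.  findUserByAttr is then expected to return the
            # existing uid (99) rather than create a duplicate user.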
conn.execute(self.db.model.users.insert(), uid=99, identifier='soap') conn.execute(self.db.model.users_info.insert(), uid=99, attr_type='subspace_net_handle', attr_data='Durden0924') uid = yield self.db.users.findUserByAttr(identifier='soap', attr_type='subspace_net_handle', attr_data='Durden0924', _race_hook=race_thd) self.assertEqual(uid, 99) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute(users_tbl.select()).fetchall() infos = conn.execute(users_info_tbl.select()).fetchall() self.assertEqual(len(users), 1) self.assertEqual(users[0].uid, uid) self.assertEqual(users[0].identifier, 'soap') self.assertEqual(len(infos), 1) self.assertEqual(infos[0].uid, uid) self.assertEqual(infos[0].attr_type, 'subspace_net_handle') self.assertEqual(infos[0].attr_data, 'Durden0924') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_addUser_existing_identifier(self): # see http://trac.buildbot.net/ticket/2587 yield self.insertTestData(self.user1_rows) uid = yield self.db.users.findUserByAttr( identifier='soap', # same identifier attr_type='IPv9', attr_data='fffffff.ffffff') # different attr # creates a new user self.assertNotEqual(uid, 1) def thd(conn): users_tbl = self.db.model.users users_info_tbl = self.db.model.users_info users = conn.execute( users_tbl.select(order_by=users_tbl.c.identifier)).fetchall() infos = conn.execute( users_info_tbl.select(users_info_tbl.c.uid == uid)).fetchall() self.assertEqual(len(users), 2) self.assertEqual(users[1].uid, uid) self.assertEqual(users[1].identifier, 'soap_2') # unique'd self.assertEqual(len(infos), 1) self.assertEqual(infos[0].attr_type, 'IPv9') self.assertEqual(infos[0].attr_data, 'fffffff.ffffff') yield self.db.pool.do(thd) @defer.inlineCallbacks def test_getUser(self): yield self.insertTestData(self.user1_rows) usdict = yield self.db.users.getUser(1) self.assertEqual(usdict, self.user1_dict) @defer.inlineCallbacks def test_getUser_bb(self): yield self.insertTestData(self.user3_rows) usdict = yield self.db.users.getUser(3) self.assertEqual(usdict, self.user3_dict) @defer.inlineCallbacks def test_getUser_multi_attr(self): yield self.insertTestData(self.user2_rows) usdict = yield self.db.users.getUser(2) self.assertEqual(usdict, self.user2_dict) @defer.inlineCallbacks def test_getUser_no_match(self): yield self.insertTestData(self.user1_rows) none = yield self.db.users.getUser(3) self.assertEqual(none, None) @defer.inlineCallbacks def test_getUsers_none(self): res = yield self.db.users.getUsers() self.assertEqual(res, []) @defer.inlineCallbacks def test_getUsers(self): yield self.insertTestData(self.user1_rows) res = yield self.db.users.getUsers() self.assertEqual(res, [dict(uid=1, identifier='soap')]) @defer.inlineCallbacks def test_getUsers_multiple(self): yield self.insertTestData(self.user1_rows + self.user2_rows) res = yield self.db.users.getUsers() self.assertEqual(res, [dict(uid=1, identifier='soap'), dict(uid=2, identifier='lye')]) @defer.inlineCallbacks def test_getUserByUsername(self): yield self.insertTestData(self.user3_rows) res = yield self.db.users.getUserByUsername("marla") self.assertEqual(res, self.user3_dict) @defer.inlineCallbacks def test_getUserByUsername_no_match(self): yield self.insertTestData(self.user3_rows) none = yield self.db.users.getUserByUsername("tyler") self.assertEqual(none, None) @defer.inlineCallbacks def test_updateUser_existing_type(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, attr_type='IPv9', 
                                       attr_data='abcd.1234')
        usdict = yield self.db.users.getUser(1)
        self.assertEqual(usdict['IPv9'], 'abcd.1234')
        self.assertEqual(usdict['identifier'], 'soap')  # no change

    @defer.inlineCallbacks
    def test_updateUser_new_type(self):
        yield self.insertTestData(self.user1_rows)
        yield self.db.users.updateUser(uid=1, attr_type='IPv4',
                                       attr_data='123.134.156.167')
        usdict = yield self.db.users.getUser(1)
        self.assertEqual(usdict['IPv4'], '123.134.156.167')
        self.assertEqual(usdict['IPv9'], '0578cc6.8db024')  # no change
        self.assertEqual(usdict['identifier'], 'soap')  # no change

    @defer.inlineCallbacks
    def test_updateUser_identifier(self):
        yield self.insertTestData(self.user1_rows)
        yield self.db.users.updateUser(uid=1, identifier='lye')
        usdict = yield self.db.users.getUser(1)
        self.assertEqual(usdict['identifier'], 'lye')
        self.assertEqual(usdict['IPv9'], '0578cc6.8db024')  # no change

    @defer.inlineCallbacks
    def test_updateUser_bb(self):
        yield self.insertTestData(self.user3_rows)
        yield self.db.users.updateUser(uid=3, bb_username='boss',
                                       bb_password='fired')
        usdict = yield self.db.users.getUser(3)
        self.assertEqual(usdict['bb_username'], 'boss')
        self.assertEqual(usdict['bb_password'], 'fired')
        self.assertEqual(usdict['identifier'], 'marla')  # no change

    @defer.inlineCallbacks
    def test_updateUser_all(self):
        yield self.insertTestData(self.user1_rows)
        yield self.db.users.updateUser(
            uid=1, identifier='lye', bb_username='marla',
            bb_password='cancer', attr_type='IPv4',
            attr_data='123.134.156.167')
        usdict = yield self.db.users.getUser(1)
        self.assertEqual(usdict['identifier'], 'lye')
        self.assertEqual(usdict['bb_username'], 'marla')
        self.assertEqual(usdict['bb_password'], 'cancer')
        self.assertEqual(usdict['IPv4'], '123.134.156.167')
        self.assertEqual(usdict['IPv9'], '0578cc6.8db024')  # no change

    @defer.inlineCallbacks
    def test_updateUser_race(self):
        # called from the db thread, this opens a *new* connection (to avoid
        # the existing transaction) and executes a conflicting insert in that
        # connection. This will cause the insert in the db method to fail, and
        # the data in this insert (8.8.8.8) will appear below.
        transaction_wins = []
        if (self.db.pool.engine.dialect.name == 'sqlite' and
                self.db.pool.engine.url.database not in [None, ':memory:']):
            # It's not easy to work with file-based SQLite via multiple
            # connections, because SQLAlchemy (in its default configuration)
            # locks the file during the working session.
            # TODO: This probably can be supported.
raise unittest.SkipTest( "It's hard to test race condition with not in-memory SQLite") def race_thd(conn): conn = self.db.pool.engine.connect() try: r = conn.execute(self.db.model.users_info.insert(), uid=1, attr_type='IPv4', attr_data='8.8.8.8') r.close() except sqlalchemy.exc.OperationalError: # some engine (mysql innodb) will enforce lock until the transaction is over transaction_wins.append(True) # scope variable, we modify a list so that modification is visible in parent scope yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=1, attr_type='IPv4', attr_data='123.134.156.167', _race_hook=race_thd) usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'soap') if transaction_wins: self.assertEqual(usdict['IPv4'], '123.134.156.167') else: self.assertEqual(usdict['IPv4'], '8.8.8.8') self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_update_NoMatch_identifier(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=3, identifier='abcd') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['identifier'], 'soap') # no change @defer.inlineCallbacks def test_update_NoMatch_attribute(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser(uid=3, attr_type='abcd', attr_data='efgh') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_update_NoMatch_bb(self): yield self.insertTestData(self.user1_rows) yield self.db.users.updateUser( uid=3, attr_type='marla', attr_data='cancer') usdict = yield self.db.users.getUser(1) self.assertEqual(usdict['IPv9'], '0578cc6.8db024') # no change @defer.inlineCallbacks def test_removeUser_uid(self): yield self.insertTestData(self.user1_rows) yield self.db.users.removeUser(1) def thd(conn): r = conn.execute(self.db.model.users.select()) r = r.fetchall() self.assertEqual(len(r), 0) yield self.db.pool.do(thd) @defer.inlineCallbacks def test_removeNoMatch(self): yield self.insertTestData(self.user1_rows) yield self.db.users.removeUser(uid=3) @defer.inlineCallbacks def test_identifierToUid_NoMatch(self): res = yield self.db.users.identifierToUid(identifier="soap") self.assertEqual(res, None) @defer.inlineCallbacks def test_identifierToUid_match(self): yield self.insertTestData(self.user1_rows) res = yield self.db.users.identifierToUid(identifier="soap") self.assertEqual(res, 1) buildbot-2.6.0/master/buildbot/test/unit/test_db_workers.py000066400000000000000000000761111361162603000241360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.db import workers from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import connector_component from buildbot.test.util import interfaces from buildbot.test.util import querylog from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin def workerKey(worker): return worker['id'] def configuredOnKey(worker): return (worker['builderid'], worker['masterid']) class Tests(interfaces.InterfaceTests): # common sample data baseRows = [ fakedb.Master(id=10, name='m10'), fakedb.Master(id=11, name='m11'), fakedb.Builder(id=20, name='a'), fakedb.Builder(id=21, name='b'), fakedb.Builder(id=22, name='c'), fakedb.Worker(id=30, name='zero'), fakedb.Worker(id=31, name='one'), ] multipleMasters = [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.BuilderMaster(id=13, builderid=21, masterid=10), fakedb.BuilderMaster(id=14, builderid=20, masterid=11), fakedb.BuilderMaster(id=15, builderid=22, masterid=11), fakedb.BuilderMaster(id=16, builderid=22, masterid=10), fakedb.ConfiguredWorker( id=3012, workerid=30, buildermasterid=12), fakedb.ConfiguredWorker( id=3013, workerid=30, buildermasterid=13), fakedb.ConfiguredWorker( id=3014, workerid=30, buildermasterid=14), fakedb.ConfiguredWorker( id=3114, workerid=31, buildermasterid=14), fakedb.ConfiguredWorker( id=3115, workerid=31, buildermasterid=15), fakedb.ConnectedWorker(id=3010, workerid=30, masterid=10), fakedb.ConnectedWorker(id=3111, workerid=31, masterid=11), ] # sample worker data, with id's avoiding the postgres id sequence BOGUS_NAME = 'bogus' W1_NAME, W1_ID, W1_INFO = 'w1', 100, {'a': 1} worker1_rows = [ fakedb.Worker(id=W1_ID, name=W1_NAME, info=W1_INFO), ] W2_NAME, W2_ID, W2_INFO = 'w2', 200, {'a': 1, 'b': 2} worker2_rows = [ fakedb.Worker(id=W2_ID, name=W2_NAME, info=W2_INFO), ] # tests def test_signature_findWorkerId(self): @self.assertArgSpecMatches(self.db.workers.findWorkerId) def findWorkerId(self, name): pass def test_signature_getWorker(self): @self.assertArgSpecMatches(self.db.workers.getWorker) def getWorker(self, workerid=None, name=None, masterid=None, builderid=None): pass def test_signature_getWorkers(self): @self.assertArgSpecMatches(self.db.workers.getWorkers) def getWorkers(self, masterid=None, builderid=None, paused=None, graceful=None): pass def test_signature_workerConnected(self): @self.assertArgSpecMatches(self.db.workers.workerConnected) def workerConnected(self, workerid, masterid, workerinfo): pass def test_signature_workerDisconnected(self): @self.assertArgSpecMatches(self.db.workers.workerDisconnected) def workerDisconnected(self, workerid, masterid): pass def test_signature_workerConfigured(self): @self.assertArgSpecMatches(self.db.workers.workerConfigured) def workerConfigured(self, workerid, masterid, builderids): pass def test_signature_deconfigureAllWorkersForMaster(self): @self.assertArgSpecMatches(self.db.workers.deconfigureAllWorkersForMaster) def deconfigureAllWorkersForMaster(self, masterid): pass def test_signature_setWorkerState(self): @self.assertArgSpecMatches(self.db.workers.setWorkerState) def setWorkerState(self, workerid, paused, graceful): pass @defer.inlineCallbacks def test_findWorkerId_insert(self): id = yield self.db.workers.findWorkerId(name="xyz") worker = yield self.db.workers.getWorker(workerid=id) self.assertEqual(worker['name'], 'xyz') self.assertEqual(worker['workerinfo'], {}) 
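    # getWorker()/getWorkers() return dicts of the shape exercised throughout
    # this class, roughly (values here are illustrative; the concrete
    # expectations live in the individual tests below):
    #
    #   {'id': 30, 'name': 'zero', 'workerinfo': {...},
    #    'paused': False, 'graceful': False,
    #    'configured_on': [{'masterid': 10, 'builderid': 20}, ...],
    #    'connected_to': [10, ...]}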
@defer.inlineCallbacks def test_findWorkerId_existing(self): yield self.insertTestData(self.baseRows) id = yield self.db.workers.findWorkerId(name="one") self.assertEqual(id, 31) @defer.inlineCallbacks def test_getWorker_no_such(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(workerid=99) self.assertEqual(workerdict, None) @defer.inlineCallbacks def test_getWorker_by_name_no_such(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(name='NOSUCH') self.assertEqual(workerdict, None) @defer.inlineCallbacks def test_getWorker_not_configured(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[], configured_on=[])) @defer.inlineCallbacks def test_getWorker_connected_not_configured(self): yield self.insertTestData(self.baseRows + [ # the worker is connected to this master, but not configured. # weird, but the DB should represent it. fakedb.Worker(id=32, name='two'), fakedb.ConnectedWorker(workerid=32, masterid=11), ]) workerdict = yield self.db.workers.getWorker(workerid=32) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=32, name='two', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[11], configured_on=[])) @defer.inlineCallbacks def test_getWorker_multiple_connections(self): yield self.insertTestData(self.baseRows + [ # the worker is connected to two masters at once. # weird, but the DB should represent it. fakedb.Worker(id=32, name='two'), fakedb.ConnectedWorker(workerid=32, masterid=10), fakedb.ConnectedWorker(workerid=32, masterid=11), fakedb.BuilderMaster(id=24, builderid=20, masterid=10), fakedb.BuilderMaster(id=25, builderid=20, masterid=11), fakedb.ConfiguredWorker(workerid=32, buildermasterid=24), fakedb.ConfiguredWorker(workerid=32, buildermasterid=25), ]) workerdict = yield self.db.workers.getWorker(workerid=32) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=32, name='two', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[10, 11], configured_on=[ {'builderid': 20, 'masterid': 10}, {'builderid': 20, 'masterid': 11}, ])) @defer.inlineCallbacks def test_getWorker_by_name_not_configured(self): yield self.insertTestData(self.baseRows) workerdict = yield self.db.workers.getWorker(name='zero') validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, connected_to=[], configured_on=[])) @defer.inlineCallbacks def test_getWorker_not_connected(self): yield self.insertTestData(self.baseRows + [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.ConfiguredWorker(workerid=30, buildermasterid=12), ]) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[ {'masterid': 10, 'builderid': 20}], connected_to=[])) @defer.inlineCallbacks def test_getWorker_connected(self): yield self.insertTestData(self.baseRows + [ fakedb.BuilderMaster(id=12, builderid=20, masterid=10), fakedb.ConfiguredWorker(workerid=30, buildermasterid=12), fakedb.ConnectedWorker(workerid=30, masterid=10), 
]) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[ {'masterid': 10, 'builderid': 20}], connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30) validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_builderid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30, builderid=20) validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[ {'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorker_with_multiple_masters_builderid_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(workerid=30, builderid=20, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[ {'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorker_by_name_with_multiple_masters_builderid_masterid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdict = yield self.db.workers.getWorker(name='zero', builderid=20, masterid=11) validation.verifyDbDict(self, 'workerdict', workerdict) self.assertEqual(workerdict, dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[ {'masterid': 11, 'builderid': 20}, ], connected_to=[])) @defer.inlineCallbacks def test_getWorkers_no_config(self): yield self.insertTestData(self.baseRows) workerdicts = yield self.db.workers.getWorkers() [validation.verifyDbDict(self, 'workerdict', workerdict) for workerdict in workerdicts] self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[], connected_to=[]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=[], connected_to=[]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config(self): yield 
self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers() for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, {'masterid': 11, 'builderid': 22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_empty(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=11, builderid=21) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), []) @defer.inlineCallbacks def test_getWorkers_with_config_builderid(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(builderid=20) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[10]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_10(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=10) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 10, 'builderid': 20}, {'masterid': 10, 'builderid': 21}, ], key=configuredOnKey), connected_to=[10]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_11(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers(masterid=11) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=30, name='zero', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, ], key=configuredOnKey), connected_to=[]), dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 20}, {'masterid': 11, 'builderid': 
22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_config_masterid_11_builderid_22(self): yield self.insertTestData(self.baseRows + self.multipleMasters) workerdicts = yield self.db.workers.getWorkers( masterid=11, builderid=22) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = sorted( workerdict['configured_on'], key=configuredOnKey) self.assertEqual(sorted(workerdicts, key=workerKey), sorted([ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=False, configured_on=sorted([ {'masterid': 11, 'builderid': 22}, ], key=configuredOnKey), connected_to=[11]), ], key=workerKey)) @defer.inlineCallbacks def test_getWorkers_with_paused(self): yield self.insertTestData(self.baseRows + self.multipleMasters) yield self.db.workers.setWorkerState(31, paused=True, graceful=False) workerdicts = yield self.db.workers.getWorkers( paused=True) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = [] self.assertEqual(workerdicts, [ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=True, graceful=False, configured_on=[], connected_to=[11]), ]) @defer.inlineCallbacks def test_getWorkers_with_graceful(self): yield self.insertTestData(self.baseRows + self.multipleMasters) yield self.db.workers.setWorkerState(31, paused=False, graceful=True) workerdicts = yield self.db.workers.getWorkers( graceful=True) for workerdict in workerdicts: validation.verifyDbDict(self, 'workerdict', workerdict) workerdict['configured_on'] = [] self.assertEqual(workerdicts, [ dict(id=31, name='one', workerinfo={'a': 'b'}, paused=False, graceful=True, configured_on=[], connected_to=[11]), ]) @defer.inlineCallbacks def test_workerConnected_existing(self): yield self.insertTestData(self.baseRows + self.worker1_rows) NEW_INFO = {'other': [1, 2, 3]} yield self.db.workers.workerConnected( workerid=self.W1_ID, masterid=11, workerinfo=NEW_INFO) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': NEW_INFO, 'paused': False, 'graceful': False, 'configured_on': [], 'connected_to': [11]}) @defer.inlineCallbacks def test_workerConnected_already_connected(self): yield self.insertTestData(self.baseRows + self.worker1_rows + [ fakedb.ConnectedWorker(id=888, workerid=self.W1_ID, masterid=11), ]) yield self.db.workers.workerConnected( workerid=self.W1_ID, masterid=11, workerinfo={}) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w['connected_to'], [11]) @defer.inlineCallbacks def test_workerDisconnected(self): yield self.insertTestData(self.baseRows + self.worker1_rows + [ fakedb.ConnectedWorker(id=888, workerid=self.W1_ID, masterid=10), fakedb.ConnectedWorker(id=889, workerid=self.W1_ID, masterid=11), ]) yield self.db.workers.workerDisconnected( workerid=self.W1_ID, masterid=11) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w['connected_to'], [10]) @defer.inlineCallbacks def test_workerDisconnected_already_disconnected(self): yield self.insertTestData(self.baseRows + self.worker1_rows) yield self.db.workers.workerDisconnected( workerid=self.W1_ID, masterid=11) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w['connected_to'], []) @defer.inlineCallbacks def test_setWorkerState_existing(self): yield self.insertTestData(self.baseRows + self.worker1_rows) yield self.db.workers.setWorkerState( workerid=self.W1_ID, 
paused=False, graceful=True) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': self.W1_INFO, 'paused': False, 'graceful': True, 'configured_on': [], 'connected_to': []}) yield self.db.workers.setWorkerState( workerid=self.W1_ID, paused=True, graceful=False) w = yield self.db.workers.getWorker(self.W1_ID) self.assertEqual(w, { 'id': self.W1_ID, 'name': self.W1_NAME, 'workerinfo': self.W1_INFO, 'paused': True, 'graceful': False, 'configured_on': [], 'connected_to': []}) @defer.inlineCallbacks def test_workerConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on'], key=configuredOnKey), sorted([ {'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey)) @defer.inlineCallbacks def test_workerConfiguredTwice(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) # configure again (should eat the duplicate insertion errors) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 21, 22]) w = yield self.db.workers.getWorker(30) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 21, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey) self.assertEqual(x1, x2) @defer.inlineCallbacks def test_workerReConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[20, 22]) w = yield self.db.workers.getWorker(30) w['configured_on'] = sorted(w['configured_on'], key=configuredOnKey) self.assertEqual(w['configured_on'], sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 20, 'masterid': 10}, {'builderid': 22, 'masterid': 10}], key=configuredOnKey)) @defer.inlineCallbacks def test_workerReConfigured_should_not_affect_other_worker(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove all the builders in master 11 yield self.db.workers.workerConfigured( workerid=30, masterid=11, builderids=[]) w = yield self.db.workers.getWorker(30) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 10}, {'builderid': 21, 'masterid': 10}], key=configuredOnKey) self.assertEqual(x1, x2) # ensure worker 31 is not affected (see GitHub issue#3392) w = yield self.db.workers.getWorker(31) x1 = sorted(w['configured_on'], key=configuredOnKey) x2 = sorted([{'builderid': 20, 'masterid': 11}, {'builderid': 22, 'masterid': 11}], key=configuredOnKey) self.assertEqual(x1, x2) @defer.inlineCallbacks def test_workerUnconfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove all builders from master 10 yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[]) w = yield self.db.workers.getWorker(30) w['configured_on'] = sorted(w['configured_on'], key=configuredOnKey) expected = 
sorted([ {'builderid': 20, 'masterid': 11}], key=configuredOnKey) self.assertEqual(w['configured_on'], expected) @defer.inlineCallbacks def test_nothingConfigured(self): yield self.insertTestData(self.baseRows + self.multipleMasters) # should remove builder 21, and add 22 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) yield self.db.workers.workerConfigured( workerid=30, masterid=10, builderids=[]) # should only keep builder for master 11 w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), sorted([ {'builderid': 20, 'masterid': 11}])) @defer.inlineCallbacks def test_deconfiguredAllWorkers(self): yield self.insertTestData(self.baseRows + self.multipleMasters) res = yield self.db.workers.getWorkers(masterid=11) self.assertEqual(len(res), 2) # should remove all worker configured for masterid 11 yield self.db.workers.deconfigureAllWorkersForMaster(masterid=11) res = yield self.db.workers.getWorkers(masterid=11) self.assertEqual(len(res), 0) class RealTests(Tests): # tests that only "real" implementations will pass pass class TestFakeDB(TestReactorMixin, unittest.TestCase, Tests): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.db = fakedb.FakeDBConnector(self) yield self.db.setServiceParent(self.master) self.db.checkForeignKeys = True self.insertTestData = self.db.insertTestData class TestRealDB(unittest.TestCase, connector_component.ConnectorComponentMixin, RealTests, querylog.SqliteMaxVariableMixin): @defer.inlineCallbacks def setUp(self): yield self.setUpConnectorComponent( table_names=['workers', 'masters', 'builders', 'builder_masters', 'connected_workers', 'configured_workers']) self.db.workers = \ workers.WorkersConnectorComponent(self.db) @defer.inlineCallbacks def test_workerConfiguredMany(self): manyWorkers = [ fakedb.BuilderMaster(id=1000, builderid=20, masterid=10), ] + [ fakedb.Worker(id=50 + n, name='zero' + str(n)) for n in range(1000) ] + [ fakedb.ConfiguredWorker( id=n + 3000, workerid=50 + n, buildermasterid=1000) for n in range(1000) ] yield self.insertTestData(self.baseRows + manyWorkers) # should successfully remove all ConfiguredWorker rows with self.assertNoMaxVariables(): yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), []) @defer.inlineCallbacks def test_workerConfiguredManyBuilders(self): manyWorkers = [ fakedb.Builder(id=100 + n, name='a' + str(n)) for n in range(1000) ] + [ fakedb.Worker(id=50 + n, name='zero' + str(n)) for n in range(2000) ] + [ fakedb.BuilderMaster(id=1000 + n, builderid=100 + n, masterid=10) for n in range(1000) ] + [ fakedb.ConfiguredWorker( id=n + 3000, workerid=50 + n, buildermasterid=int(1000 + n / 2)) for n in range(2000) ] yield self.insertTestData(self.baseRows + manyWorkers) # should successfully remove all ConfiguredWorker rows with self.assertNoMaxVariables(): yield self.db.workers.deconfigureAllWorkersForMaster(masterid=10) w = yield self.db.workers.getWorker(30) self.assertEqual(sorted(w['configured_on']), []) def tearDown(self): return self.tearDownConnectorComponent() buildbot-2.6.0/master/buildbot/test/unit/test_download_secret_to_worker.py000066400000000000000000000133351361162603000272430ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import SUCCESS from buildbot.steps.download_secret_to_worker import DownloadSecretsToWorker from buildbot.steps.download_secret_to_worker import RemoveWorkerFileSecret from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestDownloadFileSecretToWorkerCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep( DownloadSecretsToWorker([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "something more")])) args1 = { 'maxsize': None, 'mode': stat.S_IRUSR | stat.S_IWUSR, 'reader': ExpectRemoteRef(remotetransfer.StringFileReader), 'blocksize': 32 * 1024, 'workerdest': os.path.join(self.temp_path, "pathA"), 'workdir': "wkdir" } args2 = { 'maxsize': None, 'mode': stat.S_IRUSR | stat.S_IWUSR, 'reader': ExpectRemoteRef(remotetransfer.StringFileReader), 'blocksize': 32 * 1024, 'workerdest': os.path.join(self.temp_path, "pathB"), 'workdir': "wkdir" } self.expectCommands( Expect('downloadFile', args1) + 0, Expect('downloadFile', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d class TestRemoveWorkerFileSecretCommand30(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep(RemoveWorkerFileSecret([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "somethingmore")]), worker_version={'*': '3.0'}) args1 = { 'path': os.path.join(self.temp_path, "pathA"), 'dir': os.path.abspath(os.path.join(self.temp_path, "pathA")), 'logEnviron': False } args2 = { 'path': os.path.join(self.temp_path, "pathB"), 'dir': os.path.abspath(os.path.join(self.temp_path, "pathB")), 'logEnviron': False } self.expectCommands( Expect('rmdir', args1) + 0, Expect('rmdir', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d class TestRemoveFileSecretToWorkerCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() tempdir = 
FilePath(self.mktemp()) tempdir.createDirectory() self.temp_path = tempdir.path return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testBasic(self): self.setupStep( RemoveWorkerFileSecret([(os.path.join(self.temp_path, "pathA"), "something"), (os.path.join(self.temp_path, "pathB"), "somethingmore")])) args1 = { 'path': os.path.join(self.temp_path, "pathA"), 'logEnviron': False } args2 = { 'path': os.path.join(self.temp_path, "pathB"), 'logEnviron': False } self.expectCommands( Expect('rmfile', args1) + 0, Expect('rmfile', args2) + 0, ) self.expectOutcome( result=SUCCESS, state_string="finished") d = self.runStep() return d buildbot-2.6.0/master/buildbot/test/unit/test_fake_httpclientservice.py000066400000000000000000000047631361162603000265260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util import httpclientservice from buildbot.util import service class myTestedService(service.BuildbotService): name = 'myTestedService' @defer.inlineCallbacks def reconfigService(self, baseurl): self._http = yield httpclientservice.HTTPClientService.getService(self.master, baseurl) @defer.inlineCallbacks def doGetRoot(self): res = yield self._http.get("/") # note that at this point, only the http response headers are received if res.code != 200: raise Exception("%d: server did not succeed" % (res.code)) res_json = yield res.json() # res.json() returns a deferred to represent the time needed to fetch the entire body return res_json class Test(unittest.TestCase): @defer.inlineCallbacks def setUp(self): baseurl = 'http://127.0.0.1:8080' self.parent = service.MasterService() self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.parent, self, baseurl) self.tested = myTestedService(baseurl) yield self.tested.setServiceParent(self.parent) yield self.parent.startService() @defer.inlineCallbacks def test_root(self): self._http.expect("get", "/", content_json={'foo': 'bar'}) response = yield self.tested.doGetRoot() self.assertEqual(response, {'foo': 'bar'}) @defer.inlineCallbacks def test_root_error(self): self._http.expect("get", "/", content_json={'foo': 'bar'}, code=404) try: yield self.tested.doGetRoot() except Exception as e: self.assertEqual(str(e), '404: server did not succeed') buildbot-2.6.0/master/buildbot/test/unit/test_fake_secrets_manager.py000066400000000000000000000111311361162603000261140ustar00rootroot00000000000000 from twisted.internet import defer from twisted.trial import unittest from buildbot.secrets.manager import SecretManager from buildbot.secrets.secret import SecretDetails from buildbot.test.fake import fakemaster from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.misc 
import TestReactorMixin class TestSecretsManager(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.master.config.secretsProviders = [FakeSecretStorage(secretdict={"foo": "bar", "other": "value"})] @defer.inlineCallbacks def testGetManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) secret_service_manager.services = [fakeStorageService] expectedClassName = FakeSecretStorage.__name__ expectedSecretDetail = SecretDetails(expectedClassName, "foo", "bar") secret_result = yield secret_service_manager.get("foo") strExpectedSecretDetail = str(secret_result) self.assertEqual(secret_result, expectedSecretDetail) self.assertEqual(secret_result.key, "foo") self.assertEqual(secret_result.value, "bar") self.assertEqual(secret_result.source, expectedClassName) self.assertEqual(strExpectedSecretDetail, "FakeSecretStorage foo: 'bar'") @defer.inlineCallbacks def testGetNoDataManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) secret_service_manager.services = [fakeStorageService] secret_result = yield secret_service_manager.get("foo2") self.assertEqual(secret_result, None) @defer.inlineCallbacks def testGetDataMultipleManagerService(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar", "other2": "value"}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] expectedSecretDetail = SecretDetails(FakeSecretStorage.__name__, "foo2", "bar") secret_result = yield secret_service_manager.get("foo2") self.assertEqual(secret_result, expectedSecretDetail) @defer.inlineCallbacks def testGetDataMultipleManagerValues(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": ""}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar2", "other": ""}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] expectedSecretDetail = SecretDetails(FakeSecretStorage.__name__, "other", "") secret_result = yield secret_service_manager.get("other") self.assertEqual(secret_result, expectedSecretDetail) @defer.inlineCallbacks def testGetDataMultipleManagerServiceNoDatas(self): secret_service_manager = SecretManager() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) otherFakeStorageService = FakeSecretStorage() otherFakeStorageService.reconfigService(secretdict={"foo2": "bar", "other2": "value"}) secret_service_manager.services = [fakeStorageService, otherFakeStorageService] secret_result = yield secret_service_manager.get("foo3") self.assertEqual(secret_result, None) buildbot-2.6.0/master/buildbot/test/unit/test_interpolate_secrets.py000066400000000000000000000063041361162603000260500ustar00rootroot00000000000000import gc from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.secrets.manager import SecretManager from 
buildbot.test.fake import fakemaster from buildbot.test.fake.fakebuild import FakeBuild from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class FakeBuildWithMaster(FakeBuild): def __init__(self, master): super(FakeBuildWithMaster, self).__init__() self.master = master class TestInterpolateSecrets(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:foo)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo bar") @defer.inlineCallbacks def test_secret_not_found(self): command = Interpolate("echo %(secret:fuo)s") yield self.assertFailure(self.build.render(command), defer.FirstError) gc.collect() self.flushLoggedErrors(defer.FirstError) self.flushLoggedErrors(KeyError) class TestInterpolateSecretsNoService(TestReactorMixin, unittest.TestCase, ConfigErrorsMixin): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:fuo)s") yield self.assertFailure(self.build.render(command), defer.FirstError) gc.collect() self.flushLoggedErrors(defer.FirstError) self.flushLoggedErrors(KeyError) class TestInterpolateSecretsHiddenSecrets(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"foo": "bar", "other": "value"}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.build = FakeBuildWithMaster(self.master) @defer.inlineCallbacks def test_secret(self): command = Interpolate("echo %(secret:foo)s") rendered = yield self.build.render(command) cleantext = self.build.build_status.properties.cleanupTextFromSecrets(rendered) self.assertEqual(cleantext, "echo ") buildbot-2.6.0/master/buildbot/test/unit/test_janitor_configurator.py000066400000000000000000000060531361162603000262230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
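# test_interpolate_secrets.py above covers both halves of secret handling:
# %(secret:name)s is resolved through the SecretManager attached to the master,
# and cleanupTextFromSecrets() strips the resolved value back out of any text
# before it can be logged (the assertion above expects the "bar" value to be
# masked out of "echo bar").  A hedged sketch of that flow; it assumes a build
# object wired to a master whose SecretManager holds {"foo": "bar"}, exactly as
# the fixtures above build it, so it is not a standalone program.

from twisted.internet import defer

from buildbot.process.properties import Interpolate


@defer.inlineCallbacks
def render_and_scrub(build):
    command = Interpolate("echo %(secret:foo)s")
    rendered = yield build.render(command)      # -> "echo bar"
    # never log `rendered` directly; scrub known secret values first
    scrubbed = build.build_status.properties.cleanupTextFromSecrets(rendered)
    return rendered, scrubbed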
# # Copyright Buildbot Team Members import datetime from datetime import timedelta import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.configurators import janitor from buildbot.configurators.janitor import JANITOR_NAME from buildbot.configurators.janitor import JanitorConfigurator from buildbot.configurators.janitor import LogChunksJanitor from buildbot.process.results import SUCCESS from buildbot.schedulers.forcesched import ForceScheduler from buildbot.schedulers.timed import Nightly from buildbot.test.util import config as configmixin from buildbot.test.util import configurators from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util import datetime2epoch from buildbot.worker.local import LocalWorker class JanitorConfiguratorTests(configurators.ConfiguratorMixin, unittest.SynchronousTestCase): ConfiguratorClass = JanitorConfigurator def test_nothing(self): self.setupConfigurator() self.assertEqual(self.config_dict, { }) def test_basic(self): self.setupConfigurator(logHorizon=timedelta(weeks=1)) self.expectWorker(JANITOR_NAME, LocalWorker) self.expectScheduler(JANITOR_NAME, Nightly) self.expectScheduler(JANITOR_NAME + "_force", ForceScheduler) self.expectBuilderHasSteps(JANITOR_NAME, [LogChunksJanitor]) self.expectNoConfigError() class LogChunksJanitorTests(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpBuildStep() self.patch(janitor, "now", lambda: datetime.datetime(year=2017, month=1, day=1)) def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_basic(self): self.setupStep( LogChunksJanitor(logHorizon=timedelta(weeks=1))) self.master.db.logs.deleteOldLogChunks = mock.Mock(return_value=3) self.expectOutcome(result=SUCCESS, state_string="deleted 3 logchunks") yield self.runStep() expected_timestamp = datetime2epoch(datetime.datetime(year=2016, month=12, day=25)) self.master.db.logs.deleteOldLogChunks.assert_called_with(expected_timestamp) buildbot-2.6.0/master/buildbot/test/unit/test_locks.py000066400000000000000000000513321361162603000231060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
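# The JanitorConfigurator tests above check that one configurator entry expands
# into a LocalWorker, a Nightly scheduler, a ForceScheduler and a builder whose
# only step is LogChunksJanitor, which calls db.logs.deleteOldLogChunks() with
# the horizon timestamp.  A minimal master.cfg fragment enabling it might look
# like the sketch below; only the logHorizon argument is taken from the tests,
# while the surrounding c['configurators'] assignment is the conventional way
# to register configurators and should be checked against the documentation
# for your Buildbot version.

from datetime import timedelta

from buildbot.configurators.janitor import JanitorConfigurator

c = BuildmasterConfig = {}
c['configurators'] = [
    # prune build log chunks older than one week, as in test_basic above
    JanitorConfigurator(logHorizon=timedelta(weeks=1)),
]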
# # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.locks import BaseLock from buildbot.locks import LockAccess from buildbot.locks import MasterLock from buildbot.locks import RealMasterLock from buildbot.locks import RealWorkerLock from buildbot.locks import WorkerLock from buildbot.util.eventual import flushEventualQueue class Requester: pass class BaseLockTests(unittest.TestCase): @parameterized.expand(['counting', 'exclusive']) def test_is_available_empty(self, mode): req = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode self.assertTrue(lock.isAvailable(req, access)) @parameterized.expand(['counting', 'exclusive']) def test_is_available_without_waiter(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) lock.release(req, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) @parameterized.expand(['counting', 'exclusive']) def test_is_available_with_waiter(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) lock.claim(req_waiter, access) lock.release(req_waiter, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) @parameterized.expand(['counting', 'exclusive']) def test_is_available_with_multiple_waiters(self, mode): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertFalse(lock.isAvailable(req, access)) self.assertFalse(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) def test_is_available_with_multiple_waiters_multiple_counting(self): req1 = Requester() req2 = Requester() req_waiter1 = Requester() req_waiter2 = Requester() req_waiter3 = Requester() lock = BaseLock('test', maxCount=2) access = mock.Mock(spec=LockAccess) access.mode = 'counting' lock.claim(req1, access) lock.claim(req2, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.waitUntilMaybeAvailable(req_waiter3, access) lock.release(req1, access) lock.release(req2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertFalse(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) 
lock.release(req_waiter1, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertFalse(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter3, access) lock.release(req_waiter3, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) def test_is_available_with_mult_waiters_mult_counting_set_maxCount(self): req1 = Requester() req2 = Requester() req_waiter1 = Requester() req_waiter2 = Requester() req_waiter3 = Requester() lock = BaseLock('test', maxCount=2) access = mock.Mock(spec=LockAccess) access.mode = 'counting' lock.claim(req1, access) lock.claim(req2, access) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.waitUntilMaybeAvailable(req_waiter2, access) lock.waitUntilMaybeAvailable(req_waiter3, access) lock.release(req1, access) lock.release(req2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertFalse(lock.isAvailable(req_waiter3, access)) lock.setMaxCount(4) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.setMaxCount(2) lock.waitUntilMaybeAvailable(req_waiter1, access) lock.claim(req_waiter2, access) lock.release(req_waiter2, access) self.assertFalse(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter3, access) lock.release(req_waiter3, access) self.assertTrue(lock.isAvailable(req1, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) self.assertTrue(lock.isAvailable(req_waiter3, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) @parameterized.expand(['counting', 'exclusive']) def test_duplicate_wait_until_maybe_available_throws(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) with self.assertRaises(AssertionError): lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) @parameterized.expand(['counting', 'exclusive']) def test_stop_waiting_ensures_deferred_was_previous_result_of_wait(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) lock.waitUntilMaybeAvailable(req_waiter, access) with 
self.assertRaises(AssertionError): wrong_d = defer.Deferred() lock.stopWaitingUntilAvailable(req_waiter, access, wrong_d) lock.release(req, access) @parameterized.expand(['counting', 'exclusive']) def test_stop_waiting_fires_deferred_if_not_woken(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.stopWaitingUntilAvailable(req_waiter, access, d) self.assertTrue(d.called) lock.release(req, access) @parameterized.expand(['counting', 'exclusive']) @defer.inlineCallbacks def test_stop_waiting_does_not_fire_deferred_if_already_woken(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) yield flushEventualQueue() self.assertTrue(d.called) # note that if the function calls the deferred again, an exception would be thrown from # inside Twisted. lock.stopWaitingUntilAvailable(req_waiter, access, d) @parameterized.expand(['counting', 'exclusive']) def test_stop_waiting_does_not_raise_after_release(self, mode): req = Requester() req_waiter = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) d = lock.waitUntilMaybeAvailable(req_waiter, access) lock.release(req, access) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter, access)) lock.stopWaitingUntilAvailable(req_waiter, access, d) lock.claim(req_waiter, access) lock.release(req_waiter, access) @parameterized.expand(['counting', 'exclusive']) def test_stop_waiting_removes_non_called_waiter(self, mode): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) d1 = lock.waitUntilMaybeAvailable(req_waiter1, access) d2 = lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) yield flushEventualQueue() self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) self.assertTrue(d1.called) lock.stopWaitingUntilAvailable(req_waiter2, access, d2) self.assertFalse(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertFalse(lock.isAvailable(req_waiter2, access)) lock.claim(req_waiter1, access) lock.release(req_waiter1, access) self.assertTrue(lock.isAvailable(req, access)) self.assertTrue(lock.isAvailable(req_waiter1, access)) self.assertTrue(lock.isAvailable(req_waiter2, access)) @parameterized.expand(['counting', 'exclusive']) @defer.inlineCallbacks def test_stop_waiting_wakes_up_next_deferred_if_already_woken(self, mode): req = Requester() req_waiter1 = Requester() req_waiter2 = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.claim(req, access) d1 = lock.waitUntilMaybeAvailable(req_waiter1, access) d2 = lock.waitUntilMaybeAvailable(req_waiter2, access) lock.release(req, access) yield flushEventualQueue() self.assertTrue(d1.called) self.assertFalse(d2.called) lock.stopWaitingUntilAvailable(req_waiter1, access, d1) yield flushEventualQueue() self.assertTrue(d2.called) @parameterized.expand(['counting', 'exclusive']) def 
test_can_release_non_waited_lock(self, mode): req = Requester() req_not_waited = Requester() lock = BaseLock('test', maxCount=1) access = mock.Mock(spec=LockAccess) access.mode = mode lock.release(req_not_waited, access) lock.claim(req, access) lock.release(req, access) yield flushEventualQueue() lock.release(req_not_waited, access) @parameterized.expand([ ('counting', 'counting'), ('counting', 'exclusive'), ('exclusive', 'counting'), ('exclusive', 'exclusive'), ]) @defer.inlineCallbacks def test_release_calls_waiters_in_fifo_order(self, mode1, mode2): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=1) access1 = mock.Mock(spec=LockAccess) access1.mode = mode1 access2 = mock.Mock(spec=LockAccess) access2.mode = mode2 accesses = [access1, access2, access1, access2, access1] expected_called = [False] * 5 lock.claim(req, access1) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access) for req_waiter, access in zip(req_waiters, accesses)] self.assertEqual([d.called for d in deferreds], expected_called) lock.release(req, access1) yield flushEventualQueue() expected_called[0] = True self.assertEqual([d.called for d in deferreds], expected_called) for i in range(4): self.assertTrue(lock.isAvailable(req_waiters[i], accesses[i])) lock.claim(req_waiters[i], accesses[i]) self.assertEqual([d.called for d in deferreds], expected_called) lock.release(req_waiters[i], accesses[i]) yield flushEventualQueue() expected_called[i + 1] = True self.assertEqual([d.called for d in deferreds], expected_called) lock.claim(req_waiters[4], accesses[4]) lock.release(req_waiters[4], accesses[4]) @defer.inlineCallbacks def test_release_calls_multiple_waiters_on_release(self): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=5) access_counting = mock.Mock(spec=LockAccess) access_counting.mode = 'counting' access_excl = mock.Mock(spec=LockAccess) access_excl.mode = 'exclusive' lock.claim(req, access_excl) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access_counting) for req_waiter in req_waiters] self.assertEqual([d.called for d in deferreds], [False] * 5) lock.release(req, access_excl) yield flushEventualQueue() self.assertEqual([d.called for d in deferreds], [True] * 5) @defer.inlineCallbacks def test_release_calls_multiple_waiters_on_setMaxCount(self): req = Requester() req_waiters = [Requester() for _ in range(5)] lock = BaseLock('test', maxCount=1) access_counting = mock.Mock(spec=LockAccess) access_counting.mode = 'counting' lock.claim(req, access_counting) deferreds = [lock.waitUntilMaybeAvailable(req_waiter, access_counting) for req_waiter in req_waiters] self.assertEqual([d.called for d in deferreds], [False] * 5) lock.release(req, access_counting) yield flushEventualQueue() self.assertEqual([d.called for d in deferreds], [True] + [False] * 4) lock.setMaxCount(5) yield flushEventualQueue() self.assertEqual([d.called for d in deferreds], [True] * 5) class RealLockTests(unittest.TestCase): def test_master_lock_init_from_lockid(self): lock = RealMasterLock('lock1') lock.updateFromLockId(MasterLock('lock1', maxCount=3), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 3) self.assertEqual(lock.description, '') def test_master_lock_update_from_lockid(self): lock = RealMasterLock('lock1') lock.updateFromLockId(MasterLock('lock1', maxCount=3), 0) lock.updateFromLockId(MasterLock('lock1', maxCount=4), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 4) 
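# Taken together, the BaseLock tests above pin down the locking protocol:
# claim() only when isAvailable() says so, otherwise waitUntilMaybeAvailable()
# and treat the fired Deferred as a hint that must be re-checked, release()
# when done, and stopWaitingUntilAvailable() to hand back a woken-but-unused
# slot; waiters are served in FIFO order and setMaxCount() may wake several
# counting waiters at once.  The helper below is a hedged restatement of that
# protocol using only calls shown in the tests (lock is a buildbot.locks
# BaseLock instance) -- it is not Buildbot's own acquireLocks implementation.

from twisted.internet import defer


@defer.inlineCallbacks
def claim_politely(lock, requester, access):
    if not lock.isAvailable(requester, access):
        d = lock.waitUntilMaybeAvailable(requester, access)
        yield d
        if not lock.isAvailable(requester, access):
            # woken but beaten to the slot: withdraw instead of claiming
            lock.stopWaitingUntilAvailable(requester, access, d)
            return False
    lock.claim(requester, access)
    return True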
self.assertEqual(lock.description, '') with self.assertRaises(AssertionError): lock.updateFromLockId(MasterLock('lock2', maxCount=4), 0) def test_worker_lock_init_from_lockid(self): lock = RealWorkerLock('lock1') lock.updateFromLockId(WorkerLock('lock1', maxCount=3), 0) self.assertEqual(lock.lockName, 'lock1') self.assertEqual(lock.maxCount, 3) self.assertEqual(lock.description, '') worker_lock = lock.getLockForWorker('worker1') self.assertEqual(worker_lock.lockName, 'lock1') self.assertEqual(worker_lock.maxCount, 3) self.assertTrue(worker_lock.description.startswith( '') self.assertEqual(worker_lock.lockName, 'lock1') self.assertEqual(worker_lock.maxCount, 5) self.assertTrue(worker_lock.description.startswith( ' crossbar init > crossbar start & > export WAMP_ROUTER_URL=ws://localhost:8080/ws > trial buildbot.unit.test_mq_wamp""") # if connection is bad, this test can timeout easily # we reduce the timeout to help maintain the sanity of the developer timeout = 2 @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() if "WAMP_ROUTER_URL" not in os.environ: raise unittest.SkipTest(self.HOW_TO_RUN) self.master = fakemaster.make_master(self) self.mq = wamp.WampMQ() yield self.mq.setServiceParent(self.master) self.connector = self.master.wamp = connector.WampConnector() yield self.connector.setServiceParent(self.master) yield self.master.startService() config = FakeConfig() config.mq['router_url'] = os.environ["WAMP_ROUTER_URL"] yield self.connector.reconfigServiceWithBuildbotConfig(config) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_forward_data(self): d = defer.Deferred() callback = mock.Mock(side_effect=lambda *a, **kw: d.callback(None)) yield self.mq.startConsuming(callback, ('a', 'b')) # _produce returns a deferred yield self.mq._produce(('a', 'b'), 'foo') # calling produce should eventually call the callback with decoding of # topic yield d callback.assert_called_with(('a', 'b'), 'foo') @defer.inlineCallbacks def test_forward_data_wildcard(self): d = defer.Deferred() callback = mock.Mock(side_effect=lambda *a, **kw: d.callback(None)) yield self.mq.startConsuming(callback, ('a', None)) # _produce returns a deferred yield self.mq._produce(('a', 'b'), 'foo') # calling produce should eventually call the callback with decoding of # topic yield d callback.assert_called_with(('a', 'b'), 'foo') buildbot-2.6.0/master/buildbot/test/unit/test_pbmanager.py000066400000000000000000000124511361162603000237260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
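# The WampMQ tests above are integration tests: they skip themselves unless a
# real crossbar router is reachable, with WAMP_ROUTER_URL pointing at it (the
# skip message spells out the crossbar init / crossbar start /
# export WAMP_ROUTER_URL=ws://localhost:8080/ws recipe).  What they exercise is
# a consume-then-produce round trip on tuple topics, where None acts as a
# wildcard element.  A hedged sketch of that round trip, reusing only calls
# visible in the tests (note that _produce is an internal helper, not a
# public API):

from twisted.internet import defer


@defer.inlineCallbacks
def roundtrip(mq):
    received = defer.Deferred()

    def on_message(key, message):
        # key is the decoded topic tuple, e.g. ('a', 'b')
        received.callback((key, message))

    # ('a', None) matches every topic whose first element is 'a'
    yield mq.startConsuming(on_message, ('a', None))
    yield mq._produce(('a', 'b'), 'foo')
    key, message = yield received
    assert (key, message) == (('a', 'b'), 'foo')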
# # Copyright Buildbot Team Members """ Test clean shutdown functionality of the master """ import mock from twisted.cred import credentials from twisted.internet import defer from twisted.spread import pb from twisted.trial import unittest from buildbot import pbmanager class FakeMaster: initLock = defer.DeferredLock() def addService(self, svc): pass @property def master(self): return self class TestPBManager(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.pbm = pbmanager.PBManager() yield self.pbm.setServiceParent(FakeMaster()) self.pbm.startService() self.connections = [] def tearDown(self): return self.pbm.stopService() def perspectiveFactory(self, mind, username): persp = mock.Mock() persp.is_my_persp = True persp.attached = lambda mind: defer.succeed(None) self.connections.append(username) return defer.succeed(persp) @defer.inlineCallbacks def test_repr(self): reg = yield self.pbm.register( 'tcp:0:interface=127.0.0.1', "x", "y", self.perspectiveFactory) self.assertEqual(repr(self.pbm.dispatchers['tcp:0:interface=127.0.0.1']), '') self.assertEqual( repr(reg), '') @defer.inlineCallbacks def test_register_unregister(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) # make sure things look right self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertIn('boris', disp.users) # we can't actually connect to it, as that requires finding the # dynamically allocated port number which is buried out of reach; # however, we can try the requestAvatar and requestAvatarId methods. username = yield disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertEqual(username, b'boris') avatar = yield disp.requestAvatar(b'boris', mock.Mock(), pb.IPerspective) (iface, persp, detach_fn) = avatar self.assertTrue(persp.is_my_persp) self.assertIn('boris', self.connections) yield reg.unregister() @defer.inlineCallbacks def test_double_register_unregister(self): portstr = "tcp:0:interface=127.0.0.1" reg1 = yield self.pbm.register(portstr, "boris", "pass", None) reg2 = yield self.pbm.register(portstr, "ivona", "pass", None) # make sure things look right self.assertEqual(len(self.pbm.dispatchers), 1) self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertIn('boris', disp.users) self.assertIn('ivona', disp.users) yield reg1.unregister() self.assertEqual(len(self.pbm.dispatchers), 1) self.assertIn(portstr, self.pbm.dispatchers) disp = self.pbm.dispatchers[portstr] self.assertNotIn('boris', disp.users) self.assertIn('ivona', disp.users) yield reg2.unregister() self.assertEqual(len(self.pbm.dispatchers), 0) @defer.inlineCallbacks def test_requestAvatarId_noinitLock(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) disp = self.pbm.dispatchers[portstr] d = disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) self.assertTrue(d.called, "requestAvatarId should have been called since the lock is free") yield reg.unregister() @defer.inlineCallbacks def test_requestAvatarId_initLock(self): portstr = "tcp:0:interface=127.0.0.1" reg = yield self.pbm.register(portstr, "boris", "pass", self.perspectiveFactory) disp = self.pbm.dispatchers[portstr] try: # simulate a reconfig/restart in progress yield self.pbm.master.initLock.acquire() # try to authenticate while the lock is locked d = disp.requestAvatarId(credentials.UsernamePassword(b'boris', b'pass')) 
self.assertFalse(d.called, "requestAvatarId should block until the lock is released") finally: # release the lock, it should allow for auth to proceed yield self.pbm.master.initLock.release() self.assertTrue(d.called, "requestAvatarId should have been called after the lock was released") yield reg.unregister() buildbot-2.6.0/master/buildbot/test/unit/test_plugins.py000066400000000000000000000211141361162603000234470ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Unit tests for the plugin framework """ import mock from twisted.trial import unittest from zope.interface import implementer import buildbot.plugins.db from buildbot.errors import PluginDBError from buildbot.interfaces import IPlugin # buildbot.plugins.db needs to be imported for patching, however just 'db' is # much shorter for using in tests db = buildbot.plugins.db class FakeEntry: """ An entry suitable for unit tests """ def __init__(self, name, project_name, version, fail_require, value): self._name = name self._dist = mock.Mock(spec_set=['project_name', 'version']) self._dist.project_name = project_name self._dist.version = version self._fail_require = fail_require self._value = value @property def name(self): "entry name" return self._name @property def dist(self): "dist thingie" return self._dist def require(self): """ handle external dependencies """ if self._fail_require: raise RuntimeError('Fail require as requested') def load(self): """ handle loading """ return self._value class ITestInterface(IPlugin): """ test interface """ def hello(name): "Greets by :param:`name`" @implementer(ITestInterface) class ClassWithInterface: """ a class to implement a simple interface """ def __init__(self, name=None): self._name = name def hello(self, name=None): 'implement the required method' return name or self._name class ClassWithNoInterface: """ just a class """ # NOTE: buildbot.plugins.db prepends the group with common namespace -- # 'buildbot.' 
_FAKE_ENTRIES = { 'buildbot.interface': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithInterface), FakeEntry('deep.path', 'non-existent', 'irrelevant', False, ClassWithInterface) ], 'buildbot.interface_failed': [ FakeEntry('good', 'non-existent', 'irrelevant', True, ClassWithInterface) ], 'buildbot.no_interface': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithNoInterface) ], 'buildbot.no_interface_again': [ FakeEntry('good', 'non-existent', 'irrelevant', False, ClassWithNoInterface) ], 'buildbot.no_interface_failed': [ FakeEntry('good', 'non-existent', 'irrelevant', True, ClassWithNoInterface) ], 'buildbot.duplicates': [ FakeEntry('good', 'non-existent', 'first', False, ClassWithNoInterface), FakeEntry('good', 'non-existent', 'second', False, ClassWithNoInterface) ] } def provide_fake_entries(group): """ give a set of fake entries for known groups """ return _FAKE_ENTRIES.get(group, []) @mock.patch('buildbot.plugins.db.iter_entry_points', provide_fake_entries) class TestBuildbotPlugins(unittest.TestCase): def setUp(self): buildbot.plugins.db._DB = buildbot.plugins.db._PluginDB() def test_check_group_registration(self): with mock.patch.object(buildbot.plugins.db, '_DB', db._PluginDB()): # The groups will be prepended with namespace, so info() will # return a dictionary with right keys, but no data groups = set(_FAKE_ENTRIES.keys()) for group in groups: db.get_plugins(group) registered = set(db.info().keys()) self.assertEqual(registered, groups) self.assertEqual(registered, set(db.namespaces())) def test_interface_provided_simple(self): # Basic check before the actual test self.assertTrue(ITestInterface.implementedBy(ClassWithInterface)) plugins = db.get_plugins('interface', interface=ITestInterface) self.assertTrue('good' in plugins.names) result_get = plugins.get('good') result_getattr = plugins.good self.assertFalse(result_get is None) self.assertTrue(result_get is result_getattr) # Make sure we actually got our class greeter = result_get('yes') self.assertEqual('yes', greeter.hello()) self.assertEqual('no', greeter.hello('no')) def test_missing_plugin(self): plugins = db.get_plugins('interface', interface=ITestInterface) with self.assertRaises(AttributeError): getattr(plugins, 'bad') with self.assertRaises(PluginDBError): plugins.get('bad') with self.assertRaises(PluginDBError): plugins.get('good.extra') def test_interface_provided_deep(self): # Basic check before the actual test self.assertTrue(ITestInterface.implementedBy(ClassWithInterface)) plugins = db.get_plugins('interface', interface=ITestInterface) self.assertTrue('deep.path' in plugins.names) self.assertTrue('deep.path' in plugins) self.assertFalse('even.deeper.path' in plugins) result_get = plugins.get('deep.path') result_getattr = plugins.deep.path self.assertFalse(result_get is None) self.assertTrue(result_get is result_getattr) # Make sure we actually got our class greeter = result_get('yes') self.assertEqual('yes', greeter.hello()) self.assertEqual('no', greeter.hello('no')) def test_interface_provided_deps_failed(self): plugins = db.get_plugins('interface_failed', interface=ITestInterface, check_extras=True) with self.assertRaises(PluginDBError): plugins.get('good') def test_required_interface_not_provided(self): plugins = db.get_plugins('no_interface_again', interface=ITestInterface) self.assertTrue(plugins._interface is ITestInterface) with self.assertRaises(PluginDBError): plugins.get('good') def test_no_interface_provided(self): plugins = db.get_plugins('no_interface') 
self.assertFalse(plugins.get('good') is None) def test_no_interface_provided_deps_failed(self): plugins = db.get_plugins('no_interface_failed', check_extras=True) with self.assertRaises(PluginDBError): plugins.get('good') def test_failure_on_dups(self): with self.assertRaises(PluginDBError): db.get_plugins('duplicates', load_now=True) def test_get_info_on_a_known_plugin(self): plugins = db.get_plugins('interface') self.assertEqual(('non-existent', 'irrelevant'), plugins.info('good')) def test_failure_on_unknown_plugin_info(self): plugins = db.get_plugins('interface') with self.assertRaises(PluginDBError): plugins.info('bad') def test_failure_on_unknown_plugin_get(self): plugins = db.get_plugins('interface') with self.assertRaises(PluginDBError): plugins.get('bad') class SimpleFakeEntry(FakeEntry): def __init__(self, name, value): super().__init__(name, 'non-existent', 'irrelevant', False, value) _WORKER_FAKE_ENTRIES = { 'buildbot.worker': [ SimpleFakeEntry('Worker', ClassWithInterface), SimpleFakeEntry('EC2LatentWorker', ClassWithInterface), SimpleFakeEntry('LibVirtWorker', ClassWithInterface), SimpleFakeEntry('OpenStackLatentWorker', ClassWithInterface), SimpleFakeEntry('newthirdparty', ClassWithInterface), SimpleFakeEntry('deep.newthirdparty', ClassWithInterface), ], 'buildbot.util': [ SimpleFakeEntry('WorkerLock', ClassWithInterface), SimpleFakeEntry('enforceChosenWorker', ClassWithInterface), SimpleFakeEntry('WorkerChoiceParameter', ClassWithInterface), ], } def provide_worker_fake_entries(group): """ give a set of fake entries for known groups """ return _WORKER_FAKE_ENTRIES.get(group, []) buildbot-2.6.0/master/buildbot/test/unit/test_process_botmaster_BotMaster.py000066400000000000000000000174731361162603000275210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
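# provide_worker_fake_entries above mirrors provide_fake_entries: both exist so
# that mock.patch can swap out buildbot.plugins.db.iter_entry_points and the
# plugin DB can be tested without any distribution actually being installed.
# A hedged sketch of that patching pattern, reusing the module-level helpers
# defined above (the check_names function itself is not part of the test
# suite):

import mock

import buildbot.plugins.db as plugin_db


@mock.patch('buildbot.plugins.db.iter_entry_points', provide_worker_fake_entries)
def check_names():
    # reset the cached DB so the patched entry points are re-read
    plugin_db._DB = plugin_db._PluginDB()
    plugins = plugin_db.get_plugins('worker', interface=ITestInterface)
    assert 'EC2LatentWorker' in plugins.names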
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import factory from buildbot.process.botmaster import BotMaster from buildbot.process.results import CANCELLED from buildbot.process.results import RETRY from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestCleanShutdown(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self.master) self.botmaster.startService() def assertReactorStopped(self, _=None): self.assertTrue(self.reactor.stop_called) def assertReactorNotStopped(self, _=None): self.assertFalse(self.reactor.stop_called) def makeFakeBuild(self, waitedFor=False): self.fake_builder = builder = mock.Mock() build_status = mock.Mock() builder.builder_status.getCurrentBuilds.return_value = [build_status] self.build_deferred = defer.Deferred() request = mock.Mock() request.waitedFor = waitedFor build = mock.Mock() build.stopBuild = self.stopFakeBuild build.waitUntilFinished.return_value = self.build_deferred build.requests = [request] builder.building = [build] self.botmaster.builders = mock.Mock() self.botmaster.builders.values.return_value = [builder] def stopFakeBuild(self, reason, results): self.reason = reason self.results = results self.finishFakeBuild() def finishFakeBuild(self): self.fake_builder.building = [] self.build_deferred.callback(None) # tests def test_shutdown_idle(self): """Test that the master shuts down when it's idle""" self.botmaster.cleanShutdown() self.assertReactorStopped() def test_shutdown_busy(self): """Test that the master shuts down after builds finish""" self.makeFakeBuild() self.botmaster.cleanShutdown() # check that we haven't stopped yet, since there's a running build self.assertReactorNotStopped() # try to shut it down again, just to check that this does not fail self.botmaster.cleanShutdown() # Now we cause the build to finish self.finishFakeBuild() # And now we should be stopped self.assertReactorStopped() def test_shutdown_busy_quick(self): """Test that the master shuts down after builds finish""" self.makeFakeBuild() self.botmaster.cleanShutdown(quickMode=True) # And now we should be stopped self.assertReactorStopped() self.assertEqual(self.results, RETRY) def test_shutdown_busy_quick_cancelled(self): """Test that the master shuts down after builds finish""" self.makeFakeBuild(waitedFor=True) self.botmaster.cleanShutdown(quickMode=True) # And now we should be stopped self.assertReactorStopped() self.assertEqual(self.results, CANCELLED) def test_shutdown_cancel_not_shutting_down(self): """Test that calling cancelCleanShutdown when none is in progress works""" # this just shouldn't fail.. self.botmaster.cancelCleanShutdown() def test_shutdown_cancel(self): """Test that we can cancel a shutdown""" self.makeFakeBuild() self.botmaster.cleanShutdown() # Next we check that we haven't stopped yet, since there's a running # build. self.assertReactorNotStopped() # but the BuildRequestDistributor should not be running self.assertFalse(self.botmaster.brd.running) # Cancel the shutdown self.botmaster.cancelCleanShutdown() # Now we cause the build to finish self.finishFakeBuild() # We should still be running! 
self.assertReactorNotStopped() # and the BuildRequestDistributor should be, as well self.assertTrue(self.botmaster.brd.running) class TestBotMaster(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True) self.master.mq = self.master.mq self.master.botmaster.disownServiceParent() self.botmaster = BotMaster() yield self.botmaster.setServiceParent(self.master) self.new_config = mock.Mock() self.botmaster.startService() def tearDown(self): return self.botmaster.stopService() @defer.inlineCallbacks def test_reconfigServiceWithBuildbotConfig(self): # check that reconfigServiceBuilders is called. self.patch(self.botmaster, 'reconfigServiceBuilders', mock.Mock(side_effect=lambda c: defer.succeed(None))) self.patch(self.botmaster, 'maybeStartBuildsForAllBuilders', mock.Mock()) new_config = mock.Mock() yield self.botmaster.reconfigServiceWithBuildbotConfig(new_config) self.botmaster.reconfigServiceBuilders.assert_called_with( new_config) self.assertTrue( self.botmaster.maybeStartBuildsForAllBuilders.called) @defer.inlineCallbacks def test_reconfigServiceBuilders_add_remove(self): bc = config.BuilderConfig(name='bldr', factory=factory.BuildFactory(), workername='f') self.new_config.builders = [bc] yield self.botmaster.reconfigServiceBuilders(self.new_config) bldr = self.botmaster.builders['bldr'] self.assertIdentical(bldr.parent, self.botmaster) self.assertIdentical(bldr.master, self.master) self.assertEqual(self.botmaster.builderNames, ['bldr']) self.new_config.builders = [] yield self.botmaster.reconfigServiceBuilders(self.new_config) self.assertIdentical(bldr.parent, None) self.assertIdentical(bldr.master, None) self.assertEqual(self.botmaster.builders, {}) self.assertEqual(self.botmaster.builderNames, []) def test_maybeStartBuildsForBuilder(self): brd = self.botmaster.brd = mock.Mock() self.botmaster.maybeStartBuildsForBuilder('frank') brd.maybeStartBuildsOn.assert_called_once_with(['frank']) def test_maybeStartBuildsForWorker(self): brd = self.botmaster.brd = mock.Mock() b1 = mock.Mock(name='frank') b1.name = 'frank' b2 = mock.Mock(name='larry') b2.name = 'larry' self.botmaster.getBuildersForWorker = mock.Mock(return_value=[b1, b2]) self.botmaster.maybeStartBuildsForWorker('centos') self.botmaster.getBuildersForWorker.assert_called_once_with('centos') brd.maybeStartBuildsOn.assert_called_once_with(['frank', 'larry']) def test_maybeStartBuildsForAll(self): brd = self.botmaster.brd = mock.Mock() self.botmaster.builderNames = ['frank', 'larry'] self.botmaster.maybeStartBuildsForAllBuilders() brd.maybeStartBuildsOn.assert_called_once_with(['frank', 'larry']) buildbot-2.6.0/master/buildbot/test/unit/test_process_build.py000066400000000000000000001130611361162603000246260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
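# The BotMaster tests above drive reconfiguration with a single BuilderConfig
# and then check that builders appear in (and disappear from)
# botmaster.builders and builderNames, and that maybeStartBuildsForWorker() /
# maybeStartBuildsForAllBuilders() simply fan out to the
# BuildRequestDistributor.  The same BuilderConfig is what a master.cfg
# declares; a minimal, hedged fragment mirroring the values used in the test:

from buildbot.config import BuilderConfig
from buildbot.process.factory import BuildFactory

builders = [
    # one builder named 'bldr' that runs an (empty) factory on worker 'f',
    # exactly the configuration reconfigServiceBuilders is fed in the test
    BuilderConfig(name='bldr', workername='f', factory=BuildFactory()),
]
# in a real master.cfg this list would be assigned to c['builders']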
# # Copyright Buildbot Team Members import operator import posixpath from mock import Mock from mock import call from twisted.internet import defer from twisted.trial import unittest from zope.interface import implementer from buildbot import interfaces from buildbot.locks import WorkerLock from buildbot.process.build import Build from buildbot.process.buildstep import BuildStep from buildbot.process.buildstep import LoggingBuildStep from buildbot.process.metrics import MetricLogObserver from buildbot.process.properties import Properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.fake import worker from buildbot.test.fake.fakebuild import FakeBuildStatus from buildbot.test.util.misc import TestReactorMixin class FakeChange: def __init__(self, number=None): self.properties = Properties() self.number = number self.who = "me" class FakeSource: def __init__(self): self.sourcestampsetid = None self.changes = [] self.branch = None self.revision = None self.repository = '' self.codebase = '' self.project = '' self.patch_info = None self.patch = None def getRepository(self): return self.repository class FakeRequest: def __init__(self): self.sources = [] self.reason = "Because" self.properties = Properties() self.id = 9385 def mergeSourceStampsWith(self, others): return self.sources def mergeReasons(self, others): return self.reason class FakeBuildStep(BuildStep): def __init__(self): super().__init__(haltOnFailure=False, flunkOnWarnings=False, flunkOnFailure=True, warnOnWarnings=True, warnOnFailure=False, alwaysRun=False, name='fake') self._summary = {'step': 'result', 'build': 'build result'} self._expected_results = SUCCESS def run(self): return self._expected_results def getResultSummary(self): return self._summary def interrupt(self, reason): self.running = False self.interrupted = reason class FakeBuilder: def __init__(self, master): self.config = Mock() self.config.workerbuilddir = 'wbd' self.name = 'fred' self.master = master self.botmaster = master.botmaster self.builderid = 83 self._builders = {} self.config_version = 0 def getBuilderId(self): return defer.succeed(self.builderid) def setupProperties(self, props): pass def buildFinished(self, build, workerforbuilder): pass def getBuilderIdForName(self, name): return defer.succeed(self._builders.get(name, None) or self.builderid) @implementer(interfaces.IBuildStepFactory) class FakeStepFactory: """Fake step factory that just returns a fixed step object.""" def __init__(self, step): self.step = step def buildStep(self): return self.step class TestException(Exception): pass @implementer(interfaces.IBuildStepFactory) class FailingStepFactory: """Fake step factory that just returns a fixed step object.""" def buildStep(self): raise TestException("FailingStepFactory") class _StepController(): def __init__(self, step): self._step = step def finishStep(self, result): self._step._deferred.callback(result) class _ControllableStep(BuildStep): def __init__(self): super().__init__() self._deferred = defer.Deferred() def run(self): return self._deferred def makeControllableStepFactory(): step = _ControllableStep() controller = _StepController(step) return controller, FakeStepFactory(step) class TestBuild(TestReactorMixin, 
unittest.TestCase): def setUp(self): self.setUpTestReactor() r = FakeRequest() r.sources = [FakeSource()] r.sources[0].changes = [FakeChange()] r.sources[0].revision = "12345" self.request = r self.master = fakemaster.make_master(self, wantData=True) self.worker = worker.FakeWorker(self.master) self.worker.attached(None) self.builder = FakeBuilder(self.master) self.build = Build([r]) self.build.conn = fakeprotocol.FakeConnection(self.master, self.worker) self.workerforbuilder = Mock(name='workerforbuilder') self.workerforbuilder.worker = self.worker self.workerforbuilder.prepare = lambda _: True self.workerforbuilder.ping = lambda: True self.build.setBuilder(self.builder) self.build.text = [] self.build.buildid = 666 def assertWorkerPreparationFailure(self, reason): states = "".join(self.master.data.updates.stepStateString.values()) self.assertIn(states, reason) def testRunSuccessfulBuild(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) def testStopBuild(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) def startStep(*args, **kw): # Now interrupt the build b.stopBuild("stop it") return defer.Deferred() step.startStep = startStep b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, CANCELLED) self.assertIn('stop it', step.interrupted) def testBuildRetryWhenWorkerPrepareReturnFalse(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) self.workerforbuilder.prepare = lambda _: False b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, RETRY) self.assertWorkerPreparationFailure('error while worker_prepare') def testBuildCancelledWhenWorkerPrepareReturnFalseBecauseBuildStop(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) d = defer.Deferred() self.workerforbuilder.prepare = lambda _: d b.startBuild(FakeBuildStatus(), self.workerforbuilder) b.stopBuild('Cancel Build', CANCELLED) d.callback(False) self.assertEqual(b.results, CANCELLED) self.assertWorkerPreparationFailure('error while worker_prepare') def testBuildRetryWhenWorkerPrepareReturnFalseBecauseBuildStop(self): b = self.build step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) d = defer.Deferred() self.workerforbuilder.prepare = lambda _: d b.startBuild(FakeBuildStatus(), self.workerforbuilder) b.stopBuild('Cancel Build', RETRY) d.callback(False) self.assertEqual(b.results, RETRY) self.assertWorkerPreparationFailure('error while worker_prepare') @defer.inlineCallbacks def testAlwaysRunStepStopBuild(self): """Test that steps marked with alwaysRun=True still get run even if the build is stopped.""" # Create a build with 2 steps, the first one will get interrupted, and # the second one is marked with alwaysRun=True b = self.build step1 = FakeBuildStep() step1.alwaysRun = False step1.results = None step2 = FakeBuildStep() step2.alwaysRun = True step2.results = None b.setStepFactories([ FakeStepFactory(step1), FakeStepFactory(step2), ]) def startStep1(*args, **kw): # Now interrupt the build b.stopBuild("stop it") return defer.succeed(SUCCESS) step1.startStep = startStep1 step1.stepDone = lambda: False step2Started = [False] def startStep2(*args, **kw): step2Started[0] = True return defer.succeed(SUCCESS) step2.startStep = startStep2 step1.stepDone = lambda: False yield b.startBuild(FakeBuildStatus(), self.workerforbuilder) 
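# The point of this test: stopping the build interrupts step1, but step2 was
# constructed with alwaysRun=True, so it still executes before the build
# settles on CANCELLED.  In a real configuration the same flag is what keeps
# cleanup steps running after a failure or cancellation; a hedged master.cfg
# style example (step names and commands are illustrative only):

from buildbot.plugins import steps, util

build_factory = util.BuildFactory()
build_factory.addStep(steps.ShellCommand(name="build", command=["make", "all"]))
build_factory.addStep(
    # runs even if the build step fails or the build is stopped
    steps.ShellCommand(name="cleanup", command=["make", "clean"], alwaysRun=True))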
self.assertEqual(b.results, CANCELLED) self.assertIn('stop it', step1.interrupted) self.assertTrue(step2Started[0]) @defer.inlineCallbacks def testBuild_canAcquireLocks(self): b = self.build workerforbuilder1 = Mock() workerforbuilder2 = Mock() lock = WorkerLock('lock') counting_access = lock.access('counting') real_lock = yield b.builder.botmaster.getLockByID(lock, 0) # no locks, so both these pass (call twice to verify there's no # state/memory) lock_list = [(real_lock, counting_access)] self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_1 = real_lock.getLockForWorker( workerforbuilder1.worker.workername) worker_lock_2 = real_lock.getLockForWorker( workerforbuilder2.worker.workername) # then have workerforbuilder2 claim its lock: worker_lock_2.claim(workerforbuilder2, counting_access) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_2.release(workerforbuilder2, counting_access) # then have workerforbuilder1 claim its lock: worker_lock_1.claim(workerforbuilder1, counting_access) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertFalse( Build._canAcquireLocks(lock_list, workerforbuilder1)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) self.assertTrue( Build._canAcquireLocks(lock_list, workerforbuilder2)) worker_lock_1.release(workerforbuilder1, counting_access) def testBuilddirPropType(self): b = self.build b.build_status = Mock() b.builder.config.workerbuilddir = 'test' self.workerforbuilder.worker.worker_basedir = "/srv/buildbot/worker" self.workerforbuilder.worker.path_module = posixpath b.getProperties = Mock() b.setProperty = Mock() b.setupWorkerBuildirProperty(self.workerforbuilder) expected_path = '/srv/buildbot/worker/test' b.setProperty.assert_has_calls( [call('builddir', expected_path, 'Worker')], any_order=True) @defer.inlineCallbacks def testBuildLocksAcquired(self): b = self.build lock = WorkerLock('lock') claimCount = [0] lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) def claim(owner, access): claimCount[0] += 1 return real_lock.old_claim(owner, access) real_lock.old_claim = real_lock.claim real_lock.claim = claim yield b.setLocks([lock_access]) step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) self.assertEqual(claimCount[0], 1) @defer.inlineCallbacks def testBuildLocksOrder(self): """Test that locks are acquired in FIFO order; specifically that counting locks cannot jump ahead of exclusive locks""" eBuild = self.build cBuilder = FakeBuilder(self.master) cBuild = Build([self.request]) cBuild.setBuilder(cBuilder) eWorker = Mock() cWorker = Mock() eWorker.worker = self.worker cWorker.worker = self.worker eWorker.prepare = cWorker.prepare = lambda _: True eWorker.ping = cWorker.ping = lambda: True lock = WorkerLock('lock', 2) claimLog = [] 
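# The real lock's claim() gets wrapped below so that every successful acquisition is appended to claimLog; the final assertion relies on this to prove that the exclusive request (eBuild) is granted before the counting request (cBuild), i.e. strictly FIFO.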
real_workerlock = yield self.master.botmaster.getLockByID(lock, 0) realLock = real_workerlock.getLockForWorker(self.worker.workername) def claim(owner, access): claimLog.append(owner) return realLock.oldClaim(owner, access) realLock.oldClaim = realLock.claim realLock.claim = claim yield eBuild.setLocks([lock.access('exclusive')]) yield cBuild.setLocks([lock.access('counting')]) fakeBuild = Mock() fakeBuildAccess = lock.access('counting') realLock.claim(fakeBuild, fakeBuildAccess) step = FakeBuildStep() eBuild.setStepFactories([FakeStepFactory(step)]) cBuild.setStepFactories([FakeStepFactory(step)]) e = eBuild.startBuild(FakeBuildStatus(), eWorker) c = cBuild.startBuild(FakeBuildStatus(), cWorker) d = defer.DeferredList([e, c]) realLock.release(fakeBuild, fakeBuildAccess) yield d self.assertEqual(eBuild.results, SUCCESS) self.assertEqual(cBuild.results, SUCCESS) self.assertEqual(claimLog, [fakeBuild, eBuild, cBuild]) @defer.inlineCallbacks def testBuildWaitingForLocks(self): b = self.build lock = WorkerLock('lock') claimCount = [0] lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) def claim(owner, access): claimCount[0] += 1 return real_lock.old_claim(owner, access) real_lock.old_claim = real_lock.claim real_lock.claim = claim yield b.setLocks([lock_access]) step = FakeBuildStep() b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(claimCount[0], 1) self.assertTrue(b.currentStep is None) self.assertTrue(b._acquiringLock is not None) @defer.inlineCallbacks def testStopBuildWaitingForLocks(self): b = self.build lock = WorkerLock('lock') lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) yield b.setLocks([lock_access]) step = FakeBuildStep() step.alwaysRun = False b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) def acquireLocks(res=None): retval = Build.acquireLocks(b, res) b.stopBuild('stop it') return retval b.acquireLocks = acquireLocks b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertTrue(b.currentStep is None) self.assertEqual(b.results, CANCELLED) @defer.inlineCallbacks def testStopBuildWaitingForLocks_lostRemote(self): b = self.build lock = WorkerLock('lock') lock_access = lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) yield b.setLocks([lock_access]) step = FakeBuildStep() step.alwaysRun = False b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) def acquireLocks(res=None): retval = Build.acquireLocks(b, res) b.lostRemote() return retval b.acquireLocks = acquireLocks b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertTrue(b.currentStep is None) self.assertEqual(b.results, RETRY) self.build.build_status.setText.assert_called_with( ["retry", "lost", "connection"]) self.build.build_status.setResults.assert_called_with(RETRY) @defer.inlineCallbacks def testStopBuildWaitingForStepLocks(self): b = self.build lock = WorkerLock('lock') lock_access = 
lock.access('counting') lock.access = lambda mode: lock_access real_workerlock = yield b.builder.botmaster.getLockByID(lock, 0) real_lock = real_workerlock.getLockForWorker(self.workerforbuilder.worker.workername) step = LoggingBuildStep(locks=[lock_access]) b.setStepFactories([FakeStepFactory(step)]) real_lock.claim(Mock(), lock.access('counting')) gotLocks = [False] def acquireLocks(res=None): gotLocks[0] = True retval = LoggingBuildStep.acquireLocks(step, res) self.assertTrue(b.currentStep is step) b.stopBuild('stop it') return retval step.acquireLocks = acquireLocks b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(gotLocks, [True]) self.assertEqual(b.results, CANCELLED) def testStepDone(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(SUCCESS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneHaltOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.haltOnFailure = True terminate = b.stepDone(FAILURE, step) self.assertTrue(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneHaltOnFailureNoFlunkOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnFailure = False step.haltOnFailure = True terminate = b.stepDone(FAILURE, step) self.assertTrue(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneFlunkOnWarningsFlunkOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnFailure = True step.flunkOnWarnings = True b.stepDone(WARNINGS, step) terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneNoWarnOnWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.warnOnWarnings = False terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, SUCCESS) def testStepDoneWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, WARNINGS) def testStepDoneFail(self): b = self.build b.results = SUCCESS step = FakeBuildStep() terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneFailOverridesWarnings(self): b = self.build b.results = WARNINGS step = FakeBuildStep() terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneWarnOnFailure(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.warnOnFailure = True step.flunkOnFailure = False terminate = b.stepDone(FAILURE, step) self.assertFalse(terminate.result) self.assertEqual(b.results, WARNINGS) def testStepDoneFlunkOnWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnWarnings = True terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneHaltOnFailureFlunkOnWarnings(self): b = self.build b.results = SUCCESS step = FakeBuildStep() step.flunkOnWarnings = True self.haltOnFailure = True terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def testStepDoneWarningsDontOverrideFailure(self): b = self.build b.results = FAILURE step = FakeBuildStep() terminate = b.stepDone(WARNINGS, step) self.assertFalse(terminate.result) self.assertEqual(b.results, FAILURE) def 
testStepDoneRetryOverridesAnythingElse(self): b = self.build b.results = RETRY step = FakeBuildStep() step.alwaysRun = True b.stepDone(WARNINGS, step) b.stepDone(FAILURE, step) b.stepDone(SUCCESS, step) terminate = b.stepDone(EXCEPTION, step) self.assertTrue(terminate.result) self.assertEqual(b.results, RETRY) def test_getSummaryStatistic(self): b = self.build b.executedSteps = [ BuildStep(), BuildStep(), BuildStep() ] b.executedSteps[0].setStatistic('casualties', 7) b.executedSteps[2].setStatistic('casualties', 4) add = operator.add self.assertEqual(b.getSummaryStatistic('casualties', add), 11) self.assertEqual(b.getSummaryStatistic('casualties', add, 10), 21) @defer.inlineCallbacks def testflushProperties(self): b = self.build b.build_status = FakeBuildStatus() b.setProperty("foo", "bar", "test") b.buildid = 43 result = 'SUCCESS' res = yield b._flushProperties(result) self.assertEqual(res, result) self.assertEqual(self.master.data.updates.properties, [(43, 'foo', 'bar', 'test')]) def create_fake_steps(self, names): steps = [] def create_fake_step(name): step = FakeBuildStep() step.name = name return step for name in names: step = create_fake_step(name) steps.append(step) return steps @defer.inlineCallbacks def testAddStepsAfterCurrentStep(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["d", "e"]) b.addStepsAfterCurrentStep([FakeStepFactory(s) for s in new_steps]) return SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) yield b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "d", "e", "c"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) @defer.inlineCallbacks def testAddStepsAfterLastStep(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["d", "e"]) b.addStepsAfterLastStep([FakeStepFactory(s) for s in new_steps]) return SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) yield b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "c", "d", "e"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesUnique(self): # if the step names are unique they should remain unchanged b = self.build steps = self.create_fake_steps(["clone", "command", "clean"]) b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["clone", "command", "clean"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesDuplicate(self): b = self.build steps = self.create_fake_steps(["stage", "stage", "stage"]) b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["stage", "stage_1", "stage_2"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) def testStepNamesDuplicateAfterAdd(self): b = self.build steps = self.create_fake_steps(["a", "b", "c"]) def startStepB(*args, **kw): new_steps = self.create_fake_steps(["c", "c"]) b.addStepsAfterCurrentStep([FakeStepFactory(s) for s in new_steps]) return 
SUCCESS steps[1].startStep = startStepB b.setStepFactories([FakeStepFactory(s) for s in steps]) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(b.results, SUCCESS) expected_names = ["a", "b", "c_1", "c_2", "c"] executed_names = [s.name for s in b.executedSteps] self.assertEqual(executed_names, expected_names) @defer.inlineCallbacks def testGetUrl(self): self.build.number = 3 url = yield self.build.getUrl() self.assertEqual(url, 'http://localhost:8080/#builders/83/builds/3') @defer.inlineCallbacks def testGetUrlForVirtualBuilder(self): # Let's fake a virtual builder self.builder._builders['wilma'] = 108 self.build.setProperty('virtual_builder_name', 'wilma', 'Build') self.build.setProperty('virtual_builder_tags', ['_virtual_']) self.build.number = 33 url = yield self.build.getUrl() self.assertEqual(url, 'http://localhost:8080/#builders/108/builds/33') def test_active_builds_metric(self): """ The number of active builds is increased when a build starts and decreased when it finishes. """ b = self.build controller, step_factory = makeControllableStepFactory() b.setStepFactories([step_factory]) observer = MetricLogObserver() observer.enable() self.addCleanup(observer.disable) def get_active_builds(): return observer.asDict()['counters'].get('active_builds', 0) self.assertEqual(get_active_builds(), 0) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.assertEqual(get_active_builds(), 1) controller.finishStep(SUCCESS) self.assertEqual(get_active_builds(), 0) def test_active_builds_metric_failure(self): """ The number of active builds is increased when a build starts and decreased when it finishes.. """ b = self.build b.setStepFactories([FailingStepFactory()]) observer = MetricLogObserver() observer.enable() self.addCleanup(observer.disable) def get_active_builds(): return observer.asDict()['counters'].get('active_builds', 0) self.assertEqual(get_active_builds(), 0) b.startBuild(FakeBuildStatus(), self.workerforbuilder) self.flushLoggedErrors(TestException) self.assertEqual(get_active_builds(), 0) class TestMultipleSourceStamps(unittest.TestCase): def setUp(self): r = FakeRequest() s1 = FakeSource() s1.repository = "repoA" s1.codebase = "A" s1.changes = [FakeChange(10), FakeChange(11)] s1.revision = "12345" s2 = FakeSource() s2.repository = "repoB" s2.codebase = "B" s2.changes = [FakeChange(12), FakeChange(13)] s2.revision = "67890" s3 = FakeSource() s3.repository = "repoC" # no codebase defined s3.changes = [FakeChange(14), FakeChange(15)] s3.revision = "111213" r.sources.extend([s1, s2, s3]) self.build = Build([r]) def test_buildReturnSourceStamp(self): """ Test that a build returns the correct sourcestamp """ source1 = self.build.getSourceStamp("A") source2 = self.build.getSourceStamp("B") self.assertEqual( [source1.repository, source1.revision], ["repoA", "12345"]) self.assertEqual( [source2.repository, source2.revision], ["repoB", "67890"]) def test_buildReturnSourceStamp_empty_codebase(self): """ Test that a build returns the correct sourcestamp if codebase is empty """ codebase = '' source3 = self.build.getSourceStamp(codebase) self.assertTrue(source3 is not None) self.assertEqual( [source3.repository, source3.revision], ["repoC", "111213"]) class TestBuildBlameList(unittest.TestCase): def setUp(self): self.sourceByMe = FakeSource() self.sourceByMe.repository = "repoA" self.sourceByMe.codebase = "A" self.sourceByMe.changes = [FakeChange(10), FakeChange(11)] self.sourceByMe.changes[0].who = "me" self.sourceByMe.changes[1].who = "me" self.sourceByHim = 
FakeSource() self.sourceByHim.repository = "repoB" self.sourceByHim.codebase = "B" self.sourceByHim.changes = [FakeChange(12), FakeChange(13)] self.sourceByHim.changes[0].who = "him" self.sourceByHim.changes[1].who = "him" self.patchSource = FakeSource() self.patchSource.repository = "repoB" self.patchSource.codebase = "B" self.patchSource.changes = [] self.patchSource.revision = "67890" self.patchSource.patch_info = ("jeff", "jeff's new feature") def test_blamelist_for_changes(self): r = FakeRequest() r.sources.extend([self.sourceByMe, self.sourceByHim]) build = Build([r]) blamelist = build.blamelist() self.assertEqual(blamelist, ['him', 'me']) def test_blamelist_for_patch(self): r = FakeRequest() r.sources.extend([self.patchSource]) build = Build([r]) blamelist = build.blamelist() # If no patch is set, author will not be est self.assertEqual(blamelist, []) class TestSetupProperties_MultipleSources(TestReactorMixin, unittest.TestCase): """ Test that the property values, based on the available requests, are initialized properly """ def setUp(self): self.setUpTestReactor() self.props = {} r = FakeRequest() r.sources = [] r.sources.append(FakeSource()) r.sources[0].changes = [FakeChange()] r.sources[0].repository = "http://svn-repo-A" r.sources[0].codebase = "A" r.sources[0].branch = "develop" r.sources[0].revision = "12345" r.sources.append(FakeSource()) r.sources[1].changes = [FakeChange()] r.sources[1].repository = "http://svn-repo-B" r.sources[1].codebase = "B" r.sources[1].revision = "34567" self.build = Build([r]) self.build.setStepFactories([]) self.builder = FakeBuilder(fakemaster.make_master(self, wantData=True)) self.build.setBuilder(self.builder) self.build.build_status = FakeBuildStatus() # record properties that will be set self.build.properties.setProperty = self.setProperty def setProperty(self, n, v, s, runtime=False): if s not in self.props: self.props[s] = {} if not self.props[s]: self.props[s] = {} self.props[s][n] = v def test_sourcestamp_properties_not_set(self): self.build.setupOwnProperties() self.assertNotIn("codebase", self.props["Build"]) self.assertNotIn("revision", self.props["Build"]) self.assertNotIn("branch", self.props["Build"]) self.assertNotIn("project", self.props["Build"]) self.assertNotIn("repository", self.props["Build"]) class TestSetupProperties_SingleSource(TestReactorMixin, unittest.TestCase): """ Test that the property values, based on the available requests, are initialized properly """ def setUp(self): self.setUpTestReactor() self.props = {} r = FakeRequest() r.sources = [] r.sources.append(FakeSource()) r.sources[0].changes = [FakeChange()] r.sources[0].repository = "http://svn-repo-A" r.sources[0].codebase = "A" r.sources[0].branch = "develop" r.sources[0].revision = "12345" self.build = Build([r]) self.build.setStepFactories([]) self.builder = FakeBuilder(fakemaster.make_master(self, wantData=True)) self.build.setBuilder(self.builder) self.build.build_status = FakeBuildStatus() # record properties that will be set self.build.properties.setProperty = self.setProperty def setProperty(self, n, v, s, runtime=False): if s not in self.props: self.props[s] = {} if not self.props[s]: self.props[s] = {} self.props[s][n] = v def test_properties_codebase(self): self.build.setupOwnProperties() codebase = self.props["Build"]["codebase"] self.assertEqual(codebase, "A") def test_properties_repository(self): self.build.setupOwnProperties() repository = self.props["Build"]["repository"] self.assertEqual(repository, "http://svn-repo-A") def 
test_properties_revision(self): self.build.setupOwnProperties() revision = self.props["Build"]["revision"] self.assertEqual(revision, "12345") def test_properties_branch(self): self.build.setupOwnProperties() branch = self.props["Build"]["branch"] self.assertEqual(branch, "develop") def test_property_project(self): self.build.setupOwnProperties() project = self.props["Build"]["project"] self.assertEqual(project, '') class TestBuildProperties(TestReactorMixin, unittest.TestCase): """ Test that a Build has the necessary L{IProperties} methods, and that they properly delegate to the C{build_status} attribute - so really just a test of the L{IProperties} adapter. """ def setUp(self): self.setUpTestReactor() @implementer(interfaces.IProperties) class FakeProperties(Mock): pass FakeProperties.render = Mock(side_effect=lambda x: x) class FakeBuildStatus(Mock): pass r = FakeRequest() r.sources = [FakeSource()] r.sources[0].changes = [FakeChange()] r.sources[0].revision = "12345" self.master = fakemaster.make_master(self, wantData=True) self.worker = worker.FakeWorker(self.master) self.worker.attached(None) self.workerforbuilder = Mock(name='workerforbuilder') self.workerforbuilder.worker = self.worker self.build = Build([r]) self.build.setStepFactories([]) self.builder = FakeBuilder(fakemaster.make_master(self, wantData=True)) self.build.setBuilder(self.builder) self.properties = self.build.properties = FakeProperties() self.build_status = FakeBuildStatus() self.build._flushProperties = Mock() self.build.startBuild(self.build_status, self.workerforbuilder) def test_getProperty(self): self.build.getProperty('x') self.properties.getProperty.assert_called_with('x', None) def test_getProperty_default(self): self.build.getProperty('x', 'nox') self.properties.getProperty.assert_called_with('x', 'nox') def test_setProperty(self): self.build.setProperty('n', 'v', 's') self.properties.setProperty.assert_called_with('n', 'v', 's', runtime=True) def test_hasProperty(self): self.properties.hasProperty.return_value = True self.assertTrue(self.build.hasProperty('p')) self.properties.hasProperty.assert_called_with('p') def test_has_key(self): self.properties.has_key.return_value = True # getattr because pep8 doesn't like calls to has_key self.assertTrue(getattr(self.build, 'has_key')('p')) # has_key calls through to hasProperty self.properties.hasProperty.assert_called_with('p') def test_render(self): self.build.render("xyz") self.properties.render.assert_called_with("xyz") buildbot-2.6.0/master/buildbot/test/unit/test_process_builder.py000066400000000000000000000522501361162603000251570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import random import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import builder from buildbot.process import factory from buildbot.process.properties import Properties from buildbot.process.properties import renderer from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import epoch2datetime from buildbot.worker import AbstractLatentWorker class BuilderMixin: def setUpBuilderMixin(self): self.factory = factory.BuildFactory() self.master = fakemaster.make_master(self, wantData=True) self.mq = self.master.mq self.db = self.master.db # returns a Deferred that returns None def makeBuilder(self, name="bldr", patch_random=False, noReconfig=False, **config_kwargs): """Set up C{self.bldr}""" # only include the necessary required config, plus user-requested config_args = dict(name=name, workername="wrk", builddir="bdir", workerbuilddir="wbdir", factory=self.factory) config_args.update(config_kwargs) self.builder_config = config.BuilderConfig(**config_args) self.bldr = builder.Builder( self.builder_config.name) self.bldr.master = self.master self.bldr.botmaster = self.master.botmaster # patch into the _startBuildsFor method self.builds_started = [] def _startBuildFor(workerforbuilder, buildrequests): self.builds_started.append((workerforbuilder, buildrequests)) return defer.succeed(True) self.bldr._startBuildFor = _startBuildFor if patch_random: # patch 'random.choice' to always take the worker that sorts # last, based on its name self.patch(random, "choice", lambda lst: sorted(lst, key=lambda m: m.name)[-1]) self.bldr.startService() mastercfg = config.MasterConfig() mastercfg.builders = [self.builder_config] if not noReconfig: return self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) class FakeWorker: builds_may_be_incompatible = False def __init__(self, workername): self.workername = workername class TestBuilder(TestReactorMixin, BuilderMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # a collection of rows that would otherwise clutter up every test self.setUpBuilderMixin() self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), ] @defer.inlineCallbacks def makeBuilder(self, patch_random=False, startBuildsForSucceeds=True, **config_kwargs): yield super().makeBuilder(patch_random=patch_random, **config_kwargs) # patch into the _startBuildsFor method self.builds_started = [] def _startBuildFor(workerforbuilder, buildrequests): self.builds_started.append((workerforbuilder, buildrequests)) return defer.succeed(startBuildsForSucceeds) self.bldr._startBuildFor = _startBuildFor def assertBuildsStarted(self, exp): # munge builds_started into a list of (worker, [brids]) builds_started = [ (wrk.name, [br.id for br in buildreqs]) for (wrk, buildreqs) in self.builds_started] self.assertEqual(sorted(builds_started), sorted(exp)) def setWorkerForBuilders(self, workerforbuilders): """C{workerforbuilders} maps name : available""" self.bldr.workers = [] for name, avail in workerforbuilders.items(): wfb = mock.Mock(spec=['isAvailable'], name=name) wfb.name = name wfb.isAvailable.return_value = avail self.bldr.workers.append(wfb) # services @defer.inlineCallbacks def test_maybeStartBuild_builder_stopped(self): yield 
self.makeBuilder() # this will cause an exception if maybeStartBuild tries to start self.bldr.workers = None # so we just hope this does not fail yield self.bldr.stopService() started = yield self.bldr.maybeStartBuild(None, []) self.assertEqual(started, False) # maybeStartBuild def _makeMocks(self): worker = mock.Mock() worker.name = 'worker' buildrequest = mock.Mock() buildrequest.id = 10 buildrequests = [buildrequest] return worker, buildrequests @defer.inlineCallbacks def test_maybeStartBuild(self): yield self.makeBuilder() worker, buildrequests = self._makeMocks() started = yield self.bldr.maybeStartBuild(worker, buildrequests) self.assertEqual(started, True) self.assertBuildsStarted([('worker', [10])]) @defer.inlineCallbacks def test_maybeStartBuild_failsToStart(self): yield self.makeBuilder(startBuildsForSucceeds=False) worker, buildrequests = self._makeMocks() started = yield self.bldr.maybeStartBuild(worker, buildrequests) self.assertEqual(started, False) self.assertBuildsStarted([('worker', [10])]) @defer.inlineCallbacks def do_test_getCollapseRequestsFn(self, builder_param=None, global_param=None, expected=0): def cble(): pass builder_param = cble if builder_param == 'callable' else builder_param global_param = cble if global_param == 'callable' else global_param # omit the constructor parameter if None was given if builder_param is None: yield self.makeBuilder() else: yield self.makeBuilder(collapseRequests=builder_param) self.master.config.collapseRequests = global_param fn = self.bldr.getCollapseRequestsFn() if fn == builder.Builder._defaultCollapseRequestFn: fn = "default" elif fn is cble: fn = 'callable' self.assertEqual(fn, expected) def test_getCollapseRequestsFn_defaults(self): self.do_test_getCollapseRequestsFn(None, None, "default") def test_getCollapseRequestsFn_global_True(self): self.do_test_getCollapseRequestsFn(None, True, "default") def test_getCollapseRequestsFn_global_False(self): self.do_test_getCollapseRequestsFn(None, False, None) def test_getCollapseRequestsFn_global_function(self): self.do_test_getCollapseRequestsFn(None, 'callable', 'callable') def test_getCollapseRequestsFn_builder_True(self): self.do_test_getCollapseRequestsFn(True, False, "default") def test_getCollapseRequestsFn_builder_False(self): self.do_test_getCollapseRequestsFn(False, True, None) def test_getCollapseRequestsFn_builder_function(self): self.do_test_getCollapseRequestsFn('callable', None, 'callable') # canStartBuild @defer.inlineCallbacks def test_canStartBuild_no_constraints(self): yield self.makeBuilder() wfb = mock.Mock() wfb.worker = FakeWorker('worker') startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) startable = yield self.bldr.canStartBuild(wfb, 101) self.assertEqual(startable, True) @defer.inlineCallbacks def test_canStartBuild_config_canStartBuild_returns_value(self): yield self.makeBuilder() def canStartBuild(bldr, worker, breq): return breq == 100 canStartBuild = mock.Mock(side_effect=canStartBuild) self.bldr.config.canStartBuild = canStartBuild wfb = mock.Mock() wfb.worker = FakeWorker('worker') startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) canStartBuild.assert_called_with(self.bldr, wfb, 100) canStartBuild.reset_mock() startable = yield self.bldr.canStartBuild(wfb, 101) self.assertEqual(startable, False) canStartBuild.assert_called_with(self.bldr, wfb, 101) canStartBuild.reset_mock() @defer.inlineCallbacks def test_canStartBuild_config_canStartBuild_returns_deferred(self): yield 
self.makeBuilder() wfb = mock.Mock() wfb.worker = FakeWorker('worker') def canStartBuild(bldr, wfb, breq): return defer.succeed(breq == 100) canStartBuild = mock.Mock(side_effect=canStartBuild) self.bldr.config.canStartBuild = canStartBuild startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) canStartBuild.assert_called_with(self.bldr, wfb, 100) canStartBuild.reset_mock() startable = yield self.bldr.canStartBuild(wfb, 101) self.assertEqual(startable, False) canStartBuild.assert_called_with(self.bldr, wfb, 101) canStartBuild.reset_mock() @defer.inlineCallbacks def test_canStartBuild_cant_acquire_locks_but_no_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[]) wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, True) @defer.inlineCallbacks def test_canStartBuild_with_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[mock.Mock()]) wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, False) @defer.inlineCallbacks def test_canStartBuild_with_renderable_locks(self): yield self.makeBuilder() self.bldr.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[mock.Mock()]) renderedLocks = [False] @renderer def rendered_locks(props): renderedLocks[0] = True return [mock.Mock()] self.bldr.config.locks = rendered_locks wfb = mock.Mock() wfb.worker = FakeWorker('worker') with mock.patch( 'buildbot.process.build.Build._canAcquireLocks', mock.Mock(return_value=False)): with mock.patch( 'buildbot.process.build.Build.setupPropertiesKnownBeforeBuildStarts', mock.Mock()): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertEqual(startable, False) self.assertTrue(renderedLocks[0]) @defer.inlineCallbacks def test_canStartBuild_with_incompatible_latent_worker(self): yield self.makeBuilder() class FakeLatentWorker(AbstractLatentWorker): builds_may_be_incompatible = True def __init__(self): pass def isCompatibleWithBuild(self, build_props): return defer.succeed(False) def checkConfig(self, name, _, **kwargs): pass def reconfigService(self, name, _, **kwargs): pass wfb = mock.Mock() wfb.worker = FakeLatentWorker() with mock.patch( 'buildbot.process.build.Build.setupPropertiesKnownBeforeBuildStarts', mock.Mock()): startable = yield self.bldr.canStartBuild(wfb, 100) self.assertFalse(startable) @defer.inlineCallbacks def test_canStartBuild_enforceChosenWorker(self): """enforceChosenWorker rejects and accepts builds""" yield self.makeBuilder() self.bldr.config.canStartBuild = builder.enforceChosenWorker workerforbuilder = mock.Mock() workerforbuilder.worker = FakeWorker('worker5') breq = mock.Mock() # no worker requested breq.properties = {} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # worker requested as the right one breq.properties = {'workername': 'worker5'} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # worker requested as the wrong one breq.properties = {'workername': 'worker4'} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(False, result) # worker set to non 
string value gets skipped breq.properties = {'workername': 0} result = yield self.bldr.canStartBuild(workerforbuilder, breq) self.assertIdentical(True, result) # other methods @defer.inlineCallbacks def test_getBuilderId(self): self.factory = factory.BuildFactory() self.master = fakemaster.make_master(self, wantData=True) # only include the necessary required config, plus user-requested self.bldr = builder.Builder('bldr') self.bldr.master = self.master self.master.data.updates.findBuilderId = fbi = mock.Mock() fbi.return_value = defer.succeed(13) builderid = yield self.bldr.getBuilderId() self.assertEqual(builderid, 13) fbi.assert_called_with('bldr') fbi.reset_mock() builderid = yield self.bldr.getBuilderId() self.assertEqual(builderid, 13) fbi.assert_not_called() def test_expectations_deprecated(self): self.successResultOf(self.makeBuilder()) with assertProducesWarning( Warning, message_pattern="'Builder.expectations' is deprecated."): deprecated = self.bldr.expectations self.assertIdentical(deprecated, None) @defer.inlineCallbacks def test_defaultProperties(self): props = Properties() props.setProperty('foo', 1, 'Scheduler') props.setProperty('bar', 'bleh', 'Change') yield self.makeBuilder(defaultProperties={'bar': 'onoes', 'cuckoo': 42}) self.bldr.setupProperties(props) self.assertEquals(props.getProperty('bar'), 'bleh') self.assertEquals(props.getProperty('cuckoo'), 42) class TestGetBuilderId(TestReactorMixin, BuilderMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() @defer.inlineCallbacks def test_getBuilderId(self): # noReconfig because reconfigService calls getBuilderId, and we haven't # set up the mock findBuilderId yet. yield self.makeBuilder(name='b1', noReconfig=True) fbi = self.master.data.updates.findBuilderId = mock.Mock(name='fbi') fbi.side_effect = lambda name: defer.succeed(13) # call twice.. 
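# -- the second call must be answered from the cached builder id, so the mocked findBuilderId is expected to be consulted exactly once (verified just below).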
self.assertEqual((yield self.bldr.getBuilderId()), 13) self.assertEqual((yield self.bldr.getBuilderId()), 13) # and see that fbi was only called once fbi.assert_called_once_with('b1') # check that the name was unicodified arg = fbi.mock_calls[0][1][0] self.assertIsInstance(arg, str) class TestGetOldestRequestTime(TestReactorMixin, BuilderMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() # a collection of rows that would otherwise clutter up every test master_id = fakedb.FakeBuildRequestsComponent.MASTER_ID self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), fakedb.Builder(id=77, name='bldr1'), fakedb.Builder(id=78, name='bldr2'), fakedb.Builder(id=182, name='foo@bar'), fakedb.BuildRequest(id=111, submitted_at=1000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=222, submitted_at=2000, builderid=77, buildsetid=11), fakedb.BuildRequestClaim(brid=222, masterid=master_id, claimed_at=2001), fakedb.BuildRequest(id=333, submitted_at=3000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=444, submitted_at=2500, builderid=78, buildsetid=11), fakedb.BuildRequestClaim(brid=444, masterid=master_id, claimed_at=2501), fakedb.BuildRequest(id=555, submitted_at=2800, builderid=182, buildsetid=11), ] yield self.db.insertTestData(self.base_rows) @defer.inlineCallbacks def test_gort_unclaimed(self): yield self.makeBuilder(name='bldr1') rqtime = yield self.bldr.getOldestRequestTime() self.assertEqual(rqtime, epoch2datetime(1000)) @defer.inlineCallbacks def test_gort_bldr_name_not_identifier(self): # this is a regression test for #2940 yield self.makeBuilder(name='foo@bar') rqtime = yield self.bldr.getOldestRequestTime() self.assertEqual(rqtime, epoch2datetime(2800)) @defer.inlineCallbacks def test_gort_all_claimed(self): yield self.makeBuilder(name='bldr2') rqtime = yield self.bldr.getOldestRequestTime() self.assertEqual(rqtime, None) class TestGetNewestCompleteTime(TestReactorMixin, BuilderMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpBuilderMixin() # a collection of rows that would otherwise clutter up every test master_id = fakedb.FakeBuildRequestsComponent.MASTER_ID self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(buildsetid=11, sourcestampid=21), fakedb.Builder(id=77, name='bldr1'), fakedb.Builder(id=78, name='bldr2'), fakedb.BuildRequest(id=111, submitted_at=1000, complete_at=1000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=222, submitted_at=2000, complete_at=4000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=333, submitted_at=3000, complete_at=3000, builderid=77, buildsetid=11), fakedb.BuildRequest(id=444, submitted_at=2500, builderid=78, buildsetid=11), fakedb.BuildRequestClaim(brid=444, masterid=master_id, claimed_at=2501), ] yield self.db.insertTestData(self.base_rows) @defer.inlineCallbacks def test_gnct_completed(self): yield self.makeBuilder(name='bldr1') rqtime = yield self.bldr.getNewestCompleteTime() self.assertEqual(rqtime, epoch2datetime(4000)) @defer.inlineCallbacks def test_gnct_no_completed(self): yield self.makeBuilder(name='bldr2') rqtime = yield self.bldr.getNewestCompleteTime() self.assertEqual(rqtime, None) class TestReconfig(TestReactorMixin, BuilderMixin, unittest.TestCase): """Tests that a reconfig properly updates all attributes""" def setUp(self): self.setUpTestReactor() 
self.setUpBuilderMixin() @defer.inlineCallbacks def test_reconfig(self): yield self.makeBuilder(description="Old", tags=["OldTag"]) config_args = dict(name='bldr', workername="wrk", builddir="bdir", workerbuilddir="wbdir", factory=self.factory, description='Noe', tags=['NewTag']) new_builder_config = config.BuilderConfig(**config_args) new_builder_config.description = "New" new_builder_config.tags = ["NewTag"] mastercfg = config.MasterConfig() mastercfg.builders = [new_builder_config] yield self.bldr.reconfigServiceWithBuildbotConfig(mastercfg) self.assertEqual( dict(description=self.bldr.builder_status.getDescription(), tags=self.bldr.builder_status.getTags()), dict(description="New", tags=["NewTag"])) self.assertIdentical(self.bldr.config, new_builder_config) # check that the reconfig grabbed a buliderid self.assertNotEqual(self.bldr._builderid, None) buildbot-2.6.0/master/buildbot/test/unit/test_process_buildrequest.py000066400000000000000000000622171361162603000262450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import buildrequest from buildbot.process.builder import Builder from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestBuildRequestCollapser(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True) self.master.botmaster = mock.Mock(name='botmaster') self.master.botmaster.builders = {} self.builders = {} self.bldr = yield self.createBuilder('A', builderid=77) @defer.inlineCallbacks def createBuilder(self, name, builderid=None): if builderid is None: b = fakedb.Builder(name=name) yield self.master.db.insertTestData([b]) builderid = b.id bldr = mock.Mock(name=name) bldr.name = name bldr.master = self.master self.master.botmaster.builders[name] = bldr self.builders[name] = bldr bldr.getCollapseRequestsFn = lambda: False return bldr def tearDown(self): pass @defer.inlineCallbacks def do_request_collapse(self, rows, brids, exp): yield self.master.db.insertTestData(rows) brCollapser = buildrequest.BuildRequestCollapser(self.master, brids) self.assertEqual(exp, (yield brCollapser.collapse())) def test_collapseRequests_no_other_request(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # Allow all requests self.fail("Should never be called") return True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn rows = [ fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='A'), fakedb.Change(changeid=14, codebase='A', sourcestampid=234), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), 
fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] return self.do_request_collapse(rows, [19], []) BASE_ROWS = [ fakedb.Builder(id=77, name='A'), fakedb.SourceStamp(id=234, codebase='C'), fakedb.Buildset(id=30, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=234, buildsetid=30), fakedb.SourceStamp(id=235, codebase='C'), fakedb.Buildset(id=31, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=235, buildsetid=31), fakedb.SourceStamp(id=236, codebase='C'), fakedb.Buildset(id=32, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=236, buildsetid=32), fakedb.BuildRequest(id=19, buildsetid=30, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.BuildRequest(id=20, buildsetid=31, builderid=77, priority=13, submitted_at=1300305712, results=-1), fakedb.BuildRequest(id=21, buildsetid=32, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] def test_collapseRequests_no_collapse(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # Fail all collapse attempts return False self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21], []) def test_collapseRequests_collapse_all(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # collapse all attempts return True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21], [19, 20]) def test_collapseRequests_collapse_all_duplicates(self): def collapseRequests_fn(master, builder, brdict1, brdict2): # collapse all attempts return True self.bldr.getCollapseRequestsFn = lambda: collapseRequests_fn return self.do_request_collapse(self.BASE_ROWS, [21, 21], [19, 20]) # As documented: # Sourcestamps are compatible if all of the below conditions are met: # # * Their codebase, branch, project, and repository attributes match exactly # * Neither source stamp has a patch (e.g., from a try scheduler) # * Either both source stamps are associated with changes, or neither are associated with changes but they have matching revisions. 
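# Editor's sketch: the compatibility rules from the comment block above written
# out as a plain predicate over two sourcestamp dicts. This is an illustrative
# paraphrase, not the actual BuildRequest.canBeCollapsed implementation, and it
# assumes dict-style sourcestamps using the keys seen in the fakedb rows of
# these tests (codebase, branch, project, repository, patchid, revision).
def sourcestamps_compatible(ss1, ss2, ss1_has_changes, ss2_has_changes):
    # codebase, branch, project and repository must match exactly
    for key in ('codebase', 'branch', 'project', 'repository'):
        if ss1.get(key) != ss2.get(key):
            return False
    # neither sourcestamp may carry a patch (e.g. one coming from a try scheduler)
    if ss1.get('patchid') is not None or ss2.get('patchid') is not None:
        return False
    # either both sourcestamps are associated with changes, or neither is but
    # their revisions match
    if ss1_has_changes and ss2_has_changes:
        return True
    if not ss1_has_changes and not ss2_has_changes:
        return ss1.get('revision') == ss2.get('revision')
    return False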
def makeBuildRequestRows(self, brid, bsid, changeid, ssid, codebase, branch=None, project=None, repository=None, patchid=None, revision=None): rows = [ fakedb.SourceStamp(id=ssid, codebase=codebase, branch=branch, project=project, repository=repository, patchid=patchid, revision=revision), fakedb.Buildset(id=bsid, reason='foo', submitted_at=1300305712, results=-1), fakedb.BuildsetSourceStamp(sourcestampid=ssid, buildsetid=bsid), fakedb.BuildRequest(id=brid, buildsetid=bsid, builderid=77, priority=13, submitted_at=1300305712, results=-1), ] if changeid: rows.append( fakedb.Change(changeid=changeid, branch='trunk', revision='9283', repository='svn://...', project='world-domination', sourcestampid=ssid) ) if patchid: rows.append( fakedb.Patch(id=patchid, patch_base64='aGVsbG8sIHdvcmxk', patch_author='bar', patch_comment='foo', subdir='/foo', patchlevel=3)) return rows @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19, 20]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_branches(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', 'br1') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', 'br2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', 'br2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', 'br3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_repository(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', None, 'p1') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', None, 'p2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', None, 'p2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', None, 'p3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_codebases_projects(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A', None, None, 'project1') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C', None, None, 'project2') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', None, None, 
'project2') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C', None, None, 'project3') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [19]) # * Neither source stamp has a patch (e.g., from a try scheduler) @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_a_patch(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', patchid=123) rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) # * Either both source stamps are associated with changes.. @defer.inlineCallbacks def test_collapseRequests_collapse_default_with_changes(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, 123, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C') rows += self.makeBuildRequestRows(20, 120, 124, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) # * ... or neither are associated with changes but they have matching revisions. 
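# Editor's sketch (not part of this test module): what a user-supplied
# collapseRequests callable can look like. The (master, builder, req1, req2)
# signature mirrors the collapseRequests_fn helpers in this class, and
# delegating to BuildRequest.canBeCollapsed reuses the default compatibility
# rules documented above. Treating the request dicts as having a 'priority'
# key is an assumption based on the fakedb.BuildRequest rows used here.
from twisted.internet import defer
from buildbot.process.buildrequest import BuildRequest


@defer.inlineCallbacks
def collapse_if_same_priority(master, builder, req1, req2):
    # never collapse requests that carry different priorities ...
    if req1.get('priority') != req2.get('priority'):
        return False
    # ... otherwise fall back to the stock sourcestamp compatibility check
    collapsible = yield BuildRequest.canBeCollapsed(master, req1, req2)
    return collapsible

# hypothetical master.cfg usage:
#   c['collapseRequests'] = collapse_if_same_priority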
@defer.inlineCallbacks def test_collapseRequests_collapse_default_with_non_matching_revision(self): def collapseRequests_fn(master, builder, brdict1, brdict2): return buildrequest.BuildRequest.canBeCollapsed(builder.master, brdict1, brdict2) rows = [ fakedb.Builder(id=77, name='A'), ] rows += self.makeBuildRequestRows(22, 122, None, 222, 'A') rows += self.makeBuildRequestRows(21, 121, None, 221, 'C') rows += self.makeBuildRequestRows(19, 119, None, 210, 'C', revision='abcd1234') rows += self.makeBuildRequestRows(20, 120, None, 220, 'C') self.bldr.getCollapseRequestsFn = lambda: Builder._defaultCollapseRequestFn yield self.do_request_collapse(rows, [22], []) yield self.do_request_collapse(rows, [21], [20]) class TestBuildRequest(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_fromBrdict(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9284', repository='svn://...', project='world-domination'), fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://...', project='world-domination', sourcestampid=234), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildsetProperty(buildsetid=539, property_name='x', property_value='[1, "X"]'), fakedb.BuildsetProperty(buildsetid=539, property_name='y', property_value='[2, "Y"]'), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77, priority=13, submitted_at=1200000000), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) br = yield buildrequest.BuildRequest.fromBrdict(master, brdict) # check enough of the source stamp to verify it found the changes self.assertEqual([ss.ssid for ss in br.sources.values()], [234]) self.assertEqual(br.reason, 'triggered') self.assertEqual(br.properties.getProperty('x'), 1) self.assertEqual(br.properties.getProperty('y'), 2) self.assertEqual(br.submittedAt, 1200000000) self.assertEqual(br.buildername, 'bldr') self.assertEqual(br.priority, 13) self.assertEqual(br.id, 288) self.assertEqual(br.bsid, 539) @defer.inlineCallbacks def test_fromBrdict_submittedAt_NULL(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=77, name='bldr'), fakedb.SourceStamp(id=234, branch='trunk', revision='9284', repository='svn://...', project='world-domination'), fakedb.Buildset(id=539, reason='triggered'), fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234), fakedb.BuildRequest(id=288, buildsetid=539, builderid=77, priority=13, submitted_at=None), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict brdict = yield master.db.buildrequests.getBuildRequest(288) br = yield buildrequest.BuildRequest.fromBrdict(master, brdict) # remaining fields assumed to be checked in test_fromBrdict self.assertEqual(br.submittedAt, None) def test_fromBrdict_no_sourcestamps(self): master = fakemaster.make_master(self, wantData=True, wantDb=True) master.db.insertTestData([ fakedb.Builder(id=78, name='not important'), fakedb.Buildset(id=539, reason='triggered'), # buildset has no sourcestamps fakedb.BuildRequest(id=288, buildsetid=539, builderid=78, priority=0, submitted_at=None), ]) # use getBuildRequest to minimize the risk from changes to the format # of the brdict d = 
master.db.buildrequests.getBuildRequest(288)
        d.addCallback(lambda brdict:
                      buildrequest.BuildRequest.fromBrdict(master, brdict))
        return self.assertFailure(d, AssertionError)

    @defer.inlineCallbacks
    def test_fromBrdict_multiple_sourcestamps(self):
        master = fakemaster.make_master(self, wantData=True, wantDb=True)
        master.db.insertTestData([
            fakedb.Builder(id=77, name='bldr'),
            fakedb.SourceStamp(id=234, branch='trunk', revision='9283', repository='svn://a..',
                               codebase='A', project='world-domination'),
            fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://a..',
                          codebase='A', project='world-domination', sourcestampid=234),
            fakedb.SourceStamp(id=235, branch='trunk', revision='9284', repository='svn://b..',
                               codebase='B', project='world-domination'),
            fakedb.Change(changeid=14, branch='trunk', revision='9284', repository='svn://b..',
                          codebase='B', project='world-domination', sourcestampid=235),
            fakedb.Buildset(id=539, reason='triggered'),
            fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234),
            fakedb.BuildsetProperty(buildsetid=539, property_name='x', property_value='[1, "X"]'),
            fakedb.BuildsetProperty(buildsetid=539, property_name='y', property_value='[2, "Y"]'),
            fakedb.BuildRequest(id=288, buildsetid=539, builderid=77,
                                priority=13, submitted_at=1200000000),
        ])
        # use getBuildRequest to minimize the risk from changes to the format
        # of the brdict
        brdict = yield master.db.buildrequests.getBuildRequest(288)
        br = yield buildrequest.BuildRequest.fromBrdict(master, brdict)
        self.assertEqual(br.reason, 'triggered')
        self.assertEqual(br.properties.getProperty('x'), 1)
        self.assertEqual(br.properties.getProperty('y'), 2)
        self.assertEqual(br.submittedAt, 1200000000)
        self.assertEqual(br.buildername, 'bldr')
        self.assertEqual(br.priority, 13)
        self.assertEqual(br.id, 288)
        self.assertEqual(br.bsid, 539)

    @defer.inlineCallbacks
    def test_mergeSourceStampsWith_common_codebases(self):
        """
        This testcase has two buildrequests
        Request Change Codebase Revision Comment
        ----------------------------------------------------------------------
        288     13     A        9283
        289     15     A        9284
        288     14     B        9200
        289     16     B        9201
        --------------------------------
        After merged in Build:
        Source1 has rev 9284 and contains changes 13 and 15 from repository svn://a
        Source2 has rev 9201 and contains changes 14 and 16 from repository svn://b
        """
        brs = []  # list of buildrequests
        master = fakemaster.make_master(self, wantData=True, wantDb=True)
        master.db.insertTestData([
            fakedb.Builder(id=77, name='bldr'),
            fakedb.SourceStamp(id=234, branch='trunk', revision='9283', repository='svn://a..',
                               codebase='A', project='world-domination'),
            fakedb.Change(changeid=13, branch='trunk', revision='9283', repository='svn://a..',
                          codebase='A', project='world-domination', sourcestampid=234),
            fakedb.SourceStamp(id=235, branch='trunk', revision='9200', repository='svn://b..',
                               codebase='B', project='world-domination'),
            fakedb.Change(changeid=14, branch='trunk', revision='9200', repository='svn://b..',
                          codebase='A', project='world-domination', sourcestampid=235),
            fakedb.SourceStamp(id=236, branch='trunk', revision='9284', repository='svn://a..',
                               codebase='A', project='world-domination'),
            fakedb.Change(changeid=15, branch='trunk', revision='9284', repository='svn://a..',
                          codebase='A', project='world-domination', sourcestampid=236),
            fakedb.SourceStamp(id=237, branch='trunk', revision='9201', repository='svn://b..',
                               codebase='B', project='world-domination'),
            fakedb.Change(changeid=16, branch='trunk', revision='9201', repository='svn://b..',
                          codebase='B', project='world-domination', sourcestampid=237),
            fakedb.Buildset(id=539, reason='triggered'),
            fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=234),
            fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=235),
            fakedb.BuildRequest(id=288, buildsetid=539, builderid=77),
            fakedb.Buildset(id=540, reason='triggered'),
            fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=236),
            fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=237),
            fakedb.BuildRequest(id=289, buildsetid=540, builderid=77),
        ])
        # use getBuildRequest to minimize the risk from changes to the format
        # of the brdict
        brdict = yield master.db.buildrequests.getBuildRequest(288)
        res = yield buildrequest.BuildRequest.fromBrdict(master, brdict)
        brs.append(res)
        brdict = yield master.db.buildrequests.getBuildRequest(289)
        res = yield buildrequest.BuildRequest.fromBrdict(master, brdict)
        brs.append(res)
        sources = brs[0].mergeSourceStampsWith(brs[1:])
        source1 = source2 = None
        for source in sources:
            if source.codebase == 'A':
                source1 = source
            if source.codebase == 'B':
                source2 = source
        self.assertFalse(source1 is None)
        self.assertEqual(source1.revision, '9284')
        self.assertFalse(source2 is None)
        self.assertEqual(source2.revision, '9201')

    @defer.inlineCallbacks
    def test_canBeCollapsed_different_codebases_raises_error(self):
        """
        This testcase has two buildrequests
        Request Change Codebase Revision Comment
        ----------------------------------------------------------------------
        288     17     C        1800     request 1 has repo not in request 2
        289     18     D        2100     request 2 has repo not in request 1
        --------------------------------
        Merge cannot be performed and raises error:
            Merging requests requires both requests to have the same codebases
        """
        brDicts = []  # list of buildrequests dictionary
        master = fakemaster.make_master(self, wantData=True, wantDb=True)
        master.db.insertTestData([
            fakedb.Builder(id=77, name='bldr'),
            fakedb.SourceStamp(id=238, branch='trunk', revision='1800', repository='svn://c..',
                               codebase='C', project='world-domination'),
            fakedb.Change(changeid=17, branch='trunk', revision='1800', repository='svn://c..',
                          codebase='C', project='world-domination', sourcestampid=238),
            fakedb.SourceStamp(id=239, branch='trunk', revision='2100', repository='svn://d..',
                               codebase='D', project='world-domination'),
            fakedb.Change(changeid=18, branch='trunk', revision='2100', repository='svn://d..',
                          codebase='D', project='world-domination', sourcestampid=239),
            fakedb.Buildset(id=539, reason='triggered'),
            fakedb.BuildsetSourceStamp(buildsetid=539, sourcestampid=238),
            fakedb.BuildRequest(id=288, buildsetid=539, builderid=77),
            fakedb.Buildset(id=540, reason='triggered'),
            fakedb.BuildsetSourceStamp(buildsetid=540, sourcestampid=239),
            fakedb.BuildRequest(id=289, buildsetid=540, builderid=77),
        ])
        # use getBuildRequest to minimize the risk from changes to the format
        # of the brdict
        req = yield master.db.buildrequests.getBuildRequest(288)
        brDicts.append(req)
        req = yield master.db.buildrequests.getBuildRequest(289)
        brDicts.append(req)
        can_collapse = \
            yield buildrequest.BuildRequest.canBeCollapsed(master, brDicts[0], brDicts[1])
        self.assertEqual(can_collapse, False)
buildbot-2.6.0/master/buildbot/test/unit/test_process_buildrequestdistributor.py000066400000000000000000001044061361162603000305350ustar00rootroot00000000000000
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import random import mock from twisted.internet import defer from twisted.python import failure from twisted.trial import unittest from buildbot import config from buildbot.db import buildrequests from buildbot.process import buildrequestdistributor from buildbot.process import factory from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.warnings import assertProducesWarning from buildbot.util import epoch2datetime from buildbot.util.eventual import fireEventually def nth_worker(n): def pick_nth_by_name(builder, workers=None, br=None): if workers is None: workers = builder workers = workers[:] workers.sort(key=lambda a: a.name) return workers[n] return pick_nth_by_name class TestBRDBase(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.botmaster = mock.Mock(name='botmaster') self.botmaster.builders = {} self.builders = {} def prioritizeBuilders(master, builders): # simple sort-by-name by default return sorted(builders, key=lambda b1: b1.name) self.master = self.botmaster.master = \ fakemaster.make_master(self, wantData=True, wantDb=True) self.master.caches = fakemaster.FakeCaches() self.master.config.prioritizeBuilders = prioritizeBuilders self.brd = buildrequestdistributor.BuildRequestDistributor( self.botmaster) self.brd.parent = self.botmaster self.brd.startService() # a collection of rows that would otherwise clutter up every test self.base_rows = [ fakedb.SourceStamp(id=21), fakedb.Builder(id=77, name='A'), fakedb.Buildset(id=11, reason='because'), fakedb.BuildsetSourceStamp(sourcestampid=21, buildsetid=11), ] def tearDown(self): if self.brd.running: return self.brd.stopService() def make_workers(self, worker_count): rows = self.base_rows[:] for i in range(worker_count): self.addWorkers({'test-worker%d' % i: 1}) rows.append(fakedb.Buildset(id=100 + i, reason='because')) rows.append( fakedb.BuildsetSourceStamp(buildsetid=100 + i, sourcestampid=21)) rows.append( fakedb.BuildRequest(id=10 + i, buildsetid=100 + i, builderid=77)) return rows def addWorkers(self, workerforbuilders): """C{workerforbuilders} maps name : available""" for name, avail in workerforbuilders.items(): wfb = mock.Mock(spec=['isAvailable'], name=name) wfb.name = name wfb.isAvailable.return_value = avail for bldr in self.builders.values(): bldr.workers.append(wfb) @defer.inlineCallbacks def createBuilder(self, name, builderid=None, builder_config=None): if builderid is None: b = fakedb.Builder(name=name) yield self.master.db.insertTestData([b]) builderid = b.id bldr = mock.Mock(name=name) bldr.name = name self.botmaster.builders[name] = bldr self.builders[name] = bldr def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) d = defer.Deferred() self.reactor.callLater(0, d.callback, True) return d bldr.maybeStartBuild = maybeStartBuild bldr.getCollapseRequestsFn = lambda: False bldr.workers = [] bldr.getAvailableWorkers = lambda: [ w for w in bldr.workers if w.isAvailable()] 
bldr.getBuilderId = lambda: (builderid) if builder_config is None: bldr.config.nextWorker = None bldr.config.nextBuild = None else: bldr.config = builder_config def canStartBuild(*args): can = bldr.config.canStartBuild return not can or can(*args) bldr.canStartBuild = canStartBuild return bldr @defer.inlineCallbacks def addBuilders(self, names): self.startedBuilds = [] for name in names: yield self.createBuilder(name) def assertMyClaims(self, brids): self.assertEqual(self.master.data.updates.claimedBuildRequests, set(brids)) class Test(TestBRDBase): def checkAllCleanedUp(self): # check that the BRD didn't end with a stuck lock or in the 'active' state (which would mean # it ended without unwinding correctly) self.assertEqual(self.brd.pending_builders_lock.locked, False) self.assertEqual(self.brd.activity_lock.locked, False) self.assertEqual(self.brd.active, False) def useMock_maybeStartBuildsOnBuilder(self): # sets up a mock "maybeStartBuildsOnBuilder" so we can track # how the method gets invoked # keep track of the calls to brd.maybeStartBuildsOnBuilder self.maybeStartBuildsOnBuilder_calls = [] def maybeStartBuildsOnBuilder(bldr): self.assertIdentical(self.builders[bldr.name], bldr) self.maybeStartBuildsOnBuilder_calls.append(bldr.name) return fireEventually() self.brd._maybeStartBuildsOnBuilder = maybeStartBuildsOnBuilder def removeBuilder(self, name): del self.builders[name] del self.botmaster.builders[name] # tests @defer.inlineCallbacks def test_maybeStartBuildsOn_simple(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1']) yield self.brd.maybeStartBuildsOn(['bldr1']) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr1']) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_parallel(self): # test 15 "parallel" invocations of maybeStartBuildsOn, with a # _sortBuilders that takes a while. This is a regression test for bug # 1979. 
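        # Illustrative sketch only, not upstream code: the slow sort installed
        # below stands in for the user-supplied hook that a master.cfg would
        # register.  Its signature, as pinned by do_test_sortBuilders further
        # down, is prioritizeBuilders(buildmaster, builders), returning the
        # reordered list (or a Deferred firing with it).  The builder-name
        # policy below is hypothetical:
        #
        #     def prioritize_builders(buildmaster, builders):
        #         # run 'release*' builders ahead of everything else, then by name
        #         builders.sort(key=lambda b: (not b.name.startswith('release'), b.name))
        #         return builders
        #
        #     c['prioritizeBuilders'] = prioritize_builders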
builders = ['bldr%02d' % i for i in range(15)] def slow_sorter(master, bldrs): bldrs.sort(key=lambda b1: b1.name) d = defer.Deferred() self.reactor.callLater(0, d.callback, bldrs) def done(_): return _ d.addCallback(done) return d self.master.config.prioritizeBuilders = slow_sorter self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(builders) for bldr in builders: yield self.brd.maybeStartBuildsOn([bldr]) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, builders) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_exception(self): self.addBuilders(['bldr1']) def _maybeStartBuildsOnBuilder(n): # fail slowly, so that the activity loop doesn't exit too soon d = defer.Deferred() self.reactor.callLater(0, d.errback, failure.Failure(RuntimeError("oh noes"))) return d self.brd._maybeStartBuildsOnBuilder = _maybeStartBuildsOnBuilder yield self.brd.maybeStartBuildsOn(['bldr1']) yield self.brd._waitForFinish() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_collapsing(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1', 'bldr2', 'bldr3']) yield self.brd.maybeStartBuildsOn(['bldr3']) yield self.brd.maybeStartBuildsOn(['bldr2', 'bldr1']) yield self.brd.maybeStartBuildsOn(['bldr4']) # should be ignored yield self.brd.maybeStartBuildsOn(['bldr2']) # already queued - ignored yield self.brd.maybeStartBuildsOn(['bldr3', 'bldr2']) yield self.brd._waitForFinish() # bldr3 gets invoked twice, since it's considered to have started # already when the first call to maybeStartBuildsOn returns self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr3', 'bldr1', 'bldr2', 'bldr3']) self.checkAllCleanedUp() @defer.inlineCallbacks def test_maybeStartBuildsOn_builders_missing(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['bldr1', 'bldr2', 'bldr3']) yield self.brd.maybeStartBuildsOn(['bldr1', 'bldr2', 'bldr3']) # bldr1 is already run, so surreptitiously remove the other # two - nothing should crash, but the builders should not run self.removeBuilder('bldr2') self.removeBuilder('bldr3') yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['bldr1']) self.checkAllCleanedUp() @defer.inlineCallbacks def do_test_sortBuilders(self, prioritizeBuilders, oldestRequestTimes, expected, returnDeferred=False): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(list(oldestRequestTimes)) self.master.config.prioritizeBuilders = prioritizeBuilders def mklambda(t): # work around variable-binding issues if returnDeferred: return lambda: defer.succeed(t) return lambda: t for n, t in oldestRequestTimes.items(): if t is not None: t = epoch2datetime(t) self.builders[n].getOldestRequestTime = mklambda(t) result = yield self.brd._sortBuilders(list(oldestRequestTimes)) self.assertEqual(result, expected) self.checkAllCleanedUp() def test_sortBuilders_default_sync(self): return self.do_test_sortBuilders(None, # use the default sort dict(bldr1=777, bldr2=999, bldr3=888), ['bldr1', 'bldr3', 'bldr2']) def test_sortBuilders_default_asyn(self): return self.do_test_sortBuilders(None, # use the default sort dict(bldr1=777, bldr2=999, bldr3=888), ['bldr1', 'bldr3', 'bldr2'], returnDeferred=True) def test_sortBuilders_default_None(self): return self.do_test_sortBuilders(None, # use the default sort dict( bldr1=777, bldr2=None, bldr3=888), ['bldr1', 'bldr3', 'bldr2']) def test_sortBuilders_custom(self): def 
prioritizeBuilders(master, builders): self.assertIdentical(master, self.master) return sorted(builders, key=lambda b: b.name) return self.do_test_sortBuilders(prioritizeBuilders, dict(bldr1=1, bldr2=1, bldr3=1), ['bldr1', 'bldr2', 'bldr3']) def test_sortBuilders_custom_async(self): def prioritizeBuilders(master, builders): self.assertIdentical(master, self.master) return defer.succeed(sorted(builders, key=lambda b: b.name)) return self.do_test_sortBuilders(prioritizeBuilders, dict(bldr1=1, bldr2=1, bldr3=1), ['bldr1', 'bldr2', 'bldr3']) @defer.inlineCallbacks def test_sortBuilders_custom_exception(self): self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['x', 'y']) def fail(m, b): raise RuntimeError("oh noes") self.master.config.prioritizeBuilders = fail # expect to get the builders back in the same order in the event of an # exception result = yield self.brd._sortBuilders(['y', 'x']) self.assertEqual(result, ['y', 'x']) # and expect the exception to be logged self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_stopService(self): # check that stopService waits for a builder run to complete, but does not # allow a subsequent run to start self.useMock_maybeStartBuildsOnBuilder() self.addBuilders(['A', 'B']) oldMSBOB = self.brd._maybeStartBuildsOnBuilder def maybeStartBuildsOnBuilder(bldr): d = oldMSBOB(bldr) stop_d = self.brd.stopService() stop_d.addCallback(lambda _: self.maybeStartBuildsOnBuilder_calls.append('(stopped)')) d.addCallback(lambda _: self.maybeStartBuildsOnBuilder_calls.append('finished')) return d self.brd._maybeStartBuildsOnBuilder = maybeStartBuildsOnBuilder # start both builds; A should start and complete *before* the service stops, # and B should not run. yield self.brd.maybeStartBuildsOn(['A', 'B']) yield self.brd._waitForFinish() self.assertEqual(self.maybeStartBuildsOnBuilder_calls, ['A', 'finished', '(stopped)']) class TestMaybeStartBuilds(TestBRDBase): @defer.inlineCallbacks def setUp(self): super().setUp() self.startedBuilds = [] self.bldr = yield self.createBuilder('A', builderid=77) self.builders['A'] = self.bldr def assertBuildsStarted(self, exp): # munge builds_started into (worker, [brids]) builds_started = [ (worker, [br.id for br in breqs]) for (worker, breqs) in self.startedBuilds] self.assertEqual(builds_started, exp) # _maybeStartBuildsOnBuilder @defer.inlineCallbacks def do_test_maybeStartBuildsOnBuilder(self, rows=None, exp_claims=None, exp_builds=None): rows = rows or [] exp_claims = exp_claims or [] exp_builds = exp_builds or [] yield self.master.db.insertTestData(rows) yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims(exp_claims) self.assertBuildsStarted(exp_builds) @defer.inlineCallbacks def test_no_buildrequests(self): self.addWorkers({'test-worker11': 1}) yield self.do_test_maybeStartBuildsOnBuilder(exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_no_workerforbuilders(self): rows = [ fakedb.Builder(id=78, name='bldr'), fakedb.BuildRequest(id=11, buildsetid=10, builderid=78), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_limited_by_workers(self): self.addWorkers({'test-worker1': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) 
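    # Illustrative sketch, not exercised by this suite: the nextWorker/nextBuild
    # hooks driven through bldr.config in the tests below are normally supplied
    # through a BuilderConfig in master.cfg.  The builder/worker names and the
    # selection policy here are hypothetical; the hook signatures and the
    # BuilderConfig keyword arguments mirror do_test_nextWorker further below.
    def _example_builder_hooks_config(self):
        def pick_worker(builder, workers, buildrequest):
            # hypothetical policy: first available candidate; None skips this cycle
            return workers[0] if workers else None

        def pick_build(builder, requests):
            # hypothetical policy: oldest pending request first
            return requests[0]

        return config.BuilderConfig(name='example', workernames=['w1', 'w2'],
                                    builddir='example', factory=factory.BuildFactory(),
                                    nextWorker=pick_worker, nextBuild=pick_build)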
@defer.inlineCallbacks def test_sorted_by_submit_time(self): # same as "limited_by_workers" but with rows swapped self.addWorkers({'test-worker1': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) @defer.inlineCallbacks def test_limited_by_available_workers(self): self.addWorkers({'test-worker1': 0, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker2', [10])]) @defer.inlineCallbacks def test_slow_db(self): # test what happens if the "getBuildRequests" fetch takes a "long time" self.addWorkers({'test-worker1': 1}) # wrap to simulate a "long" db access old_getBuildRequests = self.master.db.buildrequests.getBuildRequests def longGetBuildRequests(*args, **kwargs): res_d = old_getBuildRequests(*args, **kwargs) long_d = defer.Deferred() long_d.addCallback(lambda _: res_d) self.reactor.callLater(0, long_d.callback, None) return long_d self.master.db.buildrequests.getBuildRequests = longGetBuildRequests rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10], exp_builds=[('test-worker1', [10])]) @defer.inlineCallbacks def test_limited_by_canStartBuild(self): """Set the 'canStartBuild' value in the config to something that limits the possible options.""" self.bldr.config.nextWorker = nth_worker(-1) pairs_tested = [] def _canStartBuild(worker, breq): result = (worker.name, breq.id) pairs_tested.append(result) allowed = [ ("test-worker1", 10), ("test-worker3", 11), ] return result in allowed self.bldr.config.canStartBuild = _canStartBuild self.addWorkers( {'test-worker1': 1, 'test-worker2': 1, 'test-worker3': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), fakedb.BuildRequest(id=12, buildsetid=11, builderid=77, submitted_at=140000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[ ('test-worker1', [10]), ('test-worker3', [11])]) # we expect brids in order (10-11-12), # with each searched in reverse order of workers (3-2-1) available (due # to nth_worker(-1)) self.assertEqual(pairs_tested, [ ('test-worker3', 10), ('test-worker2', 10), ('test-worker1', 10), ('test-worker3', 11), ('test-worker2', 12)]) @defer.inlineCallbacks def test_limited_by_canStartBuild_deferreds(self): # Another variant that returns Deferred types, self.bldr.config.nextWorker = nth_worker(-1) pairs_tested = [] def _canStartBuild(worker, breq): result = (worker.name, breq.id) pairs_tested.append(result) allowed = [ ("test-worker1", 10), ("test-worker3", 11), ] return defer.succeed(result in allowed) self.bldr.config.canStartBuild = _canStartBuild self.addWorkers( {'test-worker1': 1, 'test-worker2': 1, 'test-worker3': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, 
builderid=77, submitted_at=135000), fakedb.BuildRequest(id=12, buildsetid=11, builderid=77, submitted_at=140000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[ ('test-worker1', [10]), ('test-worker3', [11]) ]) # we expect brids in order (10-11-12), # with worker2 unable to pair self.assertEqual(pairs_tested, [ ('test-worker3', 10), ('test-worker2', 10), ('test-worker1', 10), ('test-worker3', 11), ('test-worker2', 12)]) @defer.inlineCallbacks def test_unlimited(self): self.bldr.config.nextWorker = nth_worker(-1) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[10, 11], exp_builds=[ ('test-worker2', [10]), ('test-worker1', [11])]) @defer.inlineCallbacks def test_bldr_maybeStartBuild_fails_always(self): self.bldr.config.nextWorker = nth_worker(-1) # the builder fails to start the build; we'll see that the build # was requested, but the brids will get claimed again def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) return defer.succeed(False) self.bldr.maybeStartBuild = maybeStartBuild self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, # claimed again so none taken! exp_claims=[], exp_builds=[ ('test-worker2', [10]), ('test-worker1', [11])]) @defer.inlineCallbacks def test_bldr_maybeStartBuild_fails_once(self): self.bldr.config.nextWorker = nth_worker(-1) # the builder fails to start the build; we'll see that the build # was requested, but the brids will get claimed again start_build_results = [False, True, True] def maybeStartBuild(worker, builds): self.startedBuilds.append((worker.name, builds)) return defer.succeed(start_build_results.pop(0)) self.bldr.maybeStartBuild = maybeStartBuild self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.master.db.insertTestData(rows) # first time around, only #11 stays claimed yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims([11]) # claimed again so none taken! 
self.assertBuildsStarted( [('test-worker2', [10]), ('test-worker1', [11])]) # second time around the #10 will pass, adding another request and it # is claimed yield self.brd._maybeStartBuildsOnBuilder(self.bldr) self.assertMyClaims([10, 11]) self.assertBuildsStarted( [('test-worker2', [10]), ('test-worker1', [11]), ('test-worker2', [10])]) @defer.inlineCallbacks def test_limited_by_requests(self): self.bldr.config.nextWorker = nth_worker(1) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[11], exp_builds=[('test-worker2', [11])]) @defer.inlineCallbacks def test_nextWorker_None(self): self.bldr.config.nextWorker = lambda _1, _2, _3: defer.succeed(None) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextWorker_bogus(self): self.bldr.config.nextWorker = lambda _1, _2, _3: defer.succeed( mock.Mock()) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_None(self): self.bldr.config.nextBuild = lambda _1, _2: defer.succeed(None) self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_bogus(self): self.bldr.config.nextBuild = lambda _1, _2: mock.Mock() self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) @defer.inlineCallbacks def test_nextBuild_fails(self): def nextBuildRaises(*args): raise RuntimeError("xx") self.bldr.config.nextBuild = nextBuildRaises self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=11, buildsetid=11, builderid=77), ] result = self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[], exp_builds=[]) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) yield result # check concurrency edge cases @defer.inlineCallbacks def test_claim_race(self): self.bldr.config.nextWorker = nth_worker(0) # fake a race condition on the buildrequests table old_claimBuildRequests = self.master.db.buildrequests.claimBuildRequests def claimBuildRequests(brids, claimed_at=None): # first, ensure this only happens the first time self.master.db.buildrequests.claimBuildRequests = old_claimBuildRequests # claim brid 10 for some other master assert 10 in brids self.master.db.buildrequests.fakeClaimBuildRequest(10, 136000, masterid=9999) # some other masterid # ..and fail return defer.fail(buildrequests.AlreadyClaimedError()) self.master.db.buildrequests.claimBuildRequests = claimBuildRequests self.addWorkers({'test-worker1': 1, 'test-worker2': 1}) rows = self.base_rows + [ fakedb.BuildRequest(id=10, buildsetid=11, builderid=77, submitted_at=130000), # will turn out to be claimed! 
fakedb.BuildRequest(id=11, buildsetid=11, builderid=77, submitted_at=135000), ] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=[11], exp_builds=[('test-worker1', [11])]) # nextWorker @defer.inlineCallbacks def do_test_nextWorker(self, nextWorker, exp_choice=None, exp_warning=False): def makeBuilderConfig(): return config.BuilderConfig(name='bldrconf', workernames=['wk1', 'wk2'], builddir='bdir', factory=factory.BuildFactory(), nextWorker=nextWorker) if exp_warning: with assertProducesWarning(config.ConfigWarning, message_pattern=r"nextWorker now takes a 3rd argument"): builder_config = makeBuilderConfig() else: builder_config = makeBuilderConfig() self.bldr = yield self.createBuilder('B', builderid=78, builder_config=builder_config) for i in range(4): self.addWorkers({'test-worker%d' % i: 1}) rows = [ fakedb.SourceStamp(id=21), fakedb.Builder(id=78, name='B'), fakedb.Buildset(id=12, reason='because'), fakedb.BuildsetSourceStamp(sourcestampid=21, buildsetid=12), fakedb.BuildRequest(id=12, buildsetid=12, builderid=78), ] if exp_choice is None: exp_claims = [] exp_builds = [] else: exp_claims = [12] exp_builds = [('test-worker%d' % exp_choice, [12])] yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=exp_claims, exp_builds=exp_builds) def test_nextWorker_gets_buildrequest(self): def nextWorker(bldr, lst, br=None): self.assertNotEqual(br, None) return self.do_test_nextWorker(nextWorker) def test_nextWorker_2args_in_signature(self): def nextWorker(builder, lst): return lst[0] if lst else None return self.do_test_nextWorker(nextWorker, exp_choice=0, exp_warning=True) def test_nextWorker_default(self): self.patch(random, 'choice', nth_worker(2)) return self.do_test_nextWorker(None, exp_choice=2) def test_nextWorker_simple(self): def nextWorker(bldr, lst, br=None): self.assertIdentical(bldr, self.bldr) return lst[1] return self.do_test_nextWorker(nextWorker, exp_choice=1) def test_nextWorker_deferred(self): def nextWorker(bldr, lst, br=None): self.assertIdentical(bldr, self.bldr) return defer.succeed(lst[1]) return self.do_test_nextWorker(nextWorker, exp_choice=1) @defer.inlineCallbacks def test_nextWorker_exception(self): def nextWorker(bldr, lst, br=None): raise RuntimeError("") yield self.do_test_nextWorker(nextWorker) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) @defer.inlineCallbacks def test_nextWorker_failure(self): def nextWorker(bldr, lst, br=None): return defer.fail(failure.Failure(RuntimeError())) yield self.do_test_nextWorker(nextWorker) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) # _nextBuild @defer.inlineCallbacks def do_test_nextBuild(self, nextBuild, exp_choice=None): self.bldr.config.nextWorker = nth_worker(-1) self.bldr.config.nextBuild = nextBuild rows = self.make_workers(4) exp_claims = [] exp_builds = [] if exp_choice is not None: worker = 3 for choice in exp_choice: exp_claims.append(choice) exp_builds.append(('test-worker%d' % worker, [choice])) worker = worker - 1 yield self.do_test_maybeStartBuildsOnBuilder(rows=rows, exp_claims=sorted(exp_claims), exp_builds=exp_builds) def test_nextBuild_default(self): "default chooses the first in the list, which should be the earliest" return self.do_test_nextBuild(None, exp_choice=[10, 11, 12, 13]) def test_nextBuild_simple(self): def nextBuild(bldr, lst): self.assertIdentical(bldr, self.bldr) return lst[-1] return self.do_test_nextBuild(nextBuild, exp_choice=[13, 12, 11, 10]) def test_nextBuild_deferred(self): def nextBuild(bldr, lst): self.assertIdentical(bldr, 
self.bldr) return defer.succeed(lst[-1]) return self.do_test_nextBuild(nextBuild, exp_choice=[13, 12, 11, 10]) def test_nextBuild_exception(self): def nextBuild(bldr, lst): raise RuntimeError("") result = self.do_test_nextBuild(nextBuild) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) return result def test_nextBuild_failure(self): def nextBuild(bldr, lst): return defer.fail(failure.Failure(RuntimeError())) result = self.do_test_nextBuild(nextBuild) self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) return result buildbot-2.6.0/master/buildbot/test/unit/test_process_buildstep.py000066400000000000000000001314141361162603000255240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.internet.task import deferLater from twisted.python import log from twisted.trial import unittest from buildbot import locks from buildbot.interfaces import WorkerTooOldError from buildbot.plugins import util from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotecommand from buildbot.process.properties import renderer from buildbot.process.results import ALL_RESULTS from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import remotecommand as fakeremotecommand from buildbot.test.fake import worker from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import interfaces from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util.eventual import eventually class OldStyleStep(buildstep.BuildStep): def start(self): pass class NewStyleStep(buildstep.BuildStep): def run(self): pass class TestBuildStep(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): class FakeBuildStep(buildstep.BuildStep): def start(self): eventually(self.finished, 0) class SkippingBuildStep(buildstep.BuildStep): def start(self): return SKIPPED def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() # support def _setupWaterfallTest(self, hideStepIf, expect, expectedResult=SUCCESS): self.setupStep(TestBuildStep.FakeBuildStep(hideStepIf=hideStepIf)) self.expectOutcome(result=expectedResult) self.expectHidden(expect) # tests def test_nameIsntString(self): """ When BuildStep is 
passed a name that isn't a string, it reports a config error. """ with self.assertRaisesConfigError("BuildStep name must be a string"): buildstep.BuildStep(name=5) def test_unexpectedKeywordArgument(self): """ When BuildStep is passed an unknown keyword argument, it reports a config error. """ with self.assertRaisesConfigError( "__init__ got unexpected keyword argument(s) ['oogaBooga']"): buildstep.BuildStep(oogaBooga=5) def test_updateBuildSummaryPolicyDefaults(self): """ updateBuildSummaryPolicy builds default value according to resultsMixin parameters (flunkOnFailure..) """ step = buildstep.BuildStep() self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED, FAILURE])) step = buildstep.BuildStep(warnOnWarnings=True) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED, FAILURE, WARNINGS])) step = buildstep.BuildStep(flunkOnFailure=False) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted([ EXCEPTION, RETRY, CANCELLED])) step = buildstep.BuildStep(updateBuildSummaryPolicy=False) self.assertEqual(sorted(step.updateBuildSummaryPolicy), []) step = buildstep.BuildStep(updateBuildSummaryPolicy=True) self.assertEqual(sorted(step.updateBuildSummaryPolicy), sorted(ALL_RESULTS)) def test_updateBuildSummaryPolicyBadType(self): """ updateBuildSummaryPolicy raise ConfigError in case of bad type """ with self.assertRaisesConfigError( "BuildStep updateBuildSummaryPolicy must be a list of result ids or boolean but it is 2"): buildstep.BuildStep(updateBuildSummaryPolicy=FAILURE) def test_getProperty(self): bs = buildstep.BuildStep() bs.build = fakebuild.FakeBuild() props = bs.build.build_status.properties = mock.Mock() bs.getProperty("xyz", 'b') props.getProperty.assert_called_with("xyz", 'b') bs.getProperty("xyz") props.getProperty.assert_called_with("xyz", None) def test_setProperty(self): bs = buildstep.BuildStep() bs.build = fakebuild.FakeBuild() props = bs.build.build_status.properties = mock.Mock() bs.setProperty("x", "y", "t") props.setProperty.assert_called_with("x", "y", "t", runtime=True) bs.setProperty("x", "abc", "test", runtime=True) props.setProperty.assert_called_with("x", "abc", "test", runtime=True) @defer.inlineCallbacks def test_renderableLocks(self): lock1 = locks.MasterLock("masterlock") lock2 = locks.WorkerLock("workerlock") renderedLocks = [False] @renderer def rendered_locks(props): renderedLocks[0] = True access1 = locks.LockAccess(lock1, 'counting') access2 = locks.LockAccess(lock2, 'exclusive') return [access1, access2] self.setupStep(self.FakeBuildStep(locks=rendered_locks)) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(renderedLocks[0]) def test_compare(self): lbs1 = buildstep.LoggingBuildStep(name="me") lbs2 = buildstep.LoggingBuildStep(name="me") lbs3 = buildstep.LoggingBuildStep(name="me2") self.assertEqual(lbs1, lbs2) self.assertNotEqual(lbs1, lbs3) def test_repr(self): self.assertEqual( repr(buildstep.LoggingBuildStep(name="me")), 'LoggingBuildStep(name=' + repr("me") + ')') self.assertEqual( repr(buildstep.LoggingBuildStep({}, name="me")), 'LoggingBuildStep({}, name=' + repr("me") + ')') @defer.inlineCallbacks def test_regularLocks(self): lock1 = locks.MasterLock("masterlock") lock2 = locks.WorkerLock("workerlock") self.setupStep(self.FakeBuildStep( locks=[locks.LockAccess(lock1, 'counting'), locks.LockAccess(lock2, 'exclusive')])) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_cancelWhileLocksAvailable(self): def 
_owns_lock(step, lock): access = [step_access for step_lock, step_access in step.locks if step_lock == lock][0] return lock.isOwner(step, access) def _lock_available(step, lock): access = [step_access for step_lock, step_access in step.locks if step_lock == lock][0] return lock.isAvailable(step, access) lock1 = locks.MasterLock("masterlock1") real_lock1 = locks.RealMasterLock(lock1) lock2 = locks.MasterLock("masterlock2") real_lock2 = locks.RealMasterLock(lock2) stepa = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')) ])) stepb = self.setupStep(self.FakeBuildStep(locks=[ (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) stepc = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')), (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) stepd = self.setupStep(self.FakeBuildStep(locks=[ (real_lock1, locks.LockAccess(lock1, 'exclusive')), (real_lock2, locks.LockAccess(lock2, 'exclusive')) ])) # Start all the steps yield stepa.acquireLocks() yield stepb.acquireLocks() c_d = stepc.acquireLocks() d_d = stepd.acquireLocks() # Check that step a and step b have the locks self.assertTrue(_owns_lock(stepa, real_lock1)) self.assertTrue(_owns_lock(stepb, real_lock2)) # Check that step c does not have a lock self.assertFalse(_owns_lock(stepc, real_lock1)) self.assertFalse(_owns_lock(stepc, real_lock2)) # Check that step d does not have a lock self.assertFalse(_owns_lock(stepd, real_lock1)) self.assertFalse(_owns_lock(stepd, real_lock2)) # Release lock 1 stepa.releaseLocks() yield deferLater(reactor, 0, lambda: None) # lock1 should be available for step c self.assertTrue(_lock_available(stepc, real_lock1)) self.assertFalse(_lock_available(stepc, real_lock2)) self.assertFalse(_lock_available(stepd, real_lock1)) self.assertFalse(_lock_available(stepd, real_lock2)) # Cancel step c stepc.interrupt("cancelling") yield c_d # Check that step c does not have a lock self.assertFalse(_owns_lock(stepc, real_lock1)) self.assertFalse(_owns_lock(stepc, real_lock2)) # No lock should be available for step c self.assertFalse(_lock_available(stepc, real_lock1)) self.assertFalse(_lock_available(stepc, real_lock2)) # lock 1 should be available for step d self.assertTrue(_lock_available(stepd, real_lock1)) self.assertFalse(_lock_available(stepd, real_lock2)) # Release lock 2 stepb.releaseLocks() # Both locks should be available for step d self.assertTrue(_lock_available(stepd, real_lock1)) self.assertTrue(_lock_available(stepd, real_lock2)) # So it should run yield d_d # Check that step d owns the locks self.assertTrue(_owns_lock(stepd, real_lock1)) self.assertTrue(_owns_lock(stepd, real_lock2)) @defer.inlineCallbacks def test_runCommand(self): bs = buildstep.BuildStep() bs.worker = worker.FakeWorker(master=None) # master is not used here bs.remote = 'dummy' bs.build = fakebuild.FakeBuild() bs.build.builder.name = 'fake' cmd = remotecommand.RemoteShellCommand("build", ["echo", "hello"]) def run(*args, **kwargs): # check that runCommand sets step.cmd self.assertIdentical(bs.cmd, cmd) return SUCCESS cmd.run = run yield bs.runCommand(cmd) # check that step.cmd is cleared after the command runs self.assertEqual(bs.cmd, None) @defer.inlineCallbacks def test_start_returns_SKIPPED(self): self.setupStep(self.SkippingBuildStep()) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping 
self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_false(self): self.setupStep(self.FakeBuildStep(doStepIf=False)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_renderable_false(self): @util.renderer def dostepif(props): return False self.setupStep(self.FakeBuildStep(doStepIf=dostepif)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_returns_false(self): self.setupStep(self.FakeBuildStep(doStepIf=lambda step: False)) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() @defer.inlineCallbacks def test_doStepIf_returns_deferred_false(self): self.setupStep(self.FakeBuildStep( doStepIf=lambda step: defer.succeed(False))) self.step.finished = mock.Mock() self.expectOutcome(result=SKIPPED, state_string='finished (skipped)') yield self.runStep() # 837: we want to specifically avoid calling finished() if skipping self.step.finished.assert_not_called() def test_hideStepIf_False(self): self._setupWaterfallTest(False, False) return self.runStep() def test_hideStepIf_True(self): self._setupWaterfallTest(True, True) return self.runStep() @defer.inlineCallbacks def test_hideStepIf_Callable_False(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, SUCCESS) return False self._setupWaterfallTest(shouldHide, False) yield self.runStep() self.assertTrue(called[0]) @defer.inlineCallbacks def test_hideStepIf_Callable_True(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, SUCCESS) return True self._setupWaterfallTest(shouldHide, True) yield self.runStep() self.assertTrue(called[0]) @defer.inlineCallbacks def test_hideStepIf_fails(self): # 0/0 causes DivideByZeroError, which should be flagged as an exception self._setupWaterfallTest( lambda x, y: 0 / 0, False, expectedResult=EXCEPTION) self.step.addLogWithFailure = mock.Mock() yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1) @defer.inlineCallbacks def test_hideStepIf_Callable_Exception(self): called = [False] def shouldHide(result, step): called[0] = True self.assertTrue(step is self.step) self.assertEqual(result, EXCEPTION) return True def createException(*args, **kwargs): raise RuntimeError() self.setupStep(self.FakeBuildStep(hideStepIf=shouldHide, doStepIf=createException)) self.expectOutcome(result=EXCEPTION, state_string='finished (exception)') self.expectHidden(True) try: yield self.runStep() except Exception as e: log.err(e) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) self.assertTrue(called[0]) @defer.inlineCallbacks def test_step_getLog(self): testcase = self class TestGetLogStep(buildstep.BuildStep): @defer.inlineCallbacks def run(self): testcase.assertRaises(KeyError, lambda: self.getLog('testy')) log1 = yield self.addLog('testy') log2 = self.getLog('testy') testcase.assertIdentical(log1, log2) 
return SUCCESS self.setupStep(TestGetLogStep()) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_step_renders_flunkOnFailure(self): self.setupStep( TestBuildStep.FakeBuildStep(flunkOnFailure=properties.Property('fOF'))) self.properties.setProperty('fOF', 'yes', 'test') self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.flunkOnFailure, 'yes') def test_hasStatistic(self): step = buildstep.BuildStep() self.assertFalse(step.hasStatistic('rbi')) step.setStatistic('rbi', 13) self.assertTrue(step.hasStatistic('rbi')) def test_setStatistic(self): step = buildstep.BuildStep() step.setStatistic('rbi', 13) self.assertEqual(step.getStatistic('rbi'), 13) def test_getStatistic(self): step = buildstep.BuildStep() self.assertEqual(step.getStatistic('rbi', 99), 99) self.assertEqual(step.getStatistic('rbi'), None) step.setStatistic('rbi', 13) self.assertEqual(step.getStatistic('rbi'), 13) def test_getStatistics(self): step = buildstep.BuildStep() step.setStatistic('rbi', 13) step.setStatistic('ba', 0.298) self.assertEqual(step.getStatistics(), {'rbi': 13, 'ba': 0.298}) def test_isNewStyle(self): self.assertFalse(OldStyleStep().isNewStyle()) self.assertTrue(NewStyleStep().isNewStyle()) def setup_summary_test(self): self.patch(NewStyleStep, 'getCurrentSummary', lambda self: defer.succeed({'step': 'C'})) self.patch(NewStyleStep, 'getResultSummary', lambda self: defer.succeed({'step': 'CS', 'build': 'CB'})) step = NewStyleStep() step.master = fakemaster.make_master(self, wantData=True, wantDb=True) step.stepid = 13 step.step_status = mock.Mock() step.build = fakebuild.FakeBuild() return step def test_updateSummary_running(self): step = self.setup_summary_test() step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'C') def test_updateSummary_running_empty_dict(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: {} step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'finished') def test_updateSummary_running_not_unicode(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: {'step': b'bytestring'} step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) def test_updateSummary_running_not_dict(self): step = self.setup_summary_test() step.getCurrentSummary = lambda: 'foo!' step._running = True step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) def test_updateSummary_finished(self): step = self.setup_summary_test() step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'CS') def test_updateSummary_finished_empty_dict(self): step = self.setup_summary_test() step.getResultSummary = lambda: {} step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(step.master.data.updates.stepStateString[13], 'finished') def test_updateSummary_finished_not_dict(self): step = self.setup_summary_test() step.getResultSummary = lambda: 'foo!' 
step._running = False step.updateSummary() self.reactor.advance(1) self.assertEqual(len(self.flushLoggedErrors(TypeError)), 1) @defer.inlineCallbacks def test_updateSummary_old_style(self): self.setupStep(OldStyleStep()) # pylint: disable=unnecessary-lambda self.step.start = lambda: self.step.updateSummary() self.expectOutcome(result=EXCEPTION) yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(AssertionError)), 1) def checkSummary(self, got, step, build=None): self.assertTrue(all(isinstance(k, str) for k in got)) self.assertTrue(all(isinstance(k, str) for k in got.values())) exp = {'step': step} if build: exp['build'] = build self.assertEqual(got, exp) def test_getCurrentSummary(self): st = buildstep.BuildStep() st.description = None self.checkSummary(st.getCurrentSummary(), 'running') def test_getCurrentSummary_description(self): st = buildstep.BuildStep() st.description = 'fooing' self.checkSummary(st.getCurrentSummary(), 'fooing') def test_getCurrentSummary_descriptionSuffix(self): st = buildstep.BuildStep() st.description = 'fooing' st.descriptionSuffix = 'bar' self.checkSummary(st.getCurrentSummary(), 'fooing bar') def test_getCurrentSummary_description_list(self): st = buildstep.BuildStep() st.description = ['foo', 'ing'] self.checkSummary(st.getCurrentSummary(), 'foo ing') def test_getCurrentSummary_descriptionSuffix_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] st.descriptionSuffix = ['bar', 'bar2'] self.checkSummary(st.getCurrentSummary(), 'foo ing bar bar2') def test_getResultSummary(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = None self.checkSummary(st.getResultSummary(), 'finished') def test_getResultSummary_description(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' self.checkSummary(st.getResultSummary(), 'fooing') def test_getResultSummary_descriptionDone(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' st.descriptionDone = 'fooed' self.checkSummary(st.getResultSummary(), 'fooed') def test_getResultSummary_descriptionSuffix(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = 'fooing' st.descriptionSuffix = 'bar' self.checkSummary(st.getResultSummary(), 'fooing bar') def test_getResultSummary_descriptionDone_and_Suffix(self): st = buildstep.BuildStep() st.results = SUCCESS st.descriptionDone = 'fooed' st.descriptionSuffix = 'bar' self.checkSummary(st.getResultSummary(), 'fooed bar') def test_getResultSummary_description_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] self.checkSummary(st.getResultSummary(), 'foo ing') def test_getResultSummary_descriptionSuffix_list(self): st = buildstep.BuildStep() st.results = SUCCESS st.description = ['foo', 'ing'] st.descriptionSuffix = ['bar', 'bar2'] self.checkSummary(st.getResultSummary(), 'foo ing bar bar2') @defer.inlineCallbacks def test_getResultSummary_descriptionSuffix_failure(self): st = buildstep.BuildStep() st.results = FAILURE st.description = 'fooing' self.checkSummary((yield st.getBuildResultSummary()), 'fooing (failure)', 'fooing (failure)') self.checkSummary(st.getResultSummary(), 'fooing (failure)') @defer.inlineCallbacks def test_getResultSummary_descriptionSuffix_skipped(self): st = buildstep.BuildStep() st.results = SKIPPED st.description = 'fooing' self.checkSummary((yield st.getBuildResultSummary()), 'fooing (skipped)') self.checkSummary(st.getResultSummary(), 'fooing (skipped)') # Test calling 
checkWorkerHasCommand() when worker has support for
    # requested remote command.
    def testcheckWorkerHasCommandGood(self):
        # patch BuildStep.workerVersion() to return success
        mockedWorkerVersion = mock.Mock()
        self.patch(buildstep.BuildStep, "workerVersion", mockedWorkerVersion)

        # check that no exceptions are raised
        buildstep.BuildStep().checkWorkerHasCommand("foo")

        # make sure workerVersion() was called with correct arguments
        mockedWorkerVersion.assert_called_once_with("foo")

    # Test calling checkWorkerHasCommand() when worker is too old to support
    # requested remote command.
    def testcheckWorkerHasCommandTooOld(self):
        # patch BuildStep.workerVersion() to return error
        self.patch(buildstep.BuildStep, "workerVersion", mock.Mock(return_value=None))

        # make sure appropriate exception is raised
        step = buildstep.BuildStep()
        with self.assertRaisesRegex(WorkerTooOldError,
                                    "worker is too old, does not know about foo"):
            step.checkWorkerHasCommand("foo")

    @defer.inlineCallbacks
    def testRunRaisesException(self):
        step = NewStyleStep()
        step.master = mock.Mock()
        step.build = mock.Mock()
        step.build.builder.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[])
        step.locks = []
        step.renderables = []
        step.build.render = defer.succeed
        step.master.data.updates.addStep = lambda **kwargs: defer.succeed((0, 0, 0))
        step.addLogWithFailure = lambda x: defer.succeed(None)
        step.run = lambda: defer.fail(RuntimeError('got exception'))
        res = yield step.startStep(mock.Mock())
        self.assertFalse(step._running)
        errors = self.flushLoggedErrors()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].getErrorMessage(), 'got exception')
        self.assertEqual(res, EXCEPTION)


class TestLoggingBuildStep(unittest.TestCase):

    def makeRemoteCommand(self, rc, stdout, stderr=''):
        cmd = fakeremotecommand.FakeRemoteCommand('cmd', {})
        cmd.fakeLogData(self, 'stdio', stdout=stdout, stderr=stderr)
        cmd.rc = rc
        return cmd

    def test_evaluateCommand_success(self):
        cmd = self.makeRemoteCommand(0, "Log text", "Log text")
        lbs = buildstep.LoggingBuildStep()
        status = lbs.evaluateCommand(cmd)
        self.assertEqual(status, SUCCESS,
                         "evaluateCommand returned %d, should've returned %d" %
                         (status, SUCCESS))

    def test_evaluateCommand_failed(self):
        cmd = self.makeRemoteCommand(23, "Log text", "")
        lbs = buildstep.LoggingBuildStep()
        status = lbs.evaluateCommand(cmd)
        self.assertEqual(status, FAILURE,
                         "evaluateCommand returned %d, should've returned %d" %
                         (status, FAILURE))


class InterfaceTests(interfaces.InterfaceTests):

    # ensure that steps.BuildStepMixin creates a convincing facsimile of the
    # real BuildStep

    def test_signature_attributes(self):
        for attr in [
            'name', 'description', 'descriptionDone', 'descriptionSuffix',
            'locks', 'progressMetrics', 'useProgress', 'doStepIf',
            'hideStepIf', 'haltOnFailure', 'flunkOnWarnings', 'flunkOnFailure',
            'warnOnWarnings', 'warnOnFailure', 'alwaysRun', 'build', 'worker',
            'step_status', 'progress', 'stopped',
        ]:
            self.assertTrue(hasattr(self.step, attr))

    def test_signature_setBuild(self):
        @self.assertArgSpecMatches(self.step.setBuild)
        def setBuild(self, build):
            pass

    def test_signature_setWorker(self):
        @self.assertArgSpecMatches(self.step.setWorker)
        def setWorker(self, worker):
            pass

    def test_signature_setupProgress(self):
        @self.assertArgSpecMatches(self.step.setupProgress)
        def setupProgress(self):
            pass

    def test_signature_startStep(self):
        @self.assertArgSpecMatches(self.step.startStep)
        def startStep(self, remote):
            pass

    def test_signature_run(self):
        @self.assertArgSpecMatches(self.step.run)
        def run(self):
            pass

    def
test_signature_start(self): @self.assertArgSpecMatches(self.step.start) def start(self): pass def test_signature_finished(self): @self.assertArgSpecMatches(self.step.finished) def finished(self, results): pass def test_signature_failed(self): @self.assertArgSpecMatches(self.step.failed) def failed(self, why): pass def test_signature_interrupt(self): @self.assertArgSpecMatches(self.step.interrupt) def interrupt(self, reason): pass def test_signature_describe(self): @self.assertArgSpecMatches(self.step.describe) def describe(self, done=False): pass def test_signature_setProgress(self): @self.assertArgSpecMatches(self.step.setProgress) def setProgress(self, metric, value): pass def test_signature_workerVersion(self): @self.assertArgSpecMatches(self.step.workerVersion) def workerVersion(self, command, oldversion=None): pass def test_signature_workerVersionIsOlderThan(self): @self.assertArgSpecMatches(self.step.workerVersionIsOlderThan) def workerVersionIsOlderThan(self, command, minversion): pass def test_signature_getWorkerName(self): @self.assertArgSpecMatches(self.step.getWorkerName) def getWorkerName(self): pass def test_signature_runCommand(self): @self.assertArgSpecMatches(self.step.runCommand) def runCommand(self, command): pass def test_signature_addURL(self): @self.assertArgSpecMatches(self.step.addURL) def addURL(self, name, url): pass def test_signature_addLog(self): @self.assertArgSpecMatches(self.step.addLog) def addLog(self, name, type='s', logEncoding=None): pass def test_signature_getLog(self): @self.assertArgSpecMatches(self.step.getLog) def getLog(self, name): pass def test_signature_addCompleteLog(self): @self.assertArgSpecMatches(self.step.addCompleteLog) def addCompleteLog(self, name, text): pass def test_signature_addHTMLLog(self): @self.assertArgSpecMatches(self.step.addHTMLLog) def addHTMLLog(self, name, html): pass def test_signature_addLogObserver(self): @self.assertArgSpecMatches(self.step.addLogObserver) def addLogObserver(self, logname, observer): pass class TestFakeItfc(unittest.TestCase, steps.BuildStepMixin, TestReactorMixin, InterfaceTests): def setUp(self): self.setUpTestReactor() self.setupStep(buildstep.BuildStep()) class TestRealItfc(unittest.TestCase, InterfaceTests): def setUp(self): self.step = buildstep.BuildStep() class CommandMixinExample(buildstep.CommandMixin, buildstep.BuildStep): @defer.inlineCallbacks def run(self): rv = yield self.testMethod() self.method_return_value = rv return SUCCESS class TestCommandMixin(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield self.setUpBuildStep() self.step = CommandMixinExample() self.setupStep(self.step) def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def test_runRmdir(self): self.step.testMethod = lambda: self.step.runRmdir('/some/path') self.expectCommands( Expect('rmdir', {'dir': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_runMkdir(self): self.step.testMethod = lambda: self.step.runMkdir('/some/path') self.expectCommands( Expect('mkdir', {'dir': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_runMkdir_fails(self): self.step.testMethod = lambda: self.step.runMkdir('/some/path') self.expectCommands( Expect('mkdir', {'dir': '/some/path', 
'logEnviron': False}) + 1, ) self.expectOutcome(result=FAILURE) yield self.runStep() @defer.inlineCallbacks def test_runMkdir_fails_no_abandon(self): self.step.testMethod = lambda: self.step.runMkdir( '/some/path', abandonOnFailure=False) self.expectCommands( Expect('mkdir', {'dir': '/some/path', 'logEnviron': False}) + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists_doesnt(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) @defer.inlineCallbacks def test_pathExists_logging(self): self.step.testMethod = lambda: self.step.pathExists('/some/path') self.expectCommands( Expect('stat', {'file': '/some/path', 'logEnviron': False}) + Expect.log('stdio', header='NOTE: never mind\n') + 1, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertFalse(self.step.method_return_value) self.assertEqual(self.step.getLog('stdio').header, 'NOTE: never mind\n') def test_glob(self): @defer.inlineCallbacks def testFunc(): res = yield self.step.runGlob("*.pyc") self.assertEqual(res, ["one.pyc", "two.pyc"]) self.step.testMethod = testFunc self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', ["one.pyc", "two.pyc"]) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_empty(self): self.step.testMethod = lambda: self.step.runGlob("*.pyc") self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', []) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_fail(self): self.step.testMethod = lambda: self.step.runGlob("*.pyc") self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() class ShellMixinExample(buildstep.ShellMixin, buildstep.BuildStep): # note that this is straight out of cls-buildsteps.rst def __init__(self, cleanupScript='./cleanup.sh', **kwargs): self.cleanupScript = cleanupScript kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command']) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand( command=[self.cleanupScript]) yield self.runCommand(cmd) if cmd.didFail(): cmd = yield self.makeRemoteShellCommand( command=[self.cleanupScript, '--force'], logEnviron=False) yield self.runCommand(cmd) return cmd.results() class SimpleShellCommand(buildstep.ShellMixin, buildstep.BuildStep): def __init__(self, makeRemoteShellCommandKwargs=None, **kwargs): self.makeRemoteShellCommandKwargs = makeRemoteShellCommandKwargs or {} kwargs = self.setupShellMixin(kwargs) super().__init__(**kwargs) @defer.inlineCallbacks def run(self): cmd = yield self.makeRemoteShellCommand(**self.makeRemoteShellCommandKwargs) yield self.runCommand(cmd) return cmd.results() class TestShellMixin(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() yield 
self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_setupShellMixin_bad_arg(self): mixin = ShellMixinExample() with self.assertRaisesConfigError( "invalid ShellMixinExample argument invarg"): mixin.setupShellMixin({'invarg': 13}) def test_setupShellMixin_prohibited_arg(self): mixin = ShellMixinExample() with self.assertRaisesConfigError( "invalid ShellMixinExample argument logfiles"): mixin.setupShellMixin({'logfiles': None}, prohibitArgs=['logfiles']) def test_setupShellMixin_not_new_style(self): self.patch(ShellMixinExample, 'isNewStyle', lambda self: False) with self.assertRaises(AssertionError): ShellMixinExample() def test_constructor_defaults(self): class MySubclass(ShellMixinExample): timeout = 9999 # ShellMixin arg self.assertEqual(MySubclass().timeout, 9999) self.assertEqual(MySubclass(timeout=88).timeout, 88) # BuildStep arg self.assertEqual(MySubclass().logEncoding, None) self.assertEqual(MySubclass(logEncoding='latin-1').logEncoding, 'latin-1') self.assertEqual(MySubclass().description, None) self.assertEqual(MySubclass(description='charming').description, ['charming']) @defer.inlineCallbacks def test_example(self): self.setupStep(ShellMixinExample(), wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', command=['./cleanup.sh']) + Expect.log('stdio', stderr="didn't go so well\n") + 1, ExpectShell(workdir='build', command=['./cleanup.sh', '--force'], logEnviron=False) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_extra_logfile(self): self.setupStep(ShellMixinExample( logfiles={'cleanup': 'cleanup.log'}), wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', command=['./cleanup.sh'], logfiles={'cleanup': 'cleanup.log'}) + Expect.log('cleanup', stdout='cleaning\ncleaned\n') + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('cleanup').stdout, 'cleaning\ncleaned\n') @defer.inlineCallbacks def test_example_build_workdir(self): self.setupStep(ShellMixinExample(), wantDefaultWorkdir=False) self.build.workdir = '/alternate' self.expectCommands( ExpectShell(workdir='/alternate', command=['./cleanup.sh']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_build_workdir_callable(self): self.setupStep(ShellMixinExample(), wantDefaultWorkdir=False) self.build.workdir = lambda x: '/alternate' self.expectCommands( ExpectShell(workdir='/alternate', command=['./cleanup.sh']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_build_workdir_rendereable(self): self.setupStep(ShellMixinExample(), wantDefaultWorkdir=False) self.build.workdir = properties.Property("myproperty") self.properties.setProperty("myproperty", "/myproperty", "test") self.expectCommands( ExpectShell(workdir='/myproperty', command=['./cleanup.sh']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_build_workdir_callable_attribute_error(self): self.setupStep(ShellMixinExample(), wantDefaultWorkdir=False) self.build.workdir = lambda x: x.p # will raise AttributeError self.expectException(buildstep.CallableAttributeError) yield self.runStep() @defer.inlineCallbacks def test_example_step_workdir(self): self.setupStep(ShellMixinExample(workdir='/alternate')) self.build.workdir = '/overridden' self.expectCommands( ExpectShell(workdir='/alternate', command=['./cleanup.sh']) + 0, ) 
self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_step_renderable_workdir(self): @renderer def rendered_workdir(_): return '/alternate' self.setupStep(ShellMixinExample(workdir=rendered_workdir)) self.build.workdir = '/overridden' self.expectCommands( ExpectShell(workdir='/alternate', command=['./cleanup.sh']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_override_workdir(self): # Test that makeRemoteShellCommand(workdir=X) works. self.setupStep(SimpleShellCommand( makeRemoteShellCommandKwargs={'workdir': '/alternate'}, command=['foo', properties.Property('bar', 'BAR')])) self.expectCommands( ExpectShell(workdir='/alternate', command=['foo', 'BAR']) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_env(self): self.setupStep( ShellMixinExample(env={'BAR': 'BAR'}), wantDefaultWorkdir=False) self.build.builder.config.env = {'FOO': 'FOO'} self.expectCommands( ExpectShell(workdir='build', command=['./cleanup.sh'], env={'FOO': 'FOO', 'BAR': 'BAR'}) + 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() @defer.inlineCallbacks def test_example_old_worker(self): self.setupStep(ShellMixinExample(usePTY=False, interruptSignal='DIE'), worker_version={'*': "1.1"}, wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', command=['./cleanup.sh']) + # note missing parameters 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').header, 'NOTE: worker does not allow master to override usePTY\n' 'NOTE: worker does not allow master to specify interruptSignal\n') @defer.inlineCallbacks def test_example_new_worker(self): self.setupStep(ShellMixinExample(usePTY=False, interruptSignal='DIE'), worker_version={'*': "3.0"}, wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', usePTY=False, command=['./cleanup.sh']) + # note missing parameters 0, ) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertEqual(self.step.getLog('stdio').header, '') @defer.inlineCallbacks def test_description(self): self.setupStep(SimpleShellCommand( command=['foo', properties.Property('bar', 'BAR')]), wantDefaultWorkdir=False) self.expectCommands( ExpectShell(workdir='build', command=['foo', 'BAR']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="'foo BAR'") yield self.runStep() def test_getResultSummary(self): self.setupStep(SimpleShellCommand(command=['a', ['b', 'c']])) self.assertEqual(self.step.getResultSummary(), {'step': "'a b ...'"}) buildbot-2.6.0/master/buildbot/test/unit/test_process_cache.py000066400000000000000000000040671361162603000245770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

import mock

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process import cache


class CacheManager(unittest.TestCase):

    def setUp(self):
        self.caches = cache.CacheManager()

    def make_config(self, **kwargs):
        cfg = mock.Mock()
        cfg.caches = kwargs
        return cfg

    def test_get_cache_idempotency(self):
        foo_cache = self.caches.get_cache("foo", None)
        bar_cache = self.caches.get_cache("bar", None)
        foo_cache2 = self.caches.get_cache("foo", None)
        self.assertIdentical(foo_cache, foo_cache2)
        self.assertNotIdentical(foo_cache, bar_cache)

    @defer.inlineCallbacks
    def test_reconfigServiceWithBuildbotConfig(self):
        # load config with one cache loaded and the other not
        foo_cache = self.caches.get_cache("foo", None)
        yield self.caches.reconfigServiceWithBuildbotConfig(
            self.make_config(foo=5, bar=6, bing=11))
        bar_cache = self.caches.get_cache("bar", None)
        self.assertEqual((foo_cache.max_size, bar_cache.max_size), (5, 6))

    def test_get_metrics(self):
        self.caches.get_cache("foo", None)
        self.assertIn('foo', self.caches.get_metrics())
        metric = self.caches.get_metrics()['foo']
        for k in 'hits', 'refhits', 'misses', 'max_size':
            self.assertIn(k, metric)
buildbot-2.6.0/master/buildbot/test/unit/test_process_debug.py000066400000000000000000000045221361162603000246160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import mock

from twisted.internet import defer
from twisted.trial import unittest

from buildbot import config
from buildbot.process import debug
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import service


class FakeManhole(service.AsyncService):
    pass


class TestDebugServices(TestReactorMixin, unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        self.master = mock.Mock(name='master')
        self.config = config.MasterConfig()

    @defer.inlineCallbacks
    def test_reconfigService_manhole(self):
        master = fakemaster.make_master(self)
        ds = debug.DebugServices()
        yield ds.setServiceParent(master)
        yield master.startService()

        # start off with no manhole
        yield ds.reconfigServiceWithBuildbotConfig(self.config)

        # set a manhole, fire it up
        self.config.manhole = manhole = FakeManhole()
        yield ds.reconfigServiceWithBuildbotConfig(self.config)
        self.assertTrue(manhole.running)
        self.assertIdentical(manhole.master, master)

        # unset it, see it stop
        self.config.manhole = None
        yield ds.reconfigServiceWithBuildbotConfig(self.config)
        self.assertFalse(manhole.running)
        self.assertIdentical(manhole.master, None)

        # re-start to test stopService
        self.config.manhole = manhole
        yield ds.reconfigServiceWithBuildbotConfig(self.config)

        # disown the service, and see that it unregisters
        yield ds.disownServiceParent()
        self.assertFalse(manhole.running)
        self.assertIdentical(manhole.master, None)
buildbot-2.6.0/master/buildbot/test/unit/test_process_factory.py000066400000000000000000000156341361162603000252050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from random import choice
from string import ascii_uppercase

from twisted.trial import unittest

from buildbot.process.buildstep import BuildStep
from buildbot.process.buildstep import _BuildStepFactory
from buildbot.process.factory import BuildFactory
from buildbot.process.factory import GNUAutoconf
from buildbot.process.factory import s
from buildbot.steps.shell import Configure


class TestBuildFactory(unittest.TestCase):

    def setUp(self):
        self.factory = BuildFactory()

    def test_init(self):
        step = BuildStep()
        self.factory = BuildFactory([step])
        self.assertEqual(self.factory.steps, [_BuildStepFactory(BuildStep)])

    def test_addStep(self):
        # create a random string that will probably not collide
        # with what is already in the factory
        string = ''.join(choice(ascii_uppercase) for x in range(6))
        length = len(self.factory.steps)
        step = BuildStep(name=string)
        self.factory.addStep(step)
        # check that the number of steps grew by one
        self.assertEqual(length + 1, len(self.factory.steps))
        # check that the 'right' step was added to the factory
        self.assertEqual(self.factory.steps[-1],
                         _BuildStepFactory(BuildStep, name=string))

    def test_addStep_deprecated_withArguments(self):
        """
        Passing keyword arguments to L{BuildFactory.addStep} is deprecated;
        the arguments are passed on to the first argument to construct a step.
        """
        self.factory.addStep(BuildStep, name='test')
        self.assertEqual(self.factory.steps[-1],
                         _BuildStepFactory(BuildStep, name='test'))

        warnings = self.flushWarnings(
            [self.test_addStep_deprecated_withArguments])
        self.assertEqual(len(warnings), 1)
        self.assertEqual(warnings[0]['category'], DeprecationWarning)

    def test_addStep_deprecated(self):
        """
        Passing keyword arguments to L{BuildFactory.addStep} is deprecated;
        the arguments are passed on to the first argument to construct a step.
        """
        self.factory.addStep(BuildStep)
        self.assertEqual(self.factory.steps[-1], _BuildStepFactory(BuildStep))

        warnings = self.flushWarnings([self.test_addStep_deprecated])
        self.assertEqual(len(warnings), 1)
        self.assertEqual(warnings[0]['category'], DeprecationWarning)

    def test_s(self):
        """
        L{s} is deprecated, but it still passes keyword arguments to the first
        argument to construct a step.
        """
        stepFactory = s(BuildStep, name='test')
        self.assertEqual(
            stepFactory, _BuildStepFactory(BuildStep, name='test'))
        warnings = self.flushWarnings([self.test_s])
        self.assertEqual(len(warnings), 1)
        self.assertEqual(warnings[0]['category'], DeprecationWarning)

    def test_addStep_notAStep(self):
        # This fails because object isn't adaptable to IBuildStepFactory
        with self.assertRaises(TypeError):
            self.factory.addStep(object())

    def test_addStep_ArgumentsInTheWrongPlace(self):
        with self.assertRaises(TypeError):
            self.factory.addStep(BuildStep(), name="name")
        # this also raises a deprecation error, which we don't care about (see
        # test_s)
        self.flushWarnings()

    def test_addSteps(self):
        self.factory.addSteps([BuildStep(), BuildStep()])
        self.assertEqual(self.factory.steps[-2:],
                         [_BuildStepFactory(BuildStep),
                          _BuildStepFactory(BuildStep)])


class TestGNUAutoconf(TestBuildFactory):

    def setUp(self):
        self.factory = GNUAutoconf(source=BuildStep())

    def test_init(self):
        # actual initialization is already done by setUp
        configurePresent = False
        compilePresent = False
        checkPresent = False
        distcheckPresent = False
        for step in self.factory.steps:
            if isinstance(step.buildStep(), Configure):
                configurePresent = True
            # the following checks are rather hairy and should be
            # rewritten to depend less on the implementation.
            try:
                if step.buildStep().command == ['make', 'all']:
                    compilePresent = True
                if step.buildStep().command == ['make', 'check']:
                    checkPresent = True
                if step.buildStep().command == ['make', 'distcheck']:
                    distcheckPresent = True
            except (AttributeError, KeyError):
                pass

        self.assertTrue(configurePresent)
        self.assertTrue(compilePresent)
        self.assertTrue(checkPresent)
        self.assertTrue(distcheckPresent)

    def test_init_none(self):
        """Default steps can be uninitialized by passing None"""
        self.factory = GNUAutoconf(source=BuildStep(), compile=None,
                                   test=None, distcheck=None)
        for step in self.factory.steps:
            try:
                cmd = step.buildStep().command
                self.assertNotIn(cmd, [['make', 'all'], ['make', 'check'],
                                       ['make', 'distcheck']],
                                 "Build step %s should not be present." % cmd)
            except (AttributeError, KeyError):
                pass

    def test_init_reconf(self):
        # test reconf = True
        self.factory = GNUAutoconf(source=BuildStep(), reconf=True)
        self.test_init()
        reconfPresent = False
        selfreconfPresent = False
        for step in self.factory.steps:
            try:
                if step.buildStep().command[0] == 'autoreconf':
                    reconfPresent = True
            except (AttributeError, KeyError):
                pass
        self.assertTrue(reconfPresent)

        # test setting your own reconfiguration step
        self.factory = GNUAutoconf(source=BuildStep(),
                                   reconf=['notsoautoreconf'])
        self.test_init()
        for step in self.factory.steps:
            try:
                if step.buildStep().command == ['notsoautoreconf']:
                    selfreconfPresent = True
            except (AttributeError, KeyError):
                pass
        self.assertTrue(selfreconfPresent)
buildbot-2.6.0/master/buildbot/test/unit/test_process_log.py000066400000000000000000000261321361162603000243120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.process import log from buildbot.test.fake import fakemaster from buildbot.test.fake import logfile as fakelogfile from buildbot.test.util import interfaces from buildbot.test.util.misc import TestReactorMixin class Tests(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def makeLog(self, type, logEncoding='utf-8'): logid = yield self.master.data.updates.addLog( stepid=27, name='testlog', type=str(type)) return log.Log.new(self.master, 'testlog', type, logid, logEncoding) @defer.inlineCallbacks def test_creation(self): for type in 'ths': yield self.makeLog(type) def test_logDecodeFunctionFromConfig(self): otilde = '\u00f5' otilde_utf8 = otilde.encode('utf-8') otilde_latin1 = otilde.encode('latin1') invalid_utf8 = b'\xff' replacement = '\ufffd' f = log.Log._decoderFromString('latin-1') self.assertEqual(f(otilde_latin1), otilde) f = log.Log._decoderFromString('utf-8') self.assertEqual(f(otilde_utf8), otilde) self.assertEqual(f(invalid_utf8), replacement) f = log.Log._decoderFromString(lambda s: str(s[::-1])) self.assertEqual(f('abc'), 'cba') @defer.inlineCallbacks def test_updates_plain(self): _log = yield self.makeLog('t') _log.addContent('hello\n') _log.addContent('hello ') _log.addContent('cruel ') _log.addContent('world\nthis is a second line') # unfinished _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid], { 'content': ['hello\n', 'hello cruel world\n', 'this is a second line\n'], 'finished': True, 'type': 't', 'name': 'testlog', }) @defer.inlineCallbacks def test_updates_different_encoding(self): _log = yield self.makeLog('t', logEncoding='latin-1') # 0xa2 is latin-1 encoding for CENT SIGN _log.addContent('$ and \xa2\n') _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid]['content'], ['$ and \N{CENT SIGN}\n']) @defer.inlineCallbacks def test_updates_unicode_input(self): _log = yield self.makeLog('t', logEncoding='something-invalid') _log.addContent('\N{SNOWMAN}\n') _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid]['content'], ['\N{SNOWMAN}\n']) @defer.inlineCallbacks def test_subscription_plain(self): _log = yield self.makeLog('t') calls = [] _log.subscribe(lambda stream, content: calls.append((stream, content))) self.assertEqual(calls, []) yield _log.addContent('hello\n') self.assertEqual(calls, [(None, 'hello\n')]) calls = [] yield _log.addContent('hello ') self.assertEqual(calls, []) yield _log.addContent('cruel ') self.assertEqual(calls, []) yield _log.addContent('world\nthis is a second line\n') self.assertEqual(calls, [ (None, 'hello cruel world\nthis is a second line\n')]) calls = [] yield _log.finish() self.assertEqual(calls, [(None, None)]) @defer.inlineCallbacks def test_subscription_unsubscribe(self): _log = yield self.makeLog('t') sub_fn = mock.Mock() sub = _log.subscribe(sub_fn) sub.unsubscribe() yield _log.finish() sub_fn.assert_not_called() @defer.inlineCallbacks def test_subscription_stream(self): _log = yield self.makeLog('s') calls = [] _log.subscribe(lambda stream, content: calls.append((stream, content))) self.assertEqual(calls, []) yield _log.addStdout('hello\n') self.assertEqual(calls, [('o', 'hello\n')]) calls = [] yield _log.addStdout('hello ') self.assertEqual(calls, []) yield _log.addStdout('cruel ') 
self.assertEqual(calls, []) yield _log.addStderr('!!\n') self.assertEqual(calls, [('e', '!!\n')]) calls = [] yield _log.addHeader('**\n') self.assertEqual(calls, [('h', '**\n')]) calls = [] yield _log.addStdout('world\nthis is a second line') # unfinished self.assertEqual(calls, [ ('o', 'hello cruel world\n')]) calls = [] yield _log.finish() self.assertEqual(calls, [ ('o', 'this is a second line\n'), (None, None)]) @defer.inlineCallbacks def test_updates_stream(self): _log = yield self.makeLog('s') _log.addStdout('hello\n') _log.addStdout('hello ') _log.addStderr('oh noes!\n') _log.addStdout('cruel world\n') _log.addStderr('bad things!') # unfinished _log.finish() self.assertEqual(self.master.data.updates.logs[_log.logid], { 'content': ['ohello\n', 'eoh noes!\n', 'ohello cruel world\n', 'ebad things!\n'], 'finished': True, 'name': 'testlog', 'type': 's', }) @defer.inlineCallbacks def test_isFinished(self): _log = yield self.makeLog('s') self.assertFalse(_log.isFinished()) yield _log.finish() self.assertTrue(_log.isFinished()) @defer.inlineCallbacks def test_waitUntilFinished(self): _log = yield self.makeLog('s') d = _log.waitUntilFinished() self.assertFalse(d.called) yield _log.finish() self.assertTrue(d.called) class InterfaceTests(interfaces.InterfaceTests): # for compatibility between old-style and new-style steps, both # buildbot.status.logfile.LogFile and buildbot.process.log.StreamLog must # meet this interface, at least until support for old-style steps is # removed. # ILogFile def test_signature_addStdout(self): @self.assertArgSpecMatches(self.log.addStdout) def addStdout(self, text): pass def test_signature_addStderr(self): @self.assertArgSpecMatches(self.log.addStderr) def addStderr(self, text): pass def test_signature_addHeader(self): @self.assertArgSpecMatches(self.log.addHeader) def addHeader(self, text): pass def test_signature_finish(self): @self.assertArgSpecMatches(self.log.finish) def finish(self): pass # IStatusLog def test_signature_getName(self): @self.assertArgSpecMatches(self.log.getName) def getName(self): pass def test_getName(self): self.assertEqual(self.log.getName(), 'stdio') def test_signature_isFinished(self): @self.assertArgSpecMatches(self.log.isFinished) def isFinished(self): pass def test_signature_waitUntilFinished(self): @self.assertArgSpecMatches(self.log.waitUntilFinished) def waitUntilFinished(self): pass def test_signature_subscribe(self): @self.assertArgSpecMatches(self.log.subscribe) def subscribe(self, callback): pass def test_signature_unsubscribe(self): # method has been removed self.assertFalse(hasattr(self.log, 'unsubscribe')) def test_signature_getStep_removed(self): self.assertFalse(hasattr(self.log, 'getStep')) def test_signature_subscribeConsumer_removed(self): self.assertFalse(hasattr(self.log, 'subscribeConsumer')) def test_signature_hasContents_removed(self): self.assertFalse(hasattr(self.log, 'hasContents')) def test_signature_getText_removed(self): self.assertFalse(hasattr(self.log, 'getText')) def test_signature_readlines_removed(self): self.assertFalse(hasattr(self.log, 'readlines')) def test_signature_getTextWithHeaders_removed(self): self.assertFalse(hasattr(self.log, 'getTextWithHeaders')) def test_signature_getChunks_removed(self): self.assertFalse(hasattr(self.log, 'getChunks')) class TestProcessItfc(unittest.TestCase, InterfaceTests): def setUp(self): self.log = log.StreamLog(mock.Mock(name='master'), 'stdio', 's', 101, str) class TestFakeLogFile(unittest.TestCase, InterfaceTests): def setUp(self): step = mock.Mock(name='fake 
step') step.logobservers = [] self.log = fakelogfile.FakeLogFile('stdio', step) class TestErrorRaised(unittest.TestCase): def instrumentTestedLoggerForError(self, testedLog): def addRawLines(msg): d = defer.Deferred() def raiseError(_): d.errback(RuntimeError('DB has gone away')) reactor.callLater(10 ** (-6), raiseError, None) return d self.patch(testedLog, 'addRawLines', addRawLines) return testedLog @defer.inlineCallbacks def testErrorOnStreamLog(self): tested_log = self.instrumentTestedLoggerForError( log.StreamLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addStdout('msg\n') except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) @defer.inlineCallbacks def testErrorOnPlainLog(self): tested_log = self.instrumentTestedLoggerForError( log.PlainLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addContent('msg\n') except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) @defer.inlineCallbacks def testErrorOnPlainLogFlush(self): tested_log = self.instrumentTestedLoggerForError( log.PlainLog(mock.Mock(name='master'), 'stdio', 's', 101, str)) correct_error_raised = False try: yield tested_log.addContent('msg') yield tested_log.finish() except Exception as e: correct_error_raised = 'DB has gone away' in str(e) self.assertTrue(correct_error_raised) buildbot-2.6.0/master/buildbot/test/unit/test_process_logobserver.py000066400000000000000000000165451361162603000260710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process import log from buildbot.process import logobserver from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class MyLogObserver(logobserver.LogObserver): def __init__(self): self.obs = [] def outReceived(self, data): self.obs.append(('out', data)) def errReceived(self, data): self.obs.append(('err', data)) def headerReceived(self, data): self.obs.append(('hdr', data)) def finishReceived(self): self.obs.append(('fin',)) class TestLogObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = MyLogObserver() lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('world\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('HDR\n') yield _log.finish() self.assertEqual(lo.obs, [ ('out', 'hello\n'), ('err', 'cruel\n'), ('out', 'world\n'), ('out', 'multi\nline\nchunk\n'), ('hdr', 'HDR\n'), ('fin',), ]) class MyLogLineObserver(logobserver.LogLineObserver): def __init__(self): super().__init__() self.obs = [] def outLineReceived(self, data): self.obs.append(('out', data)) def errLineReceived(self, data): self.obs.append(('err', data)) def headerLineReceived(self, data): self.obs.append(('hdr', data)) def finishReceived(self): self.obs.append(('fin',)) class TestLineConsumerLogObesrver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def do_test_sequence(self, consumer): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = logobserver.LineConsumerLogObserver(consumer) lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() @defer.inlineCallbacks def test_sequence_finish(self): results = [] def consumer(): while True: try: stream, line = yield results.append((stream, line)) except GeneratorExit: results.append('finish') raise yield self.do_test_sequence(consumer) self.assertEqual(results, [ ('o', 'hello'), ('e', 'cruel'), ('o', 'multi'), ('o', 'line'), ('o', 'chunk'), ('h', 'H1'), ('h', 'H2'), 'finish', ]) @defer.inlineCallbacks def test_sequence_no_finish(self): results = [] def consumer(): while True: stream, line = yield results.append((stream, line)) yield self.do_test_sequence(consumer) self.assertEqual(results, [ ('o', 'hello'), ('e', 'cruel'), ('o', 'multi'), ('o', 'line'), ('o', 'chunk'), ('h', 'H1'), ('h', 'H2'), ]) class TestLogLineObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = MyLogLineObserver() lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() self.assertEqual(lo.obs, [ ('out', 'hello'), ('err', 
'cruel'), ('out', 'multi'), ('out', 'line'), ('out', 'chunk'), ('hdr', 'H1'), ('hdr', 'H2'), ('fin',), ]) def test_old_setMaxLineLength(self): # this method is gone, but used to be documented, so it's still # callable. Just don't fail. lo = MyLogLineObserver() lo.setMaxLineLength(120939403) class TestOutputProgressObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def test_sequence(self): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo = logobserver.OutputProgressObserver('stdio') step = mock.Mock() lo.setStep(step) lo.setLog(_log) yield _log.addStdout('hello\n') step.setProgress.assert_called_with('stdio', 6) yield _log.finish() class TestBufferObserver(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True) @defer.inlineCallbacks def do_test_sequence(self, lo): logid = yield self.master.data.updates.addLog(1, 'mine', 's') _log = log.Log.new(self.master, 'mine', 's', logid, 'utf-8') lo.setLog(_log) yield _log.addStdout('hello\n') yield _log.addStderr('cruel\n') yield _log.addStdout('multi\nline\nchunk\n') yield _log.addHeader('H1\nH2\n') yield _log.finish() @defer.inlineCallbacks def test_stdout_only(self): lo = logobserver.BufferLogObserver(wantStdout=True, wantStderr=False) yield self.do_test_sequence(lo) self.assertEqual(lo.getStdout(), 'hello\nmulti\nline\nchunk\n') self.assertEqual(lo.getStderr(), '') @defer.inlineCallbacks def test_both(self): lo = logobserver.BufferLogObserver(wantStdout=True, wantStderr=True) yield self.do_test_sequence(lo) self.assertEqual(lo.getStdout(), 'hello\nmulti\nline\nchunk\n') self.assertEqual(lo.getStderr(), 'cruel\n') buildbot-2.6.0/master/buildbot/test/unit/test_process_metrics.py000066400000000000000000000205121361162603000251730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import gc import sys from twisted.internet import task from twisted.trial import unittest from buildbot.process import metrics from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestMetricBase(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.observer = metrics.MetricLogObserver() self.observer.parent = self.master = fakemaster.make_master(self) self.master.config.metrics = dict(log_interval=0, periodic_interval=0) self.observer._reactor = self.reactor self.observer.startService() self.observer.reconfigServiceWithBuildbotConfig(self.master.config) def tearDown(self): if self.observer.running: self.observer.stopService() class TestMetricCountEvent(TestMetricBase): def testIncrement(self): metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 1) metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 2) def testDecrement(self): metrics.MetricCountEvent.log('num_widgets', 1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 1) metrics.MetricCountEvent.log('num_widgets', -1) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 0) def testAbsolute(self): metrics.MetricCountEvent.log('num_widgets', 10, absolute=True) report = self.observer.asDict() self.assertEqual(report['counters']['num_widgets'], 10) def testCountMethod(self): @metrics.countMethod('foo_called') def foo(): return "foo!" for i in range(10): foo() report = self.observer.asDict() self.assertEqual(report['counters']['foo_called'], 10) class TestMetricTimeEvent(TestMetricBase): def testManualEvent(self): metrics.MetricTimeEvent.log('foo_time', 0.001) report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 0.001) def testTimer(self): clock = task.Clock() t = metrics.Timer('foo_time') t._reactor = clock t.start() clock.advance(5) t.stop() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 5) def testStartStopDecorators(self): clock = task.Clock() t = metrics.Timer('foo_time') t._reactor = clock @t.startTimer def foo(): clock.advance(5) return "foo!" @t.stopTimer def bar(): clock.advance(5) return "bar!" foo() bar() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 10) def testTimeMethod(self): clock = task.Clock() @metrics.timeMethod('foo_time', _reactor=clock) def foo(): clock.advance(5) return "foo!" 
foo() report = self.observer.asDict() self.assertEqual(report['timers']['foo_time'], 5) def testAverages(self): data = list(range(10)) for i in data: metrics.MetricTimeEvent.log('foo_time', i) report = self.observer.asDict() self.assertEqual( report['timers']['foo_time'], sum(data) / float(len(data))) class TestPeriodicChecks(TestMetricBase): def testPeriodicCheck(self): # fake out that there's no garbage (since we can't rely on Python # not having any garbage while running tests) self.patch(gc, 'garbage', []) clock = task.Clock() metrics.periodicCheck(_reactor=clock) clock.pump([0.1, 0.1, 0.1]) # We should have 0 reactor delay since we're using a fake clock report = self.observer.asDict() self.assertEqual(report['timers']['reactorDelay'], 0) self.assertEqual(report['counters']['gc.garbage'], 0) self.assertEqual(report['alarms']['gc.garbage'][0], 'OK') def testUncollectable(self): # make some fake garbage self.patch(gc, 'garbage', [1, 2]) clock = task.Clock() metrics.periodicCheck(_reactor=clock) clock.pump([0.1, 0.1, 0.1]) # We should have 0 reactor delay since we're using a fake clock report = self.observer.asDict() self.assertEqual(report['timers']['reactorDelay'], 0) self.assertEqual(report['counters']['gc.garbage'], 2) self.assertEqual(report['alarms']['gc.garbage'][0], 'WARN') def testGetRSS(self): self.assertTrue(metrics._get_rss() > 0) if sys.platform != 'linux': testGetRSS.skip = "only available on linux platforms" class TestReconfig(TestMetricBase): def testReconfig(self): observer = self.observer new_config = self.master.config # starts up without running tasks self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # enable log_interval new_config.metrics = dict(log_interval=10, periodic_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.log_task) self.assertEqual(observer.periodic_task, None) # disable that and enable periodic_interval new_config.metrics = dict(periodic_interval=10, log_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.periodic_task) self.assertEqual(observer.log_task, None) # Make the periodic check run self.reactor.pump([0.1]) # disable the whole listener new_config.metrics = None observer.reconfigServiceWithBuildbotConfig(new_config) self.assertFalse(observer.enabled) self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # disable both new_config.metrics = dict(periodic_interval=0, log_interval=0) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(observer.log_task, None) self.assertEqual(observer.periodic_task, None) # enable both new_config.metrics = dict(periodic_interval=10, log_interval=10) observer.reconfigServiceWithBuildbotConfig(new_config) self.assertTrue(observer.log_task) self.assertTrue(observer.periodic_task) # (service will be stopped by tearDown) class _LogObserver: def __init__(self): self.events = [] def gotEvent(self, event): self.events.append(event) class TestReports(unittest.TestCase): def testMetricCountReport(self): handler = metrics.MetricCountHandler(None) handler.handle({}, metrics.MetricCountEvent('num_foo', 1)) self.assertEqual("Counter num_foo: 1", handler.report()) self.assertEqual({"counters": {"num_foo": 1}}, handler.asDict()) def testMetricTimeReport(self): handler = metrics.MetricTimeHandler(None) handler.handle({}, metrics.MetricTimeEvent('time_foo', 1)) self.assertEqual("Timer time_foo: 1", handler.report()) self.assertEqual({"timers": {"time_foo": 
1}}, handler.asDict()) def testMetricAlarmReport(self): handler = metrics.MetricAlarmHandler(None) handler.handle({}, metrics.MetricAlarmEvent( 'alarm_foo', msg='Uh oh', level=metrics.ALARM_WARN)) self.assertEqual("WARN alarm_foo: Uh oh", handler.report()) self.assertEqual( {"alarms": {"alarm_foo": ("WARN", "Uh oh")}}, handler.asDict()) buildbot-2.6.0/master/buildbot/test/unit/test_process_properties.py000066400000000000000000001771551361162603000257410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from copy import deepcopy import mock from twisted.internet import defer from twisted.python import components from twisted.trial import unittest from zope.interface import implementer from buildbot.interfaces import IProperties from buildbot.interfaces import IRenderable from buildbot.process.buildrequest import TempChange from buildbot.process.buildrequest import TempSourceStamp from buildbot.process.properties import FlattenList from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import PropertiesMixin from buildbot.process.properties import Property from buildbot.process.properties import Transform from buildbot.process.properties import WithProperties from buildbot.process.properties import _Lazy from buildbot.process.properties import _Lookup from buildbot.process.properties import _SourceStampDict from buildbot.process.properties import renderer from buildbot.test.fake.fakebuild import FakeBuild from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.properties import ConstantRenderable class FakeSource: def __init__(self): self.branch = None self.codebase = '' self.project = '' self.repository = '' self.revision = None def asDict(self): ds = { 'branch': self.branch, 'codebase': self.codebase, 'project': self.project, 'repository': self.repository, 'revision': self.revision } return ds @implementer(IRenderable) class DeferredRenderable: def __init__(self): self.d = defer.Deferred() def getRenderingFor(self, build): return self.d def callback(self, value): self.d.callback(value) class TestPropertyMap(unittest.TestCase): """ Test the behavior of PropertyMap, using the external interface provided by WithProperties. 
""" def setUp(self): self.props = Properties( prop_str='a-string', prop_none=None, prop_list=['a', 'b'], prop_zero=0, prop_one=1, prop_false=False, prop_true=True, prop_empty='', ) self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def doTestSimpleWithProperties(self, fmtstring, expect, **kwargs): res = yield self.build.render(WithProperties(fmtstring, **kwargs)) self.assertEqual(res, "%s" % expect) def testSimpleStr(self): return self.doTestSimpleWithProperties('%(prop_str)s', 'a-string') def testSimpleNone(self): # None is special-cased to become an empty string return self.doTestSimpleWithProperties('%(prop_none)s', '') def testSimpleList(self): return self.doTestSimpleWithProperties('%(prop_list)s', ['a', 'b']) def testSimpleZero(self): return self.doTestSimpleWithProperties('%(prop_zero)s', 0) def testSimpleOne(self): return self.doTestSimpleWithProperties('%(prop_one)s', 1) def testSimpleFalse(self): return self.doTestSimpleWithProperties('%(prop_false)s', False) def testSimpleTrue(self): return self.doTestSimpleWithProperties('%(prop_true)s', True) def testSimpleEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty)s', '') def testSimpleUnset(self): d = self.build.render(WithProperties('%(prop_nosuch)s')) return self.assertFailure(d, KeyError) def testColonMinusSet(self): return self.doTestSimpleWithProperties('%(prop_str:-missing)s', 'a-string') def testColonMinusNone(self): # None is special-cased here, too return self.doTestSimpleWithProperties('%(prop_none:-missing)s', '') def testColonMinusZero(self): return self.doTestSimpleWithProperties('%(prop_zero:-missing)s', 0) def testColonMinusOne(self): return self.doTestSimpleWithProperties('%(prop_one:-missing)s', 1) def testColonMinusFalse(self): return self.doTestSimpleWithProperties('%(prop_false:-missing)s', False) def testColonMinusTrue(self): return self.doTestSimpleWithProperties('%(prop_true:-missing)s', True) def testColonMinusEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:-missing)s', '') def testColonMinusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:-missing)s', 'missing') def testColonTildeSet(self): return self.doTestSimpleWithProperties('%(prop_str:~missing)s', 'a-string') def testColonTildeNone(self): # None is special-cased *differently* for ~: return self.doTestSimpleWithProperties('%(prop_none:~missing)s', 'missing') def testColonTildeZero(self): return self.doTestSimpleWithProperties('%(prop_zero:~missing)s', 'missing') def testColonTildeOne(self): return self.doTestSimpleWithProperties('%(prop_one:~missing)s', 1) def testColonTildeFalse(self): return self.doTestSimpleWithProperties('%(prop_false:~missing)s', 'missing') def testColonTildeTrue(self): return self.doTestSimpleWithProperties('%(prop_true:~missing)s', True) def testColonTildeEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:~missing)s', 'missing') def testColonTildeUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~missing)s', 'missing') def testColonPlusSet(self): return self.doTestSimpleWithProperties('%(prop_str:+present)s', 'present') def testColonPlusNone(self): return self.doTestSimpleWithProperties('%(prop_none:+present)s', 'present') def testColonPlusZero(self): return self.doTestSimpleWithProperties('%(prop_zero:+present)s', 'present') def testColonPlusOne(self): return self.doTestSimpleWithProperties('%(prop_one:+present)s', 'present') def testColonPlusFalse(self): return self.doTestSimpleWithProperties('%(prop_false:+present)s', 'present') def 
testColonPlusTrue(self): return self.doTestSimpleWithProperties('%(prop_true:+present)s', 'present') def testColonPlusEmpty(self): return self.doTestSimpleWithProperties('%(prop_empty:+present)s', 'present') def testColonPlusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:+present)s', '') @defer.inlineCallbacks def testClearTempValues(self): yield self.doTestSimpleWithProperties('', '', prop_temp=lambda b: 'present') yield self.doTestSimpleWithProperties('%(prop_temp:+present)s', '') def testTempValue(self): return self.doTestSimpleWithProperties('%(prop_temp)s', 'present', prop_temp=lambda b: 'present') def testTempValueOverrides(self): return self.doTestSimpleWithProperties('%(prop_one)s', 2, prop_one=lambda b: 2) def testTempValueColonMinusSet(self): return self.doTestSimpleWithProperties('%(prop_one:-missing)s', 2, prop_one=lambda b: 2) def testTempValueColonMinusUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:-missing)s', 'temp', prop_nosuch=lambda b: 'temp') def testTempValueColonTildeTrueSet(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'temp', prop_false=lambda b: 'temp') def testTempValueColonTildeTrueUnset(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'temp', prop_nosuch=lambda b: 'temp') def testTempValueColonTildeFalseFalse(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda b: False) def testTempValueColonTildeTrueFalse(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: False) def testTempValueColonTildeNoneFalse(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: False) def testTempValueColonTildeFalseZero(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda b: 0) def testTempValueColonTildeTrueZero(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: 0) def testTempValueColonTildeNoneZero(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: 0) def testTempValueColonTildeFalseBlank(self): return self.doTestSimpleWithProperties('%(prop_false:~nontrue)s', 'nontrue', prop_false=lambda b: '') def testTempValueColonTildeTrueBlank(self): return self.doTestSimpleWithProperties('%(prop_true:~nontrue)s', True, prop_true=lambda b: '') def testTempValueColonTildeNoneBlank(self): return self.doTestSimpleWithProperties('%(prop_nosuch:~nontrue)s', 'nontrue', prop_nosuch=lambda b: '') def testTempValuePlusSetSet(self): return self.doTestSimpleWithProperties('%(prop_one:+set)s', 'set', prop_one=lambda b: 2) def testTempValuePlusUnsetSet(self): return self.doTestSimpleWithProperties('%(prop_nosuch:+set)s', 'set', prop_nosuch=lambda b: 1) class TestInterpolateConfigure(unittest.TestCase, ConfigErrorsMixin): """ Test that Interpolate reports errors in the interpolation string at configure time. 
""" def test_invalid_args_and_kwargs(self): with self.assertRaisesConfigError("Interpolate takes either positional"): Interpolate("%s %(foo)s", 1, foo=2) def test_invalid_selector(self): with self.assertRaisesConfigError( "invalid Interpolate selector 'garbage'"): Interpolate("%(garbage:test)s") def test_no_selector(self): with self.assertRaisesConfigError( "invalid Interpolate substitution without selector 'garbage'"): Interpolate("%(garbage)s") def test_invalid_default_type(self): with self.assertRaisesConfigError( "invalid Interpolate default type '@'"): Interpolate("%(prop:some_prop:@wacky)s") def test_nested_invalid_selector(self): with self.assertRaisesConfigError( "invalid Interpolate selector 'garbage'"): Interpolate("%(prop:some_prop:~%(garbage:test)s)s") def test_colon_ternary_missing_delimeter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one' with delimiter ':'"): Interpolate("echo '%(prop:P:?:one)s'") def test_colon_ternary_paren_delimiter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one(:)' with delimiter ':'"): Interpolate("echo '%(prop:P:?:one(:))s'") def test_colon_ternary_hash_bad_delimeter(self): with self.assertRaisesConfigError( "invalid Interpolate ternary expression 'one' with delimiter '|'"): Interpolate("echo '%(prop:P:#?|one)s'") def test_prop_invalid_character(self): with self.assertRaisesConfigError( "Property name must be alphanumeric for prop Interpolation 'a+a'"): Interpolate("echo '%(prop:a+a)s'") def test_kw_invalid_character(self): with self.assertRaisesConfigError( "Keyword must be alphanumeric for kw Interpolation 'a+a'"): Interpolate("echo '%(kw:a+a)s'") def test_src_codebase_invalid_character(self): with self.assertRaisesConfigError( "Codebase must be alphanumeric for src Interpolation 'a+a:a'"): Interpolate("echo '%(src:a+a:a)s'") def test_src_attr_invalid_character(self): with self.assertRaisesConfigError( "Attribute must be alphanumeric for src Interpolation 'a:a+a'"): Interpolate("echo '%(src:a:a+a)s'") def test_src_missing_attr(self): with self.assertRaisesConfigError( "Must specify both codebase and attr"): Interpolate("echo '%(src:a)s'") class TestInterpolatePositional(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_string(self): command = Interpolate("test %s", "one fish") rendered = yield self.build.render(command) self.assertEqual(rendered, "test one fish") @defer.inlineCallbacks def test_twoString(self): command = Interpolate("test %s, %s", "one fish", "two fish") rendered = yield self.build.render(command) self.assertEqual(rendered, "test one fish, two fish") def test_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%s'", renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'red fish'") renderable.callback("red fish") return d @defer.inlineCallbacks def test_renderable(self): self.props.setProperty("buildername", "blue fish", "test") command = Interpolate("echo '%s'", Property("buildername")) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'blue fish'") class TestInterpolateProperties(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_properties(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("echo buildby-%(prop:buildername)s") rendered = yield self.build.render(command) 
self.assertEqual(rendered, "echo buildby-winbld") @defer.inlineCallbacks def test_properties_newline(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("aa\n%(prop:buildername)s\nbb") rendered = yield self.build.render(command) self.assertEqual(rendered, "aa\nwinbld\nbb") @defer.inlineCallbacks def test_property_not_set(self): command = Interpolate("echo buildby-%(prop:buildername)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-") @defer.inlineCallbacks def test_property_colon_minus(self): command = Interpolate("echo buildby-%(prop:buildername:-blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-blddef") @defer.inlineCallbacks def test_deepcopy(self): # After a deepcopy, Interpolate instances used to lose track # that they didn't have a ``hasKey`` value # see http://trac.buildbot.net/ticket/3505 self.props.setProperty("buildername", "linux4", "test") command = deepcopy( Interpolate("echo buildby-%(prop:buildername:-blddef)s")) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-linux4") @defer.inlineCallbacks def test_property_colon_tilde_true(self): self.props.setProperty("buildername", "winbld", "test") command = Interpolate("echo buildby-%(prop:buildername:~blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-winbld") @defer.inlineCallbacks def test_property_colon_tilde_false(self): self.props.setProperty("buildername", "", "test") command = Interpolate("echo buildby-%(prop:buildername:~blddef)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-blddef") @defer.inlineCallbacks def test_property_colon_plus(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo %(prop:project:+projectdefined)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo projectdefined") @defer.inlineCallbacks def test_nested_property(self): self.props.setProperty("project", "so long!", "test") command = Interpolate("echo '%(prop:missing:~%(prop:project)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'so long!'") @defer.inlineCallbacks def test_property_substitute_recursively(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo '%(prop:no_such:-%(prop:project)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj1'") @defer.inlineCallbacks def test_property_colon_ternary_present(self): self.props.setProperty("project", "proj1", "test") command = Interpolate("echo %(prop:project:?:defined:missing)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo defined") @defer.inlineCallbacks def test_property_colon_ternary_missing(self): command = Interpolate("echo %(prop:project:?|defined|missing)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo missing") @defer.inlineCallbacks def test_property_colon_ternary_hash_true(self): self.props.setProperty("project", "winbld", "test") command = Interpolate("echo buildby-%(prop:project:#?:T:F)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo buildby-T") @defer.inlineCallbacks def test_property_colon_ternary_hash_false(self): self.props.setProperty("project", "", "test") command = Interpolate("echo buildby-%(prop:project:#?|T|F)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 
buildby-F") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_true(self): self.props.setProperty("P", "present", "test") self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate("echo '%(prop:P:?|%(prop:one)s|%(prop:two)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj1'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_false(self): self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate("echo '%(prop:P:?|%(prop:one)s|%(prop:two)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'proj2'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_delimited_true(self): self.props.setProperty("P", "present", "test") self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate( "echo '%(prop:P:?|%(prop:one:?|true|false)s|%(prop:two:?|false|true)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'true'") @defer.inlineCallbacks def test_property_colon_ternary_substitute_recursively_delimited_false(self): self.props.setProperty("one", "proj1", "test") self.props.setProperty("two", "proj2", "test") command = Interpolate( "echo '%(prop:P:?|%(prop:one:?|true|false)s|%(prop:two:?|false|true)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'false'") class TestInterpolateSrc(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) sa = FakeSource() wfb = FakeSource() sc = FakeSource() sa.repository = 'cvs://A..' sa.codebase = 'cbA' sa.project = "Project" self.build.sources['cbA'] = sa wfb.repository = 'cvs://B..' wfb.codebase = 'cbB' wfb.project = "Project" self.build.sources['cbB'] = wfb sc.repository = 'cvs://C..' sc.codebase = 'cbC' sc.project = None self.build.sources['cbC'] = sc @defer.inlineCallbacks def test_src(self): command = Interpolate("echo %(src:cbB:repository)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_src_src(self): command = Interpolate( "echo %(src:cbB:repository)s %(src:cbB:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B.. 
Project") @defer.inlineCallbacks def test_src_attr_empty(self): command = Interpolate("echo %(src:cbC:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_src_attr_codebase_notfound(self): command = Interpolate("echo %(src:unknown_codebase:project)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_src_colon_plus_false(self): command = Interpolate("echo '%(src:cbD:project:+defaultrepo)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ''") @defer.inlineCallbacks def test_src_colon_plus_true(self): command = Interpolate("echo '%(src:cbB:project:+defaultrepo)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'defaultrepo'") @defer.inlineCallbacks def test_src_colon_minus(self): command = Interpolate("echo %(src:cbB:nonattr:-defaultrepo)s") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo defaultrepo") @defer.inlineCallbacks def test_src_colon_minus_false(self): command = Interpolate("echo '%(src:cbC:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ''") @defer.inlineCallbacks def test_src_colon_minus_true(self): command = Interpolate("echo '%(src:cbB:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_minus_codebase_notfound(self): command = Interpolate( "echo '%(src:unknown_codebase:project:-noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") @defer.inlineCallbacks def test_src_colon_tilde_true(self): command = Interpolate("echo '%(src:cbB:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_tilde_false(self): command = Interpolate("echo '%(src:cbC:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") @defer.inlineCallbacks def test_src_colon_tilde_false_src_as_replacement(self): command = Interpolate( "echo '%(src:cbC:project:~%(src:cbA:project)s)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'Project'") @defer.inlineCallbacks def test_src_colon_tilde_codebase_notfound(self): command = Interpolate( "echo '%(src:unknown_codebase:project:~noproject)s'") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'noproject'") class TestInterpolateKwargs(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) sa = FakeSource() sa.repository = 'cvs://A..' sa.codebase = 'cbA' sa.project = None sa.branch = "default" self.build.sources['cbA'] = sa @defer.inlineCallbacks def test_kwarg(self): command = Interpolate("echo %(kw:repository)s", repository="cvs://A..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_kwarg(self): command = Interpolate("echo %(kw:repository)s %(kw:branch)s", repository="cvs://A..", branch="default") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A.. 
default") @defer.inlineCallbacks def test_kwarg_not_mapped(self): command = Interpolate("echo %(kw:repository)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_minus_not_available(self): command = Interpolate("echo %(kw:repository)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_minus_not_available_default(self): command = Interpolate( "echo %(kw:repository:-cvs://A..)s", project="projectA") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_colon_minus_available(self): command = Interpolate( "echo %(kw:repository:-cvs://A..)s", repository="cvs://B..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_tilde_true(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository="cvs://A..") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://A..") @defer.inlineCallbacks def test_kwarg_colon_tilde_false(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository="") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_tilde_none(self): command = Interpolate( "echo %(kw:repository:~cvs://B..)s", repository=None) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwarg_colon_plus_false(self): command = Interpolate( "echo %(kw:repository:+cvs://B..)s", project="project") rendered = yield self.build.render(command) self.assertEqual(rendered, "echo ") @defer.inlineCallbacks def test_kwarg_colon_plus_true(self): command = Interpolate( "echo %(kw:repository:+cvs://B..)s", repository=None) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo cvs://B..") @defer.inlineCallbacks def test_kwargs_colon_minus_false_src_as_replacement(self): command = Interpolate( "echo '%(kw:text:-%(src:cbA:branch)s)s'", notext='ddd') rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'default'") @defer.inlineCallbacks def test_kwargs_renderable(self): command = Interpolate( "echo '%(kw:test)s'", test=ConstantRenderable('testing')) rendered = yield self.build.render(command) self.assertEqual(rendered, "echo 'testing'") def test_kwargs_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%(kw:test)s'", test=renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'testing'") renderable.callback('testing') def test_kwarg_deferred(self): renderable = DeferredRenderable() command = Interpolate("echo '%(kw:project)s'", project=renderable) d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'testing'") renderable.callback('testing') def test_nested_kwarg_deferred(self): renderable = DeferredRenderable() command = Interpolate( "echo '%(kw:missing:~%(kw:fishy)s)s'", missing=renderable, fishy="so long!") d = self.build.render(command) d.addCallback(self.assertEqual, "echo 'so long!'") renderable.callback(False) return d class TestWithProperties(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) def testInvalidParams(self): with self.assertRaises(ValueError): WithProperties("%s %(foo)s", 1, foo=2) 
@defer.inlineCallbacks def testBasic(self): # test basic substitution with WithProperties self.props.setProperty("revision", "47", "test") command = WithProperties("build-%s.tar.gz", "revision") res = yield self.build.render(command) self.assertEqual(res, "build-47.tar.gz") @defer.inlineCallbacks def testDict(self): # test dict-style substitution with WithProperties self.props.setProperty("other", "foo", "test") command = WithProperties("build-%(other)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-foo.tar.gz") @defer.inlineCallbacks def testDictColonMinus(self): # test dict-style substitution with WithProperties self.props.setProperty("prop1", "foo", "test") command = WithProperties( "build-%(prop1:-empty)s-%(prop2:-empty)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-foo-empty.tar.gz") @defer.inlineCallbacks def testDictColonPlus(self): # test dict-style substitution with WithProperties self.props.setProperty("prop1", "foo", "test") command = WithProperties( "build-%(prop1:+exists)s-%(prop2:+exists)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-exists-.tar.gz") @defer.inlineCallbacks def testEmpty(self): # None should render as '' self.props.setProperty("empty", None, "test") command = WithProperties("build-%(empty)s.tar.gz") res = yield self.build.render(command) self.assertEqual(res, "build-.tar.gz") @defer.inlineCallbacks def testRecursiveList(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = [WithProperties("%(x)s %(y)s"), "and", WithProperties("%(y)s %(x)s")] res = yield self.build.render(command) self.assertEqual(res, ["10 20", "and", "20 10"]) @defer.inlineCallbacks def testRecursiveTuple(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = (WithProperties("%(x)s %(y)s"), "and", WithProperties("%(y)s %(x)s")) res = yield self.build.render(command) self.assertEqual(res, ("10 20", "and", "20 10")) @defer.inlineCallbacks def testRecursiveDict(self): self.props.setProperty("x", 10, "test") self.props.setProperty("y", 20, "test") command = {WithProperties("%(x)s %(y)s"): WithProperties("%(y)s %(x)s")} res = yield self.build.render(command) self.assertEqual(res, {"10 20": "20 10"}) @defer.inlineCallbacks def testLambdaSubst(self): command = WithProperties('%(foo)s', foo=lambda _: 'bar') res = yield self.build.render(command) self.assertEqual(res, 'bar') @defer.inlineCallbacks def testLambdaHasattr(self): command = WithProperties('%(foo)s', foo=lambda b: b.hasProperty('x') and 'x' or 'y') res = yield self.build.render(command) self.assertEqual(res, 'y') @defer.inlineCallbacks def testLambdaOverride(self): self.props.setProperty('x', 10, 'test') command = WithProperties('%(x)s', x=lambda _: 20) res = yield self.build.render(command) self.assertEqual(res, '20') def testLambdaCallable(self): with self.assertRaises(ValueError): WithProperties('%(foo)s', foo='bar') @defer.inlineCallbacks def testLambdaUseExisting(self): self.props.setProperty('x', 10, 'test') self.props.setProperty('y', 20, 'test') command = WithProperties( '%(z)s', z=lambda props: props.getProperty('x') + props.getProperty('y')) res = yield self.build.render(command) self.assertEqual(res, '30') @defer.inlineCallbacks def testColon(self): self.props.setProperty('some:property', 10, 'test') command = WithProperties('%(some:property:-with-default)s') res = yield self.build.render(command) self.assertEqual(res, '10') @defer.inlineCallbacks def 
testColon_default(self): command = WithProperties('%(some:property:-with-default)s') res = yield self.build.render(command) self.assertEqual(res, 'with-default') @defer.inlineCallbacks def testColon_colon(self): command = WithProperties('%(some:property:-with:default)s') res = yield self.build.render(command) self.assertEqual(res, 'with:default') class TestProperties(unittest.TestCase): def setUp(self): self.props = Properties() def testDictBehavior(self): # note that dictionary-like behavior is deprecated and not exposed to # users! self.props.setProperty("do-tests", 1, "scheduler") self.props.setProperty("do-install", 2, "scheduler") self.assertTrue('do-tests' in self.props) self.assertEqual(self.props['do-tests'], 1) self.assertEqual(self.props['do-install'], 2) with self.assertRaises(KeyError): self.props['do-nothing'] self.assertEqual(self.props.getProperty('do-install'), 2) self.assertIn('do-tests', self.props) self.assertNotIn('missing-do-tests', self.props) def testAsList(self): self.props.setProperty("happiness", 7, "builder") self.props.setProperty("flames", True, "tester") self.assertEqual(sorted(self.props.asList()), [('flames', True, 'tester'), ('happiness', 7, 'builder')]) def testAsDict(self): self.props.setProperty("msi_filename", "product.msi", 'packager') self.props.setProperty("dmg_filename", "product.dmg", 'packager') self.assertEqual(self.props.asDict(), dict(msi_filename=('product.msi', 'packager'), dmg_filename=('product.dmg', 'packager'))) def testUpdate(self): self.props.setProperty("x", 24, "old") newprops = {'a': 1, 'b': 2} self.props.update(newprops, "new") self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') def testUpdateRuntime(self): self.props.setProperty("x", 24, "old") newprops = {'a': 1, 'b': 2} self.props.update(newprops, "new", runtime=True) self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') self.assertEqual(self.props.runtime, set(['a', 'b'])) def testUpdateFromProperties(self): self.props.setProperty("a", 94, "old") self.props.setProperty("x", 24, "old") newprops = Properties() newprops.setProperty('a', 1, "new") newprops.setProperty('b', 2, "new") self.props.updateFromProperties(newprops) self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') self.assertEqual(self.props.getProperty('a'), 1) self.assertEqual(self.props.getPropertySource('a'), 'new') def testUpdateFromPropertiesNoRuntime(self): self.props.setProperty("a", 94, "old") self.props.setProperty("b", 84, "old") self.props.setProperty("x", 24, "old") newprops = Properties() newprops.setProperty('a', 1, "new", runtime=True) newprops.setProperty('b', 2, "new", runtime=False) newprops.setProperty('c', 3, "new", runtime=True) newprops.setProperty('d', 3, "new", runtime=False) self.props.updateFromPropertiesNoRuntime(newprops) self.assertEqual(self.props.getProperty('a'), 94) self.assertEqual(self.props.getPropertySource('a'), 'old') self.assertEqual(self.props.getProperty('b'), 2) self.assertEqual(self.props.getPropertySource('b'), 'new') self.assertEqual(self.props.getProperty('c'), None) # not updated self.assertEqual(self.props.getProperty('d'), 3) self.assertEqual(self.props.getPropertySource('d'), 'new') 
self.assertEqual(self.props.getProperty('x'), 24) self.assertEqual(self.props.getPropertySource('x'), 'old') def test_setProperty_notJsonable(self): with self.assertRaises(TypeError): self.props.setProperty("project", object, "test") # IProperties methods def test_getProperty(self): self.props.properties['p1'] = (['p', 1], 'test') self.assertEqual(self.props.getProperty('p1'), ['p', 1]) def test_getProperty_default_None(self): self.assertEqual(self.props.getProperty('p1'), None) def test_getProperty_default(self): self.assertEqual(self.props.getProperty('p1', 2), 2) def test_hasProperty_false(self): self.assertFalse(self.props.hasProperty('x')) def test_hasProperty_true(self): self.props.properties['x'] = (False, 'test') self.assertTrue(self.props.hasProperty('x')) def test_has_key_false(self): self.assertFalse('x' in self.props) def test_setProperty(self): self.props.setProperty('x', 'y', 'test') self.assertEqual(self.props.properties['x'], ('y', 'test')) self.assertNotIn('x', self.props.runtime) def test_setProperty_runtime(self): self.props.setProperty('x', 'y', 'test', runtime=True) self.assertEqual(self.props.properties['x'], ('y', 'test')) self.assertIn('x', self.props.runtime) def test_setProperty_no_source(self): # pylint: disable=no-value-for-parameter with self.assertRaises(TypeError): self.props.setProperty('x', 'y') def test_getProperties(self): self.assertIdentical(self.props.getProperties(), self.props) def test_getBuild(self): self.assertIdentical(self.props.getBuild(), self.props.build) def test_unset_sourcestamps(self): with self.assertRaises(AttributeError): self.props.sourcestamps() def test_unset_changes(self): with self.assertRaises(AttributeError): self.props.changes() with self.assertRaises(AttributeError): self.props.files() def test_build_attributes(self): build = FakeBuild(self.props) change = TempChange({'author': 'me', 'files': ['main.c']}) ss = TempSourceStamp({'branch': 'master'}) ss.changes = [change] build.sources[''] = ss self.assertEqual(self.props.sourcestamps[0]['branch'], 'master') self.assertEqual(self.props.changes[0]['author'], 'me') self.assertEqual(self.props.files[0], 'main.c') def test_own_attributes(self): self.props.sourcestamps = [{'branch': 'master'}] self.props.changes = [{'author': 'me', 'files': ['main.c']}] self.assertEqual(self.props.sourcestamps[0]['branch'], 'master') self.assertEqual(self.props.changes[0]['author'], 'me') self.assertEqual(self.props.files[0], 'main.c') @defer.inlineCallbacks def test_render(self): @implementer(IRenderable) class Renderable: def getRenderingFor(self, props): return props.getProperty('x') + 'z' self.props.setProperty('x', 'y', 'test') res = yield self.props.render(Renderable()) self.assertEqual(res, 'yz') class MyPropertiesThing(PropertiesMixin): set_runtime_properties = True def adaptMyProperties(mp): return mp.properties components.registerAdapter(adaptMyProperties, MyPropertiesThing, IProperties) class TestPropertiesMixin(unittest.TestCase): def setUp(self): self.mp = MyPropertiesThing() self.mp.properties = mock.Mock() def test_getProperty(self): self.mp.getProperty('abc') self.mp.properties.getProperty.assert_called_with('abc', None) def xtest_getProperty_default(self): self.mp.getProperty('abc', 'def') self.mp.properties.getProperty.assert_called_with('abc', 'def') def test_hasProperty(self): self.mp.properties.hasProperty.return_value = True self.assertTrue(self.mp.hasProperty('abc')) self.mp.properties.hasProperty.assert_called_with('abc') def test_has_key(self): 
self.mp.properties.hasProperty.return_value = True # getattr because pep8 doesn't like calls to has_key self.assertTrue(getattr(self.mp, 'has_key')('abc')) self.mp.properties.hasProperty.assert_called_with('abc') def test_setProperty(self): self.mp.setProperty('abc', 'def', 'src') self.mp.properties.setProperty.assert_called_with('abc', 'def', 'src', runtime=True) def test_setProperty_no_source(self): # this compatibility is maintained for old code self.mp.setProperty('abc', 'def') self.mp.properties.setProperty.assert_called_with('abc', 'def', 'Unknown', runtime=True) def test_render(self): self.mp.render([1, 2]) self.mp.properties.render.assert_called_with([1, 2]) class TestProperty(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def testIntProperty(self): self.props.setProperty("do-tests", 1, "scheduler") value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, 1) @defer.inlineCallbacks def testStringProperty(self): self.props.setProperty("do-tests", "string", "scheduler") value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, "string") @defer.inlineCallbacks def testMissingProperty(self): value = Property("do-tests") res = yield self.build.render(value) self.assertEqual(res, None) @defer.inlineCallbacks def testDefaultValue(self): value = Property("do-tests", default="Hello!") res = yield self.build.render(value) self.assertEqual(res, "Hello!") @defer.inlineCallbacks def testDefaultValueNested(self): self.props.setProperty("xxx", 'yyy', "scheduler") value = Property("do-tests", default=WithProperties("a-%(xxx)s-b")) res = yield self.build.render(value) self.assertEqual(res, "a-yyy-b") @defer.inlineCallbacks def testIgnoreDefaultValue(self): self.props.setProperty("do-tests", "string", "scheduler") value = Property("do-tests", default="Hello!") res = yield self.build.render(value) self.assertEqual(res, "string") @defer.inlineCallbacks def testIgnoreFalseValue(self): self.props.setProperty("do-tests-string", "", "scheduler") self.props.setProperty("do-tests-int", 0, "scheduler") self.props.setProperty("do-tests-list", [], "scheduler") self.props.setProperty("do-tests-None", None, "scheduler") value = [Property("do-tests-string", default="Hello!"), Property("do-tests-int", default="Hello!"), Property("do-tests-list", default="Hello!"), Property("do-tests-None", default="Hello!")] res = yield self.build.render(value) self.assertEqual(res, ["Hello!"] * 4) @defer.inlineCallbacks def testDefaultWhenFalse(self): self.props.setProperty("do-tests-string", "", "scheduler") self.props.setProperty("do-tests-int", 0, "scheduler") self.props.setProperty("do-tests-list", [], "scheduler") self.props.setProperty("do-tests-None", None, "scheduler") value = [Property("do-tests-string", default="Hello!", defaultWhenFalse=False), Property( "do-tests-int", default="Hello!", defaultWhenFalse=False), Property( "do-tests-list", default="Hello!", defaultWhenFalse=False), Property("do-tests-None", default="Hello!", defaultWhenFalse=False)] res = yield self.build.render(value) self.assertEqual(res, ["", 0, [], None]) def testDeferredDefault(self): default = DeferredRenderable() value = Property("no-such-property", default) d = self.build.render(value) d.addCallback(self.assertEqual, "default-value") default.callback("default-value") return d @defer.inlineCallbacks def testFlattenList(self): self.props.setProperty("do-tests", "string", "scheduler") value = 
FlattenList([Property("do-tests"), ["bla"]]) res = yield self.build.render(value) self.assertEqual(res, ["string", "bla"]) @defer.inlineCallbacks def testFlattenListAdd(self): self.props.setProperty("do-tests", "string", "scheduler") value = FlattenList([Property("do-tests"), ["bla"]]) value = value + FlattenList([Property("do-tests"), ["bla"]]) res = yield self.build.render(value) self.assertEqual(res, ["string", "bla", "string", "bla"]) @defer.inlineCallbacks def testFlattenListAdd2(self): self.props.setProperty("do-tests", "string", "scheduler") value = FlattenList([Property("do-tests"), ["bla"]]) value = value + [Property("do-tests"), ["bla"]] res = yield self.build.render(value) self.assertEqual(res, ["string", "bla", "string", "bla"]) @defer.inlineCallbacks def testCompEq(self): self.props.setProperty("do-tests", "string", "scheduler") result = yield self.build.render(Property("do-tests") == "string") self.assertEqual(result, True) @defer.inlineCallbacks def testCompNe(self): self.props.setProperty("do-tests", "not-string", "scheduler") result = yield self.build.render(Property("do-tests") != "string") self.assertEqual(result, True) @defer.inlineCallbacks def testCompLt(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") < 2) self.assertEqual(result, True) @defer.inlineCallbacks def testCompLe(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") <= 2) self.assertEqual(result, True) @defer.inlineCallbacks def testCompGt(self): self.props.setProperty("do-tests", 3, "scheduler") result = yield self.build.render(Property("do-tests") > 2) self.assertEqual(result, True) @defer.inlineCallbacks def testCompGe(self): self.props.setProperty("do-tests", 3, "scheduler") result = yield self.build.render(Property("do-tests") >= 2) self.assertEqual(result, True) @defer.inlineCallbacks def testStringCompEq(self): self.props.setProperty("do-tests", "string", "scheduler") test_string = "string" result = yield self.build.render(test_string == Property("do-tests")) self.assertEqual(result, True) @defer.inlineCallbacks def testIntCompLe(self): self.props.setProperty("do-tests", 1, "scheduler") test_int = 1 result = yield self.build.render(test_int <= Property("do-tests")) self.assertEqual(result, True) @defer.inlineCallbacks def testPropCompGe(self): self.props.setProperty("do-tests", 1, "scheduler") result = yield self.build.render(Property("do-tests") >= Property("do-tests")) self.assertEqual(result, True) class TestRenderableAdapters(unittest.TestCase): """ Tests for list, tuple and dict renderers. 
""" def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) def test_list_deferred(self): r1 = DeferredRenderable() r2 = DeferredRenderable() d = self.build.render([r1, r2]) d.addCallback(self.assertEqual, ["lispy", "lists"]) r2.callback("lists") r1.callback("lispy") return d def test_tuple_deferred(self): r1 = DeferredRenderable() r2 = DeferredRenderable() d = self.build.render((r1, r2)) d.addCallback(self.assertEqual, ("totally", "tupled")) r2.callback("tupled") r1.callback("totally") return d def test_dict(self): r1 = DeferredRenderable() r2 = DeferredRenderable() k1 = DeferredRenderable() k2 = DeferredRenderable() d = self.build.render({k1: r1, k2: r2}) d.addCallback(self.assertEqual, {"lock": "load", "dict": "lookup"}) k1.callback("lock") r1.callback("load") k2.callback("dict") r2.callback("lookup") return d class Renderer(unittest.TestCase): def setUp(self): self.props = Properties() self.build = FakeBuild(props=self.props) @defer.inlineCallbacks def test_renderer(self): self.props.setProperty("x", "X", "test") def rend(p): return 'x%sx' % p.getProperty('x') res = yield self.build.render(renderer(rend)) self.assertEqual('xXx', res) @defer.inlineCallbacks def test_renderer_called(self): # it's tempting to try to call the decorated function. Don't do that. # It's not a function anymore. def rend(p): return 'x' with self.assertRaises(TypeError): yield self.build.render(renderer(rend)('y')) @defer.inlineCallbacks def test_renderer_decorator(self): self.props.setProperty("x", "X", "test") @renderer def rend(p): return 'x%sx' % p.getProperty('x') res = yield self.build.render(rend) self.assertEqual('xXx', res) @defer.inlineCallbacks def test_renderer_deferred(self): self.props.setProperty("x", "X", "test") def rend(p): return defer.succeed('y%sy' % p.getProperty('x')) res = yield self.build.render(renderer(rend)) self.assertEqual('yXy', res) @defer.inlineCallbacks def test_renderer_fails(self): @defer.inlineCallbacks def rend(p): raise RuntimeError("oops") with self.assertRaises(RuntimeError): yield self.build.render(renderer(rend)) @defer.inlineCallbacks def test_renderer_recursive(self): self.props.setProperty("x", "X", "test") def rend(p): return Interpolate("x%(prop:x)sx") ret = yield self.build.render(renderer(rend)) self.assertEqual('xXx', ret) def test_renderer_repr(self): @renderer def myrend(p): pass self.assertIn('renderer(', repr(myrend)) # py3 and py2 do not have the same way of repr functions # but they always contain the name of function self.assertIn('myrend', repr(myrend)) @defer.inlineCallbacks def test_renderer_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return 'x-%s-%s-%s' % (p.getProperty('x'), arg, kwarg) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_with_state_called(self): # it's tempting to try to call the decorated function. Don't do that. # It's not a function anymore. 
def rend(p, arg, kwarg='y'): return 'x' with self.assertRaises(TypeError): rend_with_args = renderer(rend).withArgs('a', kwarg='kw') yield self.build.render(rend_with_args('y')) @defer.inlineCallbacks def test_renderer_with_state_renders_args(self): self.props.setProperty("x", "X", "test") self.props.setProperty('arg', 'ARG', 'test2') self.props.setProperty('kw', 'KW', 'test3') def rend(p, arg, kwarg='y'): return 'x-%s-%s-%s' % (p.getProperty('x'), arg, kwarg) res = yield self.build.render( renderer(rend).withArgs(Property('arg'), kwarg=Property('kw'))) self.assertEqual('x-X-ARG-KW', res) @defer.inlineCallbacks def test_renderer_decorator_with_state(self): self.props.setProperty("x", "X", "test") @renderer def rend(p, arg, kwarg='y'): return 'x-%s-%s-%s' % (p.getProperty('x'), arg, kwarg) res = yield self.build.render(rend.withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_decorator_with_state_does_not_share_state(self): self.props.setProperty("x", "X", "test") @renderer def rend(p, *args, **kwargs): return 'x-%s-%s-%s' % (p.getProperty('x'), str(args), str(kwargs)) rend1 = rend.withArgs('a', kwarg1='kw1') rend2 = rend.withArgs('b', kwarg2='kw2') res1 = yield self.build.render(rend1) res2 = yield self.build.render(rend2) self.assertEqual('x-X-(\'a\',)-{\'kwarg1\': \'kw1\'}', res1) self.assertEqual('x-X-(\'b\',)-{\'kwarg2\': \'kw2\'}', res2) @defer.inlineCallbacks def test_renderer_deferred_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return defer.succeed('x-%s-%s-%s' % (p.getProperty('x'), arg, kwarg)) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) @defer.inlineCallbacks def test_renderer_fails_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): raise RuntimeError('oops') with self.assertRaises(RuntimeError): yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) @defer.inlineCallbacks def test_renderer_recursive_with_state(self): self.props.setProperty("x", "X", "test") def rend(p, arg, kwarg='y'): return Interpolate('x-%(prop:x)s-%(kw:arg)s-%(kw:kwarg)s', arg=arg, kwarg=kwarg) res = yield self.build.render(renderer(rend).withArgs('a', kwarg='kw')) self.assertEqual('x-X-a-kw', res) def test_renderer_repr_with_state(self): @renderer def rend(p): pass rend = rend.withArgs('a', kwarg='kw') # pylint: disable=assignment-from-no-return self.assertIn('renderer(', repr(rend)) # py3 and py2 do not have the same way of repr functions # but they always contain the name of function self.assertIn('args=[\'a\']', repr(rend)) self.assertIn('kwargs={\'kwarg\': \'kw\'}', repr(rend)) @defer.inlineCallbacks def test_interpolate_worker(self): rend = yield self.build.render(Interpolate("%(worker:test)s")) self.assertEqual(rend, "test") class Compare(unittest.TestCase): def test_WithProperties_lambda(self): self.assertNotEqual(WithProperties("%(key)s", key=lambda p: 'val'), WithProperties( "%(key)s", key=lambda p: 'val')) def rend(p): return "val" self.assertEqual( WithProperties("%(key)s", key=rend), WithProperties("%(key)s", key=rend)) self.assertNotEqual( WithProperties("%(key)s", key=rend), WithProperties("%(key)s", otherkey=rend)) def test_WithProperties_positional(self): self.assertNotEqual( WithProperties("%s", 'key'), WithProperties("%s", 'otherkey')) self.assertEqual( WithProperties("%s", 'key'), WithProperties("%s", 'key')) self.assertNotEqual( WithProperties("%s", 'key'), WithProperties("k%s", 'key')) 
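# Illustrative sketch -- not part of the upstream test module.  The Compare
# tests around this point assert that renderables built from identical
# arguments compare equal and that differing arguments compare unequal,
# which is useful, for instance, when comparing otherwise-identical
# configurations.  A standalone demonstration of the asserted behaviour
# (the property names are arbitrary examples):
from buildbot.process.properties import Interpolate

assert Interpolate('%(prop:branch)s') == Interpolate('%(prop:branch)s')
assert Interpolate('%(prop:branch)s') != Interpolate('%(prop:revision)s')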
def test_Interpolate_constant(self): self.assertNotEqual( Interpolate('some text here'), Interpolate('and other text there')) self.assertEqual( Interpolate('some text here'), Interpolate('some text here')) def test_Interpolate_positional(self): self.assertNotEqual( Interpolate('%s %s', "test", "text"), Interpolate('%s %s', "other", "text")) self.assertEqual( Interpolate('%s %s', "test", "text"), Interpolate('%s %s', "test", "text")) def test_Interpolate_kwarg(self): self.assertNotEqual( Interpolate("%(kw:test)s", test=object(), other=2), Interpolate("%(kw:test)s", test=object(), other=2)) self.assertEqual( Interpolate('testing: %(kw:test)s', test="test", other=3), Interpolate('testing: %(kw:test)s', test="test", other=3)) def test_Interpolate_worker(self): self.assertEqual( Interpolate('testing: %(worker:test)s'), Interpolate('testing: %(worker:test)s')) def test_renderer(self): self.assertNotEqual( renderer(lambda p: 'val'), renderer(lambda p: 'val')) def rend(p): return "val" self.assertEqual( renderer(rend), renderer(rend)) def test_Lookup_simple(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'other'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test'), _Lookup({'test': 5, 'other': 6}, 'test')) def test_Lookup_default(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', default='default'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', default='default'), _Lookup({'test': 5, 'other': 6}, 'test', default='default')) def test_Lookup_defaultWhenFalse(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=False), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=False), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test'), _Lookup({'test': 5, 'other': 6}, 'test', defaultWhenFalse=True)) def test_Lookup_hasKey(self): self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey=None), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test', hasKey='other-key')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key'), _Lookup({'test': 5, 'other': 6}, 'test', hasKey='has-key')) def test_Lookup_elideNoneAs(self): self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs=None), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs=''), _Lookup({'test': 5, 'other': 6}, 'test')) self.assertNotEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None'), _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='')) self.assertEqual( _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None'), _Lookup({'test': 5, 'other': 6}, 'test', elideNoneAs='got None')) def test_Lazy(self): self.assertNotEqual( _Lazy(5), _Lazy(6)) self.assertEqual( _Lazy(5), _Lazy(5)) def test_SourceStampDict(self): self.assertNotEqual( _SourceStampDict('binary'), _SourceStampDict('library')) self.assertEqual( _SourceStampDict('binary'), 
_SourceStampDict('binary')) class TestTransform(unittest.TestCase, ConfigErrorsMixin): def setUp(self): self.props = Properties(propname='propvalue') def test_invalid_first_arg(self): with self.assertRaisesConfigError( "function given to Transform neither callable nor renderable"): Transform(None) @defer.inlineCallbacks def test_argless(self): t = Transform(lambda: 'abc') res = yield self.props.render(t) self.assertEqual(res, 'abc') @defer.inlineCallbacks def test_argless_renderable(self): @renderer def function(iprops): return lambda: iprops.getProperty('propname') t = Transform(function) res = yield self.props.render(t) self.assertEqual(res, 'propvalue') @defer.inlineCallbacks def test_args(self): t = Transform(lambda x, y: x + '|' + y, 'abc', Property('propname')) res = yield self.props.render(t) self.assertEqual(res, 'abc|propvalue') @defer.inlineCallbacks def test_kwargs(self): t = Transform(lambda x, y: x + '|' + y, x='abc', y=Property('propname')) res = yield self.props.render(t) self.assertEqual(res, 'abc|propvalue') def test_deferred(self): function = DeferredRenderable() arg = DeferredRenderable() kwarg = DeferredRenderable() t = Transform(function, arg, y=kwarg) d = self.props.render(t) d.addCallback(self.assertEqual, 'abc|def') function.callback(lambda x, y: x + '|' + y) arg.callback('abc') kwarg.callback('def') return d buildbot-2.6.0/master/buildbot/test/unit/test_process_remotecommand.py000066400000000000000000000165201361162603000263630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.process import remotecommand from buildbot.test.fake import logfile from buildbot.test.fake import remotecommand as fakeremotecommand from buildbot.test.util import interfaces from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.worker_transition import DeprecatedWorkerAPIWarning class TestRemoteShellCommand(unittest.TestCase): def test_obfuscated_arguments(self): command = ["echo", ("obfuscated", "real", "fake"), "test", ("obfuscated", "real2", "fake2"), ("not obfuscated", "a", "b"), ("obfuscated"), # not obfuscated ("obfuscated", "test"), # not obfuscated ("obfuscated", "1", "2", "3"), # not obfuscated) ] cmd = remotecommand.RemoteShellCommand("build", command) self.assertEqual(cmd.command, command) self.assertEqual(cmd.fake_command, ["echo", "fake", "test", "fake2", ("not obfuscated", "a", "b"), ("obfuscated"), # not obfuscated # not obfuscated ("obfuscated", "test"), # not obfuscated) ("obfuscated", "1", "2", "3"), ]) def test_not_obfuscated_arguments(self): command = "echo test" cmd = remotecommand.RemoteShellCommand("build", command) self.assertEqual(cmd.command, command) self.assertEqual(cmd.fake_command, command) # NOTE: # # This interface is considered private to Buildbot and may change without # warning in future versions. 
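# Illustrative sketch -- not part of the upstream test module.  The
# obfuscation convention exercised in TestRemoteShellCommand above is a
# three-element tuple ('obfuscated', <real value>, <displayed value>): the
# real value is what the worker executes, while logs and fake_command only
# ever show the displayed value.  The command and secret below are invented
# for the example.
from buildbot.process.remotecommand import RemoteShellCommand

example_cmd = RemoteShellCommand(
    'build',                                         # workdir on the worker
    ['deploy', ('obfuscated', 's3cr3t', '<password>')])
# example_cmd.command still carries 's3cr3t';
# example_cmd.fake_command shows ['deploy', '<password>'] instead.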
class Tests(interfaces.InterfaceTests): remoteCommandClass = None def makeRemoteCommand(self, stdioLogName='stdio'): return self.remoteCommandClass('ping', {'arg': 'val'}, stdioLogName=stdioLogName) def test_signature_RemoteCommand_constructor(self): @self.assertArgSpecMatches(self.remoteCommandClass.__init__) def __init__(self, remote_command, args, ignore_updates=False, collectStdout=False, collectStderr=False, decodeRC=None, stdioLogName='stdio'): pass def test_signature_RemoteShellCommand_constructor(self): @self.assertArgSpecMatches(self.remoteShellCommandClass.__init__) def __init__(self, workdir, command, env=None, want_stdout=1, want_stderr=1, timeout=20 * 60, maxTime=None, sigtermTime=None, logfiles=None, usePTY=None, logEnviron=True, collectStdout=False, collectStderr=False, interruptSignal=None, initialStdin=None, decodeRC=None, stdioLogName='stdio'): pass def test_signature_run(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.run) def run(self, step, conn, builder_name): pass def test_signature_useLog(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.useLog) def useLog(self, log_, closeWhenFinished=False, logfileName=None): pass def test_signature_useLogDelayed(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.useLogDelayed) def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False): pass def test_signature_interrupt(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.interrupt) def useLogDelayed(self, why): pass def test_signature_didFail(self): cmd = self.makeRemoteCommand() @self.assertArgSpecMatches(cmd.didFail) def useLogDelayed(self): pass def test_signature_logs(self): cmd = self.makeRemoteCommand() self.assertIsInstance(cmd.logs, dict) def test_signature_active(self): cmd = self.makeRemoteCommand() self.assertIsInstance(cmd.active, bool) def test_RemoteShellCommand_constructor(self): self.remoteShellCommandClass('wkdir', 'some-command') class TestRunCommand(unittest.TestCase, Tests): remoteCommandClass = remotecommand.RemoteCommand remoteShellCommandClass = remotecommand.RemoteShellCommand def test_notStdioLog(self): logname = 'notstdio' cmd = self.makeRemoteCommand(stdioLogName=logname) log = logfile.FakeLogFile(logname, 'dummy') cmd.useLog(log) cmd.addStdout('some stdout') self.assertEqual(log.stdout, 'some stdout') cmd.addStderr('some stderr') self.assertEqual(log.stderr, 'some stderr') cmd.addHeader('some header') self.assertEqual(log.header, 'some header') def test_RemoteShellCommand_usePTY_on_worker_2_16(self): cmd = remotecommand.RemoteShellCommand('workdir', 'shell') def workerVersion(command, oldversion=None): return '2.16' def workerVersionIsOlderThan(command, minversion): return ['2', '16'] < minversion.split('.') step = mock.Mock() step.workerVersionIsOlderThan = workerVersionIsOlderThan step.workerVersion = workerVersion conn = mock.Mock() conn.remoteStartCommand = mock.Mock(return_value=None) cmd.run(step, conn, 'builder') self.assertEqual(cmd.args['usePTY'], 'slave-config') class TestFakeRunCommand(unittest.TestCase, Tests): remoteCommandClass = fakeremotecommand.FakeRemoteCommand remoteShellCommandClass = fakeremotecommand.FakeRemoteShellCommand class TestWorkerTransition(unittest.TestCase): def test_RemoteShellCommand_usePTY(self): with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cmd = remotecommand.RemoteShellCommand( 'workdir', 'command') self.assertTrue(cmd.args['usePTY'] is None) with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cmd = 
remotecommand.RemoteShellCommand( 'workdir', 'command', usePTY=True) self.assertTrue(cmd.args['usePTY']) with assertNotProducesWarnings(DeprecatedWorkerAPIWarning): cmd = remotecommand.RemoteShellCommand( 'workdir', 'command', usePTY=False) self.assertFalse(cmd.args['usePTY']) buildbot-2.6.0/master/buildbot/test/unit/test_process_remotetransfer.py000066400000000000000000000046761361162603000266020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import stat import tempfile from mock import Mock from twisted.trial import unittest from buildbot.process import remotetransfer # Test buildbot.steps.remotetransfer.FileWriter class. class TestFileWriter(unittest.TestCase): # test FileWriter.__init__() method. def testInit(self): # # patch functions called in constructor # # patch os.path.exists() to always return False mockedExists = Mock(return_value=False) self.patch(os.path, "exists", mockedExists) # capture calls to os.makedirs() mockedMakedirs = Mock() self.patch(os, 'makedirs', mockedMakedirs) # capture calls to tempfile.mkstemp() mockedMkstemp = Mock(return_value=(7, "tmpname")) self.patch(tempfile, "mkstemp", mockedMkstemp) # capture calls to os.fdopen() mockedFdopen = Mock() self.patch(os, "fdopen", mockedFdopen) # # call _FileWriter constructor # destfile = os.path.join("dir", "file") remotetransfer.FileWriter(destfile, 64, stat.S_IRUSR) # # validate captured calls # absdir = os.path.dirname(os.path.abspath(os.path.join("dir", "file"))) mockedExists.assert_called_once_with(absdir) mockedMakedirs.assert_called_once_with(absdir) mockedMkstemp.assert_called_once_with(dir=absdir) mockedFdopen.assert_called_once_with(7, 'wb') class TestStringFileWriter(unittest.TestCase): def testBasic(self): sfw = remotetransfer.StringFileWriter() # StringFileWriter takes bytes or native string and outputs native strings sfw.remote_write(b'bytes') sfw.remote_write(' or str') self.assertEqual(sfw.buffer, 'bytes or str') buildbot-2.6.0/master/buildbot/test/unit/test_process_results.py000066400000000000000000000201501361162603000252240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.python import log from twisted.trial import unittest from buildbot.process import results class TestResults(unittest.TestCase): def test_Results(self): for r in results.Results: i = getattr(results, r.upper()) self.assertEqual(results.Results[i], r) def test_worst_status(self): self.assertEqual(results.WARNINGS, results.worst_status(results.SUCCESS, results.WARNINGS)) self.assertEqual(results.CANCELLED, results.worst_status(results.SKIPPED, results.CANCELLED)) def test_sort_worst_status(self): res = list(range(len(results.Results))) res.sort( key=lambda a: a if a != results.SKIPPED else -1) self.assertEqual(res, [ results.SKIPPED, results.SUCCESS, results.WARNINGS, results.FAILURE, results.EXCEPTION, results.RETRY, results.CANCELLED, ]) def do_test_carc(self, result, previousResult, newResult, terminate, haltOnFailure=None, flunkOnWarnings=None, flunkOnFailure=None, warnOnWarnings=None, warnOnFailure=None): if haltOnFailure is None: haltOnFailure = [True, False] if flunkOnWarnings is None: flunkOnWarnings = [ True, False] if flunkOnFailure is None: flunkOnFailure = [True, False] if warnOnWarnings is None: warnOnWarnings = [ True, False] if warnOnFailure is None: warnOnFailure = [True, False] for hof in haltOnFailure: for fow in flunkOnWarnings: for fof in flunkOnFailure: for wow in warnOnWarnings: for wof in warnOnFailure: self.haltOnFailure = hof self.flunkOnWarnings = fow self.flunkOnFailure = fof self.warnOnWarnings = wow self.warnOnFailure = wof nr, term = results.computeResultAndTermination( self, result, previousResult) log.msg("res=%r prevRes=%r hof=%r fow=%r fof=%r " "wow=%r wof=%r => %r %r" % (results.Results[result], results.Results[previousResult], hof, fow, fof, wow, wof, results.Results[nr], term)) self.assertEqual((nr, term), (newResult, terminate), "see test.log for details") def test_carc_success_after_success(self): self.do_test_carc(results.SUCCESS, results.SUCCESS, results.SUCCESS, False) def test_carc_success_after_warnings(self): self.do_test_carc(results.SUCCESS, results.WARNINGS, results.WARNINGS, False) def test_carc_success_after_failure(self): self.do_test_carc(results.SUCCESS, results.FAILURE, results.FAILURE, False) def test_carc_warnings_after_success(self): self.do_test_carc(results.WARNINGS, results.SUCCESS, results.WARNINGS, False, flunkOnWarnings=[False], warnOnWarnings=[True]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.SUCCESS, False, flunkOnWarnings=[False], warnOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.FAILURE, False, flunkOnWarnings=[True], warnOnWarnings=[True]) self.do_test_carc(results.WARNINGS, results.SUCCESS, results.FAILURE, False, flunkOnWarnings=[True], warnOnWarnings=[False]) def test_carc_warnings_after_warnings(self): self.do_test_carc(results.WARNINGS, results.WARNINGS, results.WARNINGS, False, flunkOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.WARNINGS, results.FAILURE, False, flunkOnWarnings=[True]) def test_carc_warnings_after_failure(self): self.do_test_carc(results.WARNINGS, results.FAILURE, results.FAILURE, False, flunkOnWarnings=[False]) self.do_test_carc(results.WARNINGS, results.FAILURE, results.FAILURE, False, flunkOnWarnings=[True]) def test_carc_failure_after_success(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.SUCCESS, results.FAILURE, hof, haltOnFailure=[hof], flunkOnFailure=[True], warnOnFailure=[False]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.FAILURE, 
hof, haltOnFailure=[hof], flunkOnFailure=[True], warnOnFailure=[True]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.SUCCESS, hof, haltOnFailure=[hof], flunkOnFailure=[False], warnOnFailure=[False]) self.do_test_carc(results.FAILURE, results.SUCCESS, results.WARNINGS, hof, haltOnFailure=[hof], flunkOnFailure=[False], warnOnFailure=[True]) def test_carc_failure_after_warnings(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.WARNINGS, results.FAILURE, hof, haltOnFailure=[hof], flunkOnFailure=[True]) self.do_test_carc(results.FAILURE, results.WARNINGS, results.WARNINGS, hof, haltOnFailure=[hof], flunkOnFailure=[False]) def test_carc_failure_after_failure(self): for hof in False, True: self.do_test_carc(results.FAILURE, results.FAILURE, results.FAILURE, hof, haltOnFailure=[hof]) def test_carc_exception(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.EXCEPTION, prev, results.EXCEPTION, True) def test_carc_retry(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.RETRY, prev, results.RETRY, True) def test_carc_cancelled(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.CANCELLED, prev, results.CANCELLED, True) def test_carc_skipped(self): for prev in results.FAILURE, results.WARNINGS, results.SUCCESS: self.do_test_carc(results.SKIPPED, prev, prev, False) buildbot-2.6.0/master/buildbot/test/unit/test_process_users_manager.py000066400000000000000000000033701361162603000263630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.users import manager from buildbot.util import service class FakeUserManager(service.AsyncMultiService): pass class TestUserManager(unittest.TestCase): def setUp(self): self.master = mock.Mock() self.umm = manager.UserManagerManager(self.master) self.umm.startService() self.config = config.MasterConfig() def tearDown(self): self.umm.stopService() @defer.inlineCallbacks def test_reconfigServiceWithBuildbotConfig(self): # add a user manager um1 = FakeUserManager() self.config.user_managers = [um1] yield self.umm.reconfigServiceWithBuildbotConfig(self.config) self.assertTrue(um1.running) self.assertIdentical(um1.master, self.master) # and back to nothing self.config.user_managers = [] yield self.umm.reconfigServiceWithBuildbotConfig(self.config) self.assertIdentical(um1.master, None) buildbot-2.6.0/master/buildbot/test/unit/test_process_users_manual.py000066400000000000000000000257561361162603000262420ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this class is known to contain cruft and will be looked at later, so # no current implementation utilizes it aside from scripts.runner. import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.users import manual from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class ManualUsersMixin: """ This class fakes out the master/db components to test the manual user managers located in process.users.manual. """ def setUpManualUsers(self): self.master = fakemaster.make_master(self, wantDb=True) class TestUsersBase(unittest.TestCase): """ Not really sure what there is to test, aside from _setUpManualUsers getting self.master set. """ class TestCommandlineUserManagerPerspective(TestReactorMixin, unittest.TestCase, ManualUsersMixin): def setUp(self): self.setUpTestReactor() self.setUpManualUsers() def call_perspective_commandline(self, *args): persp = manual.CommandlineUserManagerPerspective(self.master) return persp.perspective_commandline(*args) @defer.inlineCallbacks def test_perspective_commandline_add(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'git': 'x'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username=None, bb_password=None, git='x')) @defer.inlineCallbacks def test_perspective_commandline_update(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', None, None, None, [{'identifier': 'x', 'svn': 'y'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='y')) @defer.inlineCallbacks def test_perspective_commandline_update_bb(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username='bb_user', bb_password='hashed_bb_pass', svn='x')) @defer.inlineCallbacks def test_perspective_commandline_update_both(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('update', 'bb_user', 'hashed_bb_pass', None, [{'identifier': 'x', 'svn': 'y'}]) usdict = yield self.master.db.users.getUser(1) self.assertEqual(usdict, dict(uid=1, identifier='x', bb_username='bb_user', bb_password='hashed_bb_pass', svn='y')) @defer.inlineCallbacks def test_perspective_commandline_remove(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'h@c', 'git': 'hi '}]) yield 
self.call_perspective_commandline('remove', None, None, ['x'], None) res = yield self.master.db.users.getUser('x') self.assertEqual(res, None) @defer.inlineCallbacks def test_perspective_commandline_get(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) yield self.call_perspective_commandline('get', None, None, ['x'], None) res = yield self.master.db.users.getUser(1) self.assertEqual(res, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='x')) @defer.inlineCallbacks def test_perspective_commandline_get_multiple_attrs(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x', 'git': 'x@c'}]) yield self.call_perspective_commandline('get', None, None, ['x'], None) res = yield self.master.db.users.getUser(1) self.assertEqual(res, dict(uid=1, identifier='x', bb_username=None, bb_password=None, svn='x', git='x@c')) @defer.inlineCallbacks def test_perspective_commandline_add_format(self): result = yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) exp_format = "user(s) added:\nidentifier: x\nuid: 1\n\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_update_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x', 'svn': 'x'}]) result = yield self.call_perspective_commandline('update', None, None, None, [{'identifier': 'x', 'svn': 'y'}]) exp_format = 'user(s) updated:\nidentifier: x\n' self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_remove_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'h@c', 'git': 'hi '}]) result = yield self.call_perspective_commandline('remove', None, None, ['h@c'], None) exp_format = "user(s) removed:\nidentifier: h@c\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_get_format(self): yield self.call_perspective_commandline('add', None, None, None, [{'identifier': 'x@y', 'git': 'x '}]) result = yield self.call_perspective_commandline('get', None, None, ['x@y'], None) exp_format = ('user(s) found:\nbb_username: None\n' 'git: x \nidentifier: x@y\n' 'uid: 1\n\n') self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_remove_no_match_format(self): result = yield self.call_perspective_commandline( 'remove', None, None, ['x'], None) exp_format = "user(s) removed:\n" self.assertEqual(result, exp_format) @defer.inlineCallbacks def test_perspective_commandline_get_no_match_format(self): result = yield self.call_perspective_commandline('get', None, None, ['x'], None) exp_format = "user(s) found:\nno match found\n" self.assertEqual(result, exp_format) class TestCommandlineUserManager(TestReactorMixin, unittest.TestCase, ManualUsersMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpManualUsers() self.manual_component = manual.CommandlineUserManager(username="user", passwd="userpw", port="9990") yield self.manual_component.setServiceParent(self.master) def test_no_userpass(self): d = defer.maybeDeferred(manual.CommandlineUserManager) return self.assertFailure(d, AssertionError) def test_no_port(self): d = defer.maybeDeferred(manual.CommandlineUserManager, username="x", passwd="y") return self.assertFailure(d, AssertionError) @defer.inlineCallbacks def test_service(self): # patch out the pbmanager's 'register' command both to be sure 
# the registration is correct and to get a copy of the factory registration = mock.Mock() registration.unregister = lambda: defer.succeed(None) self.master.pbmanager = mock.Mock() def register(portstr, user, passwd, factory): self.assertEqual([portstr, user, passwd], ['9990', 'user', 'userpw']) self.got_factory = factory return defer.succeed(registration) self.master.pbmanager.register = register yield self.manual_component.startService() persp = self.got_factory(mock.Mock(), 'user') self.assertTrue( isinstance(persp, manual.CommandlineUserManagerPerspective)) yield self.manual_component.stopService() buildbot-2.6.0/master/buildbot/test/unit/test_process_users_users.py000066400000000000000000000144701361162603000261150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.users import users from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class UsersTests(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.db = self.master.db self.test_sha = users.encrypt("cancer") @defer.inlineCallbacks def test_createUserObject_no_src(self): yield users.createUserObject(self.master, "Tyler Durden", None) self.assertEqual(self.db.users.users, {}) self.assertEqual(self.db.users.users_info, {}) @defer.inlineCallbacks def test_createUserObject_unrecognized_src(self): yield users.createUserObject(self.master, "Tyler Durden", 'blah') self.assertEqual(self.db.users.users, {}) self.assertEqual(self.db.users.users_info, {}) @defer.inlineCallbacks def test_createUserObject_git(self): yield users.createUserObject(self.master, "Tyler Durden ", 'git') self.assertEqual(self.db.users.users, {1: dict(identifier='Tyler Durden ', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="git", attr_data="Tyler Durden ")]}) @defer.inlineCallbacks def test_createUserObject_svn(self): yield users.createUserObject(self.master, "tdurden", 'svn') self.assertEqual(self.db.users.users, {1: dict(identifier='tdurden', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="svn", attr_data="tdurden")]}) @defer.inlineCallbacks def test_createUserObject_hg(self): yield users.createUserObject(self.master, "Tyler Durden ", 'hg') self.assertEqual(self.db.users.users, {1: dict(identifier='Tyler Durden ', bb_username=None, bb_password=None)}) self.assertEqual(self.db.users.users_info, {1: [dict(attr_type="hg", attr_data="Tyler Durden ")]}) @defer.inlineCallbacks def test_createUserObject_cvs(self): yield users.createUserObject(self.master, "tdurden", 'cvs') self.assertEqual(self.db.users.users, {1: dict(identifier='tdurden', 
                                  bb_username=None,
                                  bb_password=None)})
        self.assertEqual(self.db.users.users_info,
                         {1: [dict(attr_type="cvs", attr_data="tdurden")]})

    @defer.inlineCallbacks
    def test_createUserObject_darcs(self):
        yield users.createUserObject(self.master, "tyler@mayhem.net", 'darcs')
        self.assertEqual(self.db.users.users,
                         {1: dict(identifier='tyler@mayhem.net',
                                  bb_username=None, bb_password=None)})
        self.assertEqual(self.db.users.users_info,
                         {1: [dict(attr_type="darcs",
                                   attr_data="tyler@mayhem.net")]})

    @defer.inlineCallbacks
    def test_createUserObject_bzr(self):
        yield users.createUserObject(self.master, "Tyler Durden", 'bzr')
        self.assertEqual(self.db.users.users,
                         {1: dict(identifier='Tyler Durden',
                                  bb_username=None, bb_password=None)})
        self.assertEqual(self.db.users.users_info,
                         {1: [dict(attr_type="bzr", attr_data="Tyler Durden")]})

    @defer.inlineCallbacks
    def test_getUserContact_found(self):
        self.db.insertTestData([fakedb.User(uid=1, identifier='tdurden'),
                                fakedb.UserInfo(uid=1, attr_type='svn',
                                                attr_data='tdurden'),
                                fakedb.UserInfo(uid=1, attr_type='email',
                                                attr_data='tyler@mayhem.net')])
        contact = yield users.getUserContact(self.master,
                                             contact_types=['email'], uid=1)
        self.assertEqual(contact, 'tyler@mayhem.net')

    @defer.inlineCallbacks
    def test_getUserContact_key_not_found(self):
        self.db.insertTestData([fakedb.User(uid=1, identifier='tdurden'),
                                fakedb.UserInfo(uid=1, attr_type='svn',
                                                attr_data='tdurden'),
                                fakedb.UserInfo(uid=1, attr_type='email',
                                                attr_data='tyler@mayhem.net')])
        contact = yield users.getUserContact(self.master,
                                             contact_types=['blargh'], uid=1)
        self.assertEqual(contact, None)

    @defer.inlineCallbacks
    def test_getUserContact_uid_not_found(self):
        contact = yield users.getUserContact(self.master,
                                             contact_types=['email'], uid=1)
        self.assertEqual(contact, None)

    def test_check_passwd(self):
        res = users.check_passwd("cancer", self.test_sha)
        self.assertEqual(res, True)

buildbot-2.6.0/master/buildbot/test/unit/test_process_workerforbuilder.py
# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from twisted.trial.unittest import TestCase

from buildbot.process.workerforbuilder import AbstractWorkerForBuilder
from buildbot.worker.base import AbstractWorker


class TestAbstractWorkerForBuilder(TestCase):
    """
    Tests for ``AbstractWorkerForBuilder``.
    """

    def test_buildStarted_called(self):
        """
        If the worker associated with the worker builder has a
        ``buildStarted`` method, calling ``buildStarted`` on the worker
        builder calls the method on the worker with the workerforbuilder
        as an argument.
        """
        class ConcreteWorker(AbstractWorker):
            _buildStartedCalls = []

            def buildStarted(self, workerforbuilder):
                self._buildStartedCalls.append(workerforbuilder)

        worker = ConcreteWorker("worker", "pass")
        workerforbuilder = AbstractWorkerForBuilder()
        # FIXME: This should call attached, instead of setting the attribute
        # directly
        workerforbuilder.worker = worker
        workerforbuilder.buildStarted()

        self.assertEqual(ConcreteWorker._buildStartedCalls, [workerforbuilder])

    def test_buildStarted_missing(self):
        """
        If the worker associated with the worker builder doesn't have a
        ``buildStarted`` method, calling ``buildStarted`` on the worker
        builder doesn't raise an exception.
        """
        class ConcreteWorker(AbstractWorker):
            pass

        worker = ConcreteWorker("worker", "pass")
        workerforbuilder = AbstractWorkerForBuilder()
        # FIXME: This should call attached, instead of setting the attribute
        # directly
        workerforbuilder.worker = worker
        # The following shouldn't raise an exception.
        workerforbuilder.buildStarted()

buildbot-2.6.0/master/buildbot/test/unit/test_reporter_bitbucket.py
# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.bitbucket import _BASE_URL from buildbot.reporters.bitbucket import _OAUTH_URL from buildbot.reporters.bitbucket import BitbucketStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestBitbucketStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): TEST_REPO = 'https://example.org/user/repo' @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, _BASE_URL, debug=None, verify=None) self.oauthhttp = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, _OAUTH_URL, auth=('key', 'secret'), debug=None, verify=None) self.bsp = bsp = BitbucketStatusPush( Interpolate('key'), Interpolate('secret')) yield bsp.setServiceParent(self.master) yield bsp.startService() @defer.inlineCallbacks def tearDown(self): yield self.bsp.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(('builds', 20)) return build @defer.inlineCallbacks def test_basic(self): build = yield self.setupBuildResults(SUCCESS) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Builder0'}, code=201) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Builder0'}, code=201) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'FAILED', 'key': 'Builder0', 'name': 'Builder0'}, code=201) build['complete'] = False self.bsp.buildStarted(('build', 20, 'started'), build) build['complete'] = True self.bsp.buildFinished(('build', 20, 'finished'), build) build['results'] = FAILURE self.bsp.buildFinished(('build', 20, 'finished'), build) @defer.inlineCallbacks def test_unable_to_authenticate(self): build = yield self.setupBuildResults(SUCCESS) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, code=400, content_json={ "error_description": "Unsupported grant type: None", "error": "invalid_grant"}) self.setUpLogging() self.bsp.buildStarted(('build', 20, 'started'), build) 
self.assertLogged('400: unable to authenticate to Bitbucket') @defer.inlineCallbacks def test_unable_to_send_status(self): build = yield self.setupBuildResults(SUCCESS) self.oauthhttp.expect('post', '', data={'grant_type': 'client_credentials'}, content_json={'access_token': 'foo'}) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/user/repo/commit/d34db33fd43db33f/statuses/build', json={ 'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Builder0'}, code=404, content_json={ "error_description": "This commit is unknown to us", "error": "invalid_commit"}), self.setUpLogging() self.bsp.buildStarted(('build', 20, 'started'), build) self.assertLogged('404: unable to upload Bitbucket status') self.assertLogged('This commit is unknown to us') self.assertLogged('invalid_commit') class TestBitbucketStatusPushRepoParsing(unittest.TestCase): def parse(self, repourl): return tuple(BitbucketStatusPush.get_owner_and_repo(repourl)) def test_parse_no_scheme(self): self.assertEqual( ('user', 'repo'), self.parse('git@bitbucket.com:user/repo.git')) self.assertEqual( ('user', 'repo'), self.parse('git@bitbucket.com:user/repo')) def test_parse_with_scheme(self): self.assertEqual(('user', 'repo'), self.parse( 'https://bitbucket.com/user/repo.git')) self.assertEqual( ('user', 'repo'), self.parse('https://bitbucket.com/user/repo')) self.assertEqual(('user', 'repo'), self.parse( 'ssh://git@bitbucket.com/user/repo.git')) self.assertEqual( ('user', 'repo'), self.parse('ssh://git@bitbucket.com/user/repo')) buildbot-2.6.0/master/buildbot/test/unit/test_reporter_bitbucketserver.py000066400000000000000000000322531361162603000271210ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.bitbucketserver import HTTP_CREATED from buildbot.reporters.bitbucketserver import HTTP_PROCESSED from buildbot.reporters.bitbucketserver import BitbucketServerPRCommentPush from buildbot.reporters.bitbucketserver import BitbucketServerStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.notifier import NotifierTestMixin from buildbot.test.util.reporter import ReporterTestMixin HTTP_NOT_FOUND = 404 class TestBitbucketServerStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): @defer.inlineCallbacks def setupReporter(self, **kwargs): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'serv', auth=('username', 'passwd'), debug=None, verify=None) self.sp = sp = BitbucketServerStatusPush( "serv", Interpolate("username"), Interpolate("passwd"), **kwargs) yield sp.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build def _check_start_and_finish_build(self, build): # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'description': 'Build started.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'SUCCESSFUL', 'key': 'Builder0', 'description': 'Build done.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'FAILED', 'key': 'Builder0', 'description': 'Build done.'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_basic(self): self.setupReporter() build = yield self.setupBuildResults(SUCCESS) self._check_start_and_finish_build(build) @defer.inlineCallbacks def test_setting_options(self): self.setupReporter(statusName='Build', startDescription='Build started.', endDescription='Build finished.') build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'name': 'Build', 'description': 'Build 
started.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'SUCCESSFUL', 'key': 'Builder0', 'name': 'Build', 'description': 'Build finished.'}, code=HTTP_PROCESSED) self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'FAILED', 'key': 'Builder0', 'name': 'Build', 'description': 'Build finished.'}, code=HTTP_PROCESSED) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_error(self): self.setupReporter() build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/rest/build-status/1.0/commits/d34db33fd43db33f', json={'url': 'http://localhost:8080/#builders/79/builds/0', 'state': 'INPROGRESS', 'key': 'Builder0', 'description': 'Build started.'}, code=HTTP_NOT_FOUND, content_json={ "error_description": "This commit is unknown to us", "error": "invalid_commit"}) build['complete'] = False self.setUpLogging() self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged('404: Unable to send Bitbucket Server status') @defer.inlineCallbacks def test_basic_with_no_revision(self): yield self.setupReporter() old_test_revision = self.TEST_REVISION try: self.TEST_REVISION = None build = yield self.setupBuildResults(SUCCESS) finally: self.TEST_REVISION = old_test_revision self.setUpLogging() # we don't expect any request build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged("Unable to get the commit hash") build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) UNICODE_BODY = "body: \u00E5\u00E4\u00F6 text" EXPECTED_API = '/rest/api/1.0/projects/PRO/repos/myrepo/pull-requests/20/comments' PR_URL = "http://example.com/projects/PRO/repos/myrepo/pull-requests/20" class TestBitbucketServerPRCommentPush(TestReactorMixin, unittest.TestCase, NotifierTestMixin, LoggingMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def setupReporter(self, verbose=True, **kwargs): self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'serv', auth=('username', 'passwd'), debug=None, verify=None) self.cp = BitbucketServerPRCommentPush( "serv", Interpolate("username"), Interpolate("passwd"), verbose=verbose, **kwargs) yield self.cp.setServiceParent(self.master) self.cp.messageFormatter = Mock(spec=self.cp.messageFormatter) self.cp.messageFormatter.formatMessageForBuildResults.return_value = \ {"body": UNICODE_BODY, "type": "text"} @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults, set_pr=True): buildset, builds = yield super().setupBuildResults(buildResults) if set_pr: yield self.master.db.builds.setBuildProperty( 20, "pullrequesturl", PR_URL, "test") return (buildset, builds) 
@defer.inlineCallbacks def test_reporter_basic(self): yield self.setupReporter() _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) build["complete"] = True self.setUpLogging() yield self.cp.buildComplete(("build", 20, "finished"), build) self.assertLogged('Comment sent to {}'.format(PR_URL)) @defer.inlineCallbacks def test_reporter_basic_without_logging(self): yield self.setupReporter(verbose=False) _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) build["complete"] = True self.setUpLogging() yield self.cp.buildComplete(("build", 20, "finished"), build) self.assertNotLogged('Comment sent to {}'.format(PR_URL)) @defer.inlineCallbacks def test_reporter_without_pullrequest(self): yield self.setupReporter() _, builds = yield self.setupBuildResults(SUCCESS, set_pr=False) build = builds[0] build["complete"] = True # we don't expect any request yield self.cp.buildComplete(("builds", 20, "finished"), build) @defer.inlineCallbacks def test_missing_worker_does_nothing(self): yield self.setupReporter() self.cp.workerMissing(("workers", 13, "missing"), 13) @defer.inlineCallbacks def test_reporter_with_buildset(self): yield self.setupReporter(buildSetSummary=True) buildset, _ = yield self.setupBuildResults(SUCCESS) self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=HTTP_CREATED) yield self.cp.buildsetComplete(("buildsets", 20, "complete"), buildset) @defer.inlineCallbacks def test_reporter_logs_error_code_and_content_on_invalid_return_code(self): yield self.setupReporter() _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] http_error_code = 500 error_body = {"errors": [ {"message": "A dataXXXbase error has occurred."}]} self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_error_code, content_json=error_body) self.setUpLogging() build['complete'] = True yield self.cp.buildComplete(("builds", 20, "finished"), build) self.assertLogged( "^{}: Unable to send a comment: ".format(http_error_code)) self.assertLogged("A dataXXXbase error has occurred") @defer.inlineCallbacks def test_reporter_logs_error_code_without_content_on_invalid_return_code(self): yield self.setupReporter() _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] http_error_code = 503 self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_error_code) self.setUpLogging() build['complete'] = True yield self.cp.buildComplete(("builds", 20, "finished"), build) self.assertLogged("^{}: Unable to send a comment: ".format( http_error_code)) @defer.inlineCallbacks def test_reporter_does_not_log_return_code_on_valid_return_code( self): yield self.setupReporter() _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] http_code = 201 self._http.expect( "post", EXPECTED_API, json={"text": UNICODE_BODY}, code=http_code) self.setUpLogging() build['complete'] = True yield self.cp.buildComplete(("builds", 20, "finished"), build) self.assertNotLogged("^{}:".format(http_code)) buildbot-2.6.0/master/buildbot/test/unit/test_reporter_gerrit.py000066400000000000000000000544731361162603000252220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from distutils.version import LooseVersion from mock import Mock from mock import call from twisted.internet import defer from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.reporters.gerrit import GERRIT_LABEL_REVIEWED from buildbot.reporters.gerrit import GERRIT_LABEL_VERIFIED from buildbot.reporters.gerrit import GerritStatusPush from buildbot.reporters.gerrit import defaultReviewCB from buildbot.reporters.gerrit import defaultSummaryCB from buildbot.reporters.gerrit import makeReviewResult from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin warnings.filterwarnings('error', message='.*Gerrit status') def sampleReviewCB(builderName, build, result, status, arg): verified = 1 if result == SUCCESS else -1 return makeReviewResult(str({'name': builderName, 'result': result}), (GERRIT_LABEL_VERIFIED, verified)) @defer.inlineCallbacks def sampleReviewCBDeferred(builderName, build, result, status, arg): verified = 1 if result == SUCCESS else -1 result = yield makeReviewResult(str({'name': builderName, 'result': result}), (GERRIT_LABEL_VERIFIED, verified)) return result def sampleStartCB(builderName, build, arg): return makeReviewResult(str({'name': builderName}), (GERRIT_LABEL_REVIEWED, 0)) @defer.inlineCallbacks def sampleStartCBDeferred(builderName, build, arg): result = yield makeReviewResult(str({'name': builderName}), (GERRIT_LABEL_REVIEWED, 0)) return result def sampleSummaryCB(buildInfoList, results, status, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 else: verified = 0 return makeReviewResult(str(buildInfoList), (GERRIT_LABEL_VERIFIED, verified)) @defer.inlineCallbacks def sampleSummaryCBDeferred(buildInfoList, results, master, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 else: verified = 0 result = yield makeReviewResult(str(buildInfoList), (GERRIT_LABEL_VERIFIED, verified)) return result def legacyTestReviewCB(builderName, build, result, status, arg): msg = str({'name': builderName, 'result': result}) return (msg, 1 if result == SUCCESS else -1, 0) def legacyTestSummaryCB(buildInfoList, results, status, arg): success = False failure = False for buildInfo in buildInfoList: if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement success = True else: failure = True if failure: verified = -1 elif success: verified = 1 
else: verified = 0 return (str(buildInfoList), verified, 0) class TestGerritStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupGerritStatusPushSimple(self, *args, **kwargs): serv = kwargs.pop("server", "serv") username = kwargs.pop("username", "user") gsp = GerritStatusPush(serv, username, *args, **kwargs) yield gsp.setServiceParent(self.master) yield gsp.startService() return gsp @defer.inlineCallbacks def setupGerritStatusPush(self, *args, **kwargs): gsp = yield self.setupGerritStatusPushSimple(*args, **kwargs) gsp.sendCodeReview = Mock() return gsp @defer.inlineCallbacks def setupBuildResults(self, buildResults, finalResult): self.insertTestData(buildResults, finalResult) res = yield utils.getDetailsForBuildset(self.master, 98, wantProperties=True) builds = res['builds'] buildset = res['buildset'] @defer.inlineCallbacks def getChangesForBuild(buildid): assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild return (buildset, builds) def makeBuildInfo(self, buildResults, resultText, builds): info = [] for i, buildResult in enumerate(buildResults): info.append({'name': "Builder%d" % i, 'result': buildResults[i], 'resultText': resultText[i], 'text': 'buildText', 'url': "http://localhost:8080/#builders/%d/builds/%d" % (79 + i, i), 'build': builds[i]}) return info @defer.inlineCallbacks def run_fake_summary_build(self, gsp, buildResults, finalResult, resultText, expWarning=False): buildset, builds = yield self.setupBuildResults(buildResults, finalResult) yield gsp.buildsetComplete('buildset.98.complete'.split("."), buildset) info = self.makeBuildInfo(buildResults, resultText, builds) if expWarning: self.assertEqual([w['message'] for w in self.flushWarnings()], ['The Gerrit status callback uses the old ' 'way to communicate results. 
The outcome ' 'might be not what is expected.']) return str(info) # check_summary_build and check_summary_build_legacy differ in two things: # * the callback used # * the expected result @defer.inlineCallbacks def check_summary_build_deferred(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCBDeferred) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT, self.TEST_REVISION, result) @defer.inlineCallbacks def check_summary_build(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT, self.TEST_REVISION, result) @defer.inlineCallbacks def check_summary_build_legacy(self, buildResults, finalResult, resultText, verifiedScore): gsp = yield self.setupGerritStatusPush(summaryCB=legacyTestSummaryCB) msg = yield self.run_fake_summary_build(gsp, buildResults, finalResult, resultText, expWarning=True) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore), (GERRIT_LABEL_REVIEWED, 0)) gsp.sendCodeReview.assert_called_once_with(self.TEST_PROJECT, self.TEST_REVISION, result) @defer.inlineCallbacks def test_gerrit_ssh_cmd(self): kwargs = { 'server': 'example.com', 'username': 'buildbot', } without_identity = yield self.setupGerritStatusPush(**kwargs) expected1 = [ 'ssh', 'buildbot@example.com', '-p', '29418', 'gerrit', 'foo'] self.assertEqual(expected1, without_identity._gerritCmd('foo')) yield without_identity.disownServiceParent() with_identity = yield self.setupGerritStatusPush( identity_file='/path/to/id_rsa', **kwargs) expected2 = [ 'ssh', '-i', '/path/to/id_rsa', 'buildbot@example.com', '-p', '29418', 'gerrit', 'foo', ] self.assertEqual(expected2, with_identity._gerritCmd('foo')) def test_buildsetComplete_success_sends_summary_review_deferred(self): d = self.check_summary_build_deferred(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=[ "succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_success_sends_summary_review(self): d = self.check_summary_build(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=["succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_failure_sends_summary_review(self): d = self.check_summary_build(buildResults=[FAILURE, FAILURE], finalResult=FAILURE, resultText=["failed", "failed"], verifiedScore=-1) return d def test_buildsetComplete_mixed_sends_summary_review(self): d = self.check_summary_build(buildResults=[SUCCESS, FAILURE], finalResult=FAILURE, resultText=["succeeded", "failed"], verifiedScore=-1) return d def test_buildsetComplete_success_sends_summary_review_legacy(self): d = self.check_summary_build_legacy(buildResults=[SUCCESS, SUCCESS], finalResult=SUCCESS, resultText=[ "succeeded", "succeeded"], verifiedScore=1) return d def test_buildsetComplete_failure_sends_summary_review_legacy(self): d = self.check_summary_build_legacy(buildResults=[FAILURE, FAILURE], finalResult=FAILURE, resultText=["failed", "failed"], verifiedScore=-1) return d def test_buildsetComplete_mixed_sends_summary_review_legacy(self): d = 
self.check_summary_build_legacy(buildResults=[SUCCESS, FAILURE], finalResult=FAILURE, resultText=["succeeded", "failed"], verifiedScore=-1) return d @defer.inlineCallbacks def test_buildsetComplete_filtered_builder(self): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) gsp.builders = ["foo"] yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], FAILURE, ["failed", "failed"]) self.assertFalse( gsp.sendCodeReview.called, "sendCodeReview should not be called") @defer.inlineCallbacks def test_buildsetComplete_filtered_matching_builder(self): gsp = yield self.setupGerritStatusPush(summaryCB=sampleSummaryCB) gsp.builders = ["Builder1"] yield self.run_fake_summary_build(gsp, [FAILURE, FAILURE], FAILURE, ["failed", "failed"]) self.assertTrue( gsp.sendCodeReview.called, "sendCodeReview should be called") @defer.inlineCallbacks def run_fake_single_build(self, gsp, buildResult, expWarning=False): buildset, builds = yield self.setupBuildResults([buildResult], buildResult) yield gsp.buildStarted(None, builds[0]) yield gsp.buildComplete(None, builds[0]) if expWarning: self.assertEqual([w['message'] for w in self.flushWarnings()], ['The Gerrit status callback uses the old ' 'way to communicate results. The outcome ' 'might be not what is expected.']) return str({'name': 'Builder0', 'result': buildResult}) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def check_single_build(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCB, startCB=sampleStartCB) msg = yield self.run_fake_single_build(gsp, buildResult) start = makeReviewResult(str({'name': self.TEST_BUILDER_NAME}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) calls = [call(self.TEST_PROJECT, self.TEST_REVISION, start), call(self.TEST_PROJECT, self.TEST_REVISION, result)] gsp.sendCodeReview.assert_has_calls(calls) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def check_single_build_deferred(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCBDeferred, startCB=sampleStartCBDeferred) msg = yield self.run_fake_single_build(gsp, buildResult) start = makeReviewResult(str({'name': self.TEST_BUILDER_NAME}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore)) calls = [call(self.TEST_PROJECT, self.TEST_REVISION, start), call(self.TEST_PROJECT, self.TEST_REVISION, result)] gsp.sendCodeReview.assert_has_calls(calls) @defer.inlineCallbacks def check_single_build_legacy(self, buildResult, verifiedScore): gsp = yield self.setupGerritStatusPush(reviewCB=legacyTestReviewCB, startCB=sampleStartCB) msg = yield self.run_fake_single_build(gsp, buildResult, expWarning=True) start = makeReviewResult(str({'name': self.TEST_BUILDER_NAME}), (GERRIT_LABEL_REVIEWED, 0)) result = makeReviewResult(msg, (GERRIT_LABEL_VERIFIED, verifiedScore), (GERRIT_LABEL_REVIEWED, 0)) calls = [call(self.TEST_PROJECT, self.TEST_REVISION, start), call(self.TEST_PROJECT, self.TEST_REVISION, result)] gsp.sendCodeReview.assert_has_calls(calls) def test_buildComplete_success_sends_review(self): return self.check_single_build(SUCCESS, 1) def test_buildComplete_failure_sends_review(self): return self.check_single_build(FAILURE, -1) def test_buildComplete_success_sends_review_legacy(self): return self.check_single_build_legacy(SUCCESS, 1) def test_buildComplete_failure_sends_review_legacy(self): return 
self.check_single_build_legacy(FAILURE, -1) # same goes for check_single_build and check_single_build_legacy @defer.inlineCallbacks def test_single_build_filtered(self): gsp = yield self.setupGerritStatusPush(reviewCB=sampleReviewCB, startCB=sampleStartCB) gsp.builders = ["Builder0"] yield self.run_fake_single_build(gsp, SUCCESS) self.assertTrue( gsp.sendCodeReview.called, "sendCodeReview should be called") gsp.sendCodeReview = Mock() gsp.builders = ["foo"] yield self.run_fake_single_build(gsp, SUCCESS) self.assertFalse( gsp.sendCodeReview.called, "sendCodeReview should not be called") def test_defaultReviewCBSuccess(self): res = defaultReviewCB("builderName", {}, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': 1}) res = defaultReviewCB("builderName", {}, RETRY, None, None) self.assertEqual(res['labels'], {}) def test_defaultSummaryCB(self): info = self.makeBuildInfo( [SUCCESS, FAILURE], ["yes", "no"], [None, None]) res = defaultSummaryCB(info, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': -1}) info = self.makeBuildInfo( [SUCCESS, SUCCESS], ["yes", "yes"], [None, None]) res = defaultSummaryCB(info, SUCCESS, None, None) self.assertEqual(res['labels'], {'Verified': 1}) @defer.inlineCallbacks def testBuildGerritCommand(self): gsp = yield self.setupGerritStatusPushSimple() spawnSkipFirstArg = Mock() gsp.spawnProcess = lambda _, *a, **k: spawnSkipFirstArg(*a, **k) yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'version'], env=None) gsp.processVersion("2.6", lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', "--message 'bla'", '--label Verified=1', 'revision'], env=None) # <=2.5 uses other syntax gsp.processVersion("2.4", lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', "--message 'bla'", '--verified 1', 'revision'], env=None) # now test the notify argument, even though _gerrit_notify # is private, work around that gsp._gerrit_notify = 'OWNER' gsp.processVersion('2.6', lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview('project', 'revision', {'message': 'bla', 'labels': {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--notify OWNER', "--message 'bla'", '--label Verified=1', 'revision'], env=None) # gerrit versions <= 2.5 uses other syntax gsp.processVersion('2.4', lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview('project', 'revision', {'message': 'bla', 'labels': {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--notify OWNER', "--message 'bla'", '--verified 1', 'revision'], env=None) gsp.processVersion("2.13", lambda: None) spawnSkipFirstArg = Mock() yield gsp.sendCodeReview("project", "revision", {"message": "bla", "labels": {'Verified': 1}}) spawnSkipFirstArg.assert_called_once_with( 'ssh', ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'review', '--project project', '--tag 
autogenerated:buildbot', '--notify OWNER', "--message 'bla'", '--label Verified=1', 'revision'], env=None) @defer.inlineCallbacks def test_callWithVersion_bytes_output(self): gsp = yield self.setupGerritStatusPushSimple() exp_argv = ['ssh', 'user@serv', '-p', '29418', 'gerrit', 'version'] def spawnProcess(pp, cmd, argv, env): self.assertEqual([cmd, argv], [exp_argv[0], exp_argv]) pp.errReceived(b'test stderr\n') pp.outReceived(b'gerrit version 2.14\n') pp.outReceived(b'(garbage that should not cause a crash)\n') so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) gsp.callWithVersion(lambda: self.assertEqual( gsp.gerrit_version, LooseVersion('2.14'))) def test_name_as_class_attribute(self): class FooStatusPush(GerritStatusPush): name = 'foo' reporter = FooStatusPush('gerrit.server.com', 'password') self.assertEqual(reporter.name, 'foo') def test_name_as_kwarg(self): reporter = GerritStatusPush('gerrit.server.com', 'password', name='foo') self.assertEqual(reporter.name, 'foo') def test_default_name(self): reporter = GerritStatusPush('gerrit.server.com', 'password') self.assertEqual(reporter.name, 'GerritStatusPush') buildbot-2.6.0/master/buildbot/test/unit/test_reporter_gerrit_verify_status.py000066400000000000000000000360731361162603000302050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import renderer from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.gerrit_verify_status import GerritVerifyStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin from .test_changes_gerritchangesource import TestGerritChangeSource class TestGerritVerifyStatusPush(TestReactorMixin, ReporterTestMixin, logging.LoggingMixin, unittest.TestCase): TEST_PROPS = {'gerrit_changes': [{'change_id': 12, 'revision_id': 2}]} @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def createGerritStatus(self, **kwargs): auth = kwargs.pop('auth', ('log', Interpolate('pass'))) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, "gerrit", auth=('log', 'pass'), debug=None, verify=None) self.sp = sp = GerritVerifyStatusPush("gerrit", auth=auth, **kwargs) sp.sessionFactory = Mock(return_value=Mock()) yield sp.setServiceParent(self.master) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_basic(self): yield self.createGerritStatus() build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': -1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_custom_description(self): yield self.createGerritStatus( startDescription=Interpolate("started %(prop:buildername)s"), endDescription=Interpolate("finished %(prop:buildername)s")) build = 
yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'started Builder0', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'finished Builder0', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_custom_name(self): yield self.createGerritStatus( verification_name=Interpolate("builder %(prop:buildername)s")) build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'builder Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'builder Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_custom_abstain(self): yield self.createGerritStatus( abstain=renderer(lambda p: p.getProperty("buildername") == 'Builder0')) build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': True, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': True, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_custom_category(self): yield self.createGerritStatus( category=renderer(lambda p: p.getProperty("buildername"))) build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 
'abstain': False, 'category': 'Builder0', 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'category': 'Builder0', 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_custom_reporter(self): yield self.createGerritStatus( reporter=renderer(lambda p: p.getProperty("buildername"))) build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build done.', 'abstain': False, 'name': 'Builder0', 'reporter': 'Builder0', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 1, 'duration': '2h 1m 4s' }) build['complete'] = False build['complete_at'] = None self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True build['complete_at'] = build['started_at'] + \ datetime.timedelta(hours=2, minutes=1, seconds=4) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_verbose(self): yield self.createGerritStatus(verbose=True) build = yield self.setupBuildResults(SUCCESS) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self.setUpLogging() self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged("Sending Gerrit status for") @defer.inlineCallbacks def test_not_verbose(self): yield self.createGerritStatus(verbose=False) build = yield self.setupBuildResults(SUCCESS) self._http.expect( method='post', ep='/a/changes/12/revisions/2/verify-status~verifications', json={ 'comment': 'Build started.', 'abstain': False, 'name': 'Builder0', 'reporter': 'buildbot', 'url': 'http://localhost:8080/#builders/79/builds/0', 'value': 0, 'duration': 'pending' }) self.setUpLogging() self._http.quiet = True self.sp.buildStarted(("build", 20, "started"), build) self.assertWasQuiet() @defer.inlineCallbacks def test_format_duration(self): yield self.createGerritStatus(verbose=False) self.assertEqual( self.sp.formatDuration(datetime.timedelta(seconds=1)), "0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(hours=1, seconds=1)), "1h 0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(days=1, seconds=1)), "1 day 0h 0m 1s") self.assertEqual( self.sp.formatDuration(datetime.timedelta(days=2, seconds=1)), "2 days 0h 0m 1s") @defer.inlineCallbacks def test_gerrit_changes(self): yield self.createGerritStatus() # from chdict: chdict = 
TestGerritChangeSource.expected_change props = Properties.fromDict({ k: (v, 'change') for k, v in chdict['properties'].items()}) changes = self.sp.getGerritChanges(props) self.assertEqual(changes, [ {'change_id': '4321', 'revision_id': '12'} ]) buildbot-2.6.0/master/buildbot/test/unit/test_reporter_github.py000066400000000000000000000242521361162603000252000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.github import HOSTED_BASE_URL from buildbot.reporters.github import GitHubCommentPush from buildbot.reporters.github import GitHubStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestGitHubStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin): # project must be in the form / TEST_PROJECT = 'buildbot/buildbot' @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, HOSTED_BASE_URL, headers={ 'Authorization': 'token XXYYZZ', 'User-Agent': 'Buildbot' }, debug=None, verify=None) sp = self.setService() sp.sessionFactory = Mock(return_value=Mock()) yield sp.setServiceParent(self.master) def setService(self): self.sp = GitHubStatusPush(Interpolate('XXYYZZ')) return self.sp def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_basic(self): build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot/buildbot/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 
'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def setupBuildResultsMin(self, buildResults): self.insertTestData([buildResults], buildResults, insertSS=False) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_empty(self): build = yield self.setupBuildResultsMin(SUCCESS) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) class TestGitHubStatusPushURL(TestReactorMixin, unittest.TestCase, ReporterTestMixin): # project must be in the form / TEST_PROJECT = 'buildbot' TEST_REPO = 'https://github.com/buildbot1/buildbot1.git' @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, HOSTED_BASE_URL, headers={ 'Authorization': 'token XXYYZZ', 'User-Agent': 'Buildbot' }, debug=None, verify=None) sp = self.setService() sp.sessionFactory = Mock(return_value=Mock()) yield sp.setServiceParent(self.master) def setService(self): self.sp = GitHubStatusPush('XXYYZZ') return self.sp def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_ssh(self): self.TEST_REPO = 'git@github.com:buildbot2/buildbot2.git' build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot2/buildbot2/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_https(self): build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'pending', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build started.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', 
'/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) self._http.expect( 'post', '/repos/buildbot1/buildbot1/statuses/d34db33fd43db33f', json={'state': 'failure', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'description': 'Build done.', 'context': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) class TestGitHubCommentPush(TestGitHubStatusPush): def setService(self): self.sp = GitHubCommentPush('XXYYZZ') return self.sp @defer.inlineCallbacks def test_basic(self): build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'post', '/repos/buildbot/buildbot/issues/34/comments', json={'body': 'Build done.'}) self._http.expect( 'post', '/repos/buildbot/buildbot/issues/34/comments', json={'body': 'Build done.'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_empty(self): build = yield self.setupBuildResultsMin(SUCCESS) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) buildbot-2.6.0/master/buildbot/test/unit/test_reporter_gitlab.py000066400000000000000000000207141361162603000251570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters.gitlab import HOSTED_BASE_URL from buildbot.reporters.gitlab import GitLabStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestGitLabStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, logging.LoggingMixin): # repository must be in the form http://gitlab// TEST_REPO = 'http://gitlab/buildbot/buildbot' @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, HOSTED_BASE_URL, headers={'PRIVATE-TOKEN': 'XXYYZZ'}, debug=None, verify=None) self.sp = sp = GitLabStatusPush(Interpolate('XXYYZZ')) sp.sessionFactory = Mock(return_value=Mock()) yield sp.setServiceParent(self.master) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_basic(self): build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'success', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build done.', 'name': 'buildbot/Builder0'}) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'failed', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build done.', 'name': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) build['results'] = FAILURE self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_sshurl(self): self.TEST_REPO = 'git@gitlab:buildbot/buildbot.git' build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) @defer.inlineCallbacks def 
test_merge_request_forked(self): self.TEST_REPO = 'git@gitlab:buildbot/buildbot.git' self.TEST_PROPS['source_project_id'] = 20922342342 build = yield self.setupBuildResults(SUCCESS) self._http.expect( 'post', '/api/v4/projects/20922342342/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) # Don't run these tests in parallel! del self.TEST_PROPS['source_project_id'] @defer.inlineCallbacks def test_noproject(self): self.TEST_REPO = 'git@gitlab:buildbot/buildbot.git' self.setUpLogging() build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "message": 'project not found' }, code=404) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged(r"Unknown \(or hidden\) gitlab projectbuildbot%2Fbuildbot:" r" project not found") @defer.inlineCallbacks def test_nourl(self): self.TEST_REPO = '' build = yield self.setupBuildResults(SUCCESS) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) # implicit check that no http request is done # nothing is logged as well @defer.inlineCallbacks def test_senderror(self): self.setUpLogging() build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) self._http.expect( 'post', '/api/v4/projects/1/statuses/d34db33fd43db33f', json={'state': 'running', 'target_url': 'http://localhost:8080/#builders/79/builds/0', 'ref': 'master', 'description': 'Build started.', 'name': 'buildbot/Builder0'}, content_json={'message': 'sha1 not found for branch master'}, code=404) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged("Could not send status \"running\" for" " http://gitlab/buildbot/buildbot at d34db33fd43db33f:" " sha1 not found for branch master") @defer.inlineCallbacks def test_badchange(self): self.setUpLogging() build = yield self.setupBuildResults(SUCCESS) # we make sure proper calls to txrequests have been made self._http.expect( 'get', '/api/v4/projects/buildbot%2Fbuildbot', content_json={ "id": 1 }) build['complete'] = False self.sp.buildStarted(("build", 20, "started"), build) self.assertLogged("Failed to send status \"running\" for" " http://gitlab/buildbot/buildbot at d34db33fd43db33f\n" "Traceback") self.flushLoggedErrors(AssertionError) buildbot-2.6.0/master/buildbot/test/unit/test_reporter_http.py000066400000000000000000000143121361162603000246710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters.http import HttpStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class BuildLookAlike: """ a class whose instances compares to any build dict that this reporter is supposed to send out""" def __init__(self, keys=None, **assertions): self.keys = [ 'builder', 'builderid', 'buildid', 'buildrequest', 'buildrequestid', 'buildset', 'complete', 'complete_at', 'masterid', 'number', 'properties', 'results', 'started_at', 'state_string', 'url', 'workerid'] if keys: self.keys.extend(keys) self.keys.sort() self.assertions = assertions def __eq__(self, b): if sorted(b.keys()) != self.keys: return False for k, v in self.assertions.items(): if b[k] != v: return False return True def __ne__(self, b): return not (self == b) def __repr__(self): return "{ any build }" class TestHttpStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed config._errors = Mock() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) yield self.master.startService() @defer.inlineCallbacks def createReporter(self, auth=("username", "passwd"), **kwargs): self._http = yield fakehttpclientservice.HTTPClientService.getService( self.master, "serv", auth=auth, debug=None, verify=None) interpolated_auth = None if auth is not None: username, passwd = auth passwd = Interpolate(passwd) interpolated_auth = (username, passwd) self.sp = sp = HttpStatusPush("serv", auth=interpolated_auth, **kwargs) yield sp.setServiceParent(self.master) @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() config._errors = None @defer.inlineCallbacks def setupBuildResults(self, buildResults): self.insertTestData([buildResults], buildResults) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_basic(self): yield self.createReporter() self._http.expect("post", "", json=BuildLookAlike(complete=False)) self._http.expect("post", "", json=BuildLookAlike(complete=True)) build = yield self.setupBuildResults(SUCCESS) build['complete'] = False self.sp.buildStarted(("build", 20, "new"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_basic_noauth(self): yield self.createReporter(auth=None) self._http.expect("post", "", json=BuildLookAlike(complete=False)) self._http.expect("post", "", json=BuildLookAlike(complete=True)) build = yield self.setupBuildResults(SUCCESS) build['complete'] = False self.sp.buildStarted(("build", 20, "new"), build) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_filtering(self): yield self.createReporter(builders=['foo']) build = yield self.setupBuildResults(SUCCESS) self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_filteringPass(self): yield self.createReporter(builders=['Builder0']) self._http.expect("post", "", json=BuildLookAlike()) build = yield self.setupBuildResults(SUCCESS) 
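        # 'Builder0' matches the configured builders filter, so exactly one POST
        # (matched loosely via BuildLookAlike) is expected here, whereas the
        # preceding test_filtering case registers no expectation at all.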
self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def test_builderTypeCheck(self): yield self.createReporter(builders='Builder0') config._errors.addError.assert_any_call( "builders must be a list or None") @defer.inlineCallbacks def test_wantKwargsCheck(self): yield self.createReporter(builders='Builder0', wantProperties=True, wantSteps=True, wantPreviousBuild=True, wantLogs=True) self._http.expect("post", "", json=BuildLookAlike( keys=['steps', 'prev_build'])) build = yield self.setupBuildResults(SUCCESS) build['complete'] = True self.sp.buildFinished(("build", 20, "finished"), build) @defer.inlineCallbacks def http2XX(self, code, content): yield self.createReporter() self._http.expect('post', '', code=code, content=content, json=BuildLookAlike()) build = yield self.setupBuildResults(SUCCESS) self.sp.buildStarted(('build', 20, 'finished'), build) @defer.inlineCallbacks def test_http200(self): yield self.http2XX(code=200, content="OK") @defer.inlineCallbacks def test_http201(self): # e.g. GitHub returns 201 yield self.http2XX(code=201, content="Created") @defer.inlineCallbacks def test_http202(self): yield self.http2XX(code=202, content="Accepted") buildbot-2.6.0/master/buildbot/test/unit/test_reporter_zulip.py000066400000000000000000000166261361162603000250670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

import datetime

from dateutil.tz import tzutc

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process.results import SUCCESS
from buildbot.reporters.zulip import ZulipStatusPush
from buildbot.test.fake import fakemaster
from buildbot.test.fake import httpclientservice as fakehttpclientservice
from buildbot.test.util.config import ConfigErrorsMixin
from buildbot.test.util.logging import LoggingMixin
from buildbot.test.util.misc import TestReactorMixin
from buildbot.test.util.reporter import ReporterTestMixin


class TestZulipStatusPush(unittest.TestCase, ReporterTestMixin,
                          LoggingMixin, ConfigErrorsMixin, TestReactorMixin):

    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(
            testcase=self, wantData=True, wantDb=True, wantMq=True)

    @defer.inlineCallbacks
    def tearDown(self):
        if self.master.running:
            yield self.master.stopService()

    @defer.inlineCallbacks
    def setupZulipStatusPush(self, endpoint="http://example.com",
                             token="123", stream=None):
        self.sp = ZulipStatusPush(
            endpoint=endpoint, token=token, stream=stream)
        self._http = yield fakehttpclientservice.HTTPClientService.getFakeService(
            self.master, self, endpoint, debug=None, verify=None)
        yield self.sp.setServiceParent(self.master)
        yield self.master.startService()

    @defer.inlineCallbacks
    def setupBuildResults(self):
        self.insertTestData([SUCCESS], SUCCESS)
        build = yield self.master.data.get(("builds", 20))
        return build

    @defer.inlineCallbacks
    def test_build_started(self):
        yield self.setupZulipStatusPush(stream="xyz")
        build = yield self.setupBuildResults()
        build["started_at"] = datetime.datetime(
            2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc())
        self._http.expect(
            'post',
            '/api/v1/external/buildbot?api_key=123&stream=xyz',
            json={
                "event": "new",
                "buildid": 20,
                "buildername": "Builder0",
                "url": "http://localhost:8080/#builders/79/builds/0",
                "project": "testProject",
                "timestamp": 1554161923
            })
        self.sp.buildStarted(('build', 20, 'new'), build)

    @defer.inlineCallbacks
    def test_build_finished(self):
        yield self.setupZulipStatusPush(stream="xyz")
        build = yield self.setupBuildResults()
        build["complete_at"] = datetime.datetime(
            2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc())
        self._http.expect(
            'post',
            '/api/v1/external/buildbot?api_key=123&stream=xyz',
            json={
                "event": "finished",
                "buildid": 20,
                "buildername": "Builder0",
                "url": "http://localhost:8080/#builders/79/builds/0",
                "project": "testProject",
                "timestamp": 1554161923,
                "results": 0
            })
        self.sp.buildFinished(('build', 20, 'finished'), build)

    @defer.inlineCallbacks
    def test_stream_none(self):
        yield self.setupZulipStatusPush(stream=None)
        build = yield self.setupBuildResults()
        build["complete_at"] = datetime.datetime(
            2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc())
        self._http.expect(
            'post',
            '/api/v1/external/buildbot?api_key=123',
            json={
                "event": "finished",
                "buildid": 20,
                "buildername": "Builder0",
                "url": "http://localhost:8080/#builders/79/builds/0",
                "project": "testProject",
                "timestamp": 1554161923,
                "results": 0
            })
        self.sp.buildFinished(('build', 20, 'finished'), build)

    def test_endpoint_string(self):
        with self.assertRaisesConfigError(
                "Endpoint must be a string"):
            ZulipStatusPush(endpoint=1234, token="abcd")

    def test_token_string(self):
        with self.assertRaisesConfigError(
                "Token must be a string"):
            ZulipStatusPush(endpoint="http://example.com", token=1234)

    @defer.inlineCallbacks
    def test_invalid_json_data(self):
        yield self.setupZulipStatusPush(stream="xyz")
        build = yield self.setupBuildResults()
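        # Freezing started_at makes the epoch timestamp in the expected payload
        # (1554161923) deterministic; the fake HTTP service below replies with a
        # 500 so the reporter's error-logging path is exercised.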
build["started_at"] = datetime.datetime( 2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc()) self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": "new", "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 1554161923 }, code=500) self.setUpLogging() self.sp.buildStarted(("build", 20, "new"), build) self.assertLogged('500: Error pushing build status to Zulip') @defer.inlineCallbacks def test_invalid_url(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.setupBuildResults() build["started_at"] = datetime.datetime( 2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc()) self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": "new", "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 1554161923 }, code=404) self.setUpLogging() self.sp.buildStarted(("build", 20, "new"), build) self.assertLogged('404: Error pushing build status to Zulip') @defer.inlineCallbacks def test_invalid_token(self): yield self.setupZulipStatusPush(stream="xyz") build = yield self.setupBuildResults() build["started_at"] = datetime.datetime( 2019, 4, 1, 23, 38, 43, 154354, tzinfo=tzutc()) self._http.expect( 'post', '/api/v1/external/buildbot?api_key=123&stream=xyz', json={ "event": "new", "buildid": 20, "buildername": "Builder0", "url": "http://localhost:8080/#builders/79/builds/0", "project": "testProject", "timestamp": 1554161923 }, code=401, content_json={"result": "error", "msg": "Invalid API key", "code": "INVALID_API_KEY"}) self.setUpLogging() self.sp.buildStarted(("build", 20, "new"), build) self.assertLogged('401: Error pushing build status to Zulip') buildbot-2.6.0/master/buildbot/test/unit/test_reporters_hipchat.py000066400000000000000000000227031361162603000255200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters.hipchat import HOSTED_BASE_URL from buildbot.reporters.hipchat import HipChatStatusPush from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.logging import LoggingMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.reporter import ReporterTestMixin class TestHipchatStatusPush(TestReactorMixin, unittest.TestCase, ReporterTestMixin, LoggingMixin): def setUp(self): self.setUpTestReactor() # ignore config error if txrequests is not installed self.patch(config, '_errors', Mock()) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def tearDown(self): if self.master.running: yield self.master.stopService() @defer.inlineCallbacks def createReporter(self, **kwargs): kwargs['auth_token'] = kwargs.get('auth_token', 'abc') self.sp = HipChatStatusPush(**kwargs) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, kwargs.get('endpoint', HOSTED_BASE_URL), debug=None, verify=None) yield self.sp.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def setupBuildResults(self): self.insertTestData([SUCCESS], SUCCESS) build = yield self.master.data.get(("builds", 20)) return build @defer.inlineCallbacks def test_authtokenTypeCheck(self): yield self.createReporter(auth_token=2) config._errors.addError.assert_any_call('auth_token must be a string') def test_endpointTypeCheck(self): HipChatStatusPush(auth_token="2", endpoint=2) config._errors.addError.assert_any_call('endpoint must be a string') @defer.inlineCallbacks def test_builderRoomMapTypeCheck(self): yield self.createReporter(builder_room_map=2) config._errors.addError.assert_any_call( 'builder_room_map must be a dict') @defer.inlineCallbacks def test_builderUserMapTypeCheck(self): yield self.createReporter(builder_user_map=2) config._errors.addError.assert_any_call( 'builder_user_map must be a dict') @defer.inlineCallbacks def test_interpolateAuth(self): yield self.createReporter(auth_token=Interpolate('auth'), builder_user_map={'Builder0': '123'}) build = yield self.setupBuildResults() self._http.expect( 'post', '/v2/user/123/message', params=dict(auth_token='auth'), json={'message': 'Buildbot started build Builder0 here: http://localhost:8080/#builders/79/builds/0'}) self.sp.buildStarted(('build', 20, 'new'), build) @defer.inlineCallbacks def test_build_started(self): yield self.createReporter(builder_user_map={'Builder0': '123'}) build = yield self.setupBuildResults() self._http.expect( 'post', '/v2/user/123/message', params=dict(auth_token='abc'), json={'message': 'Buildbot started build Builder0 here: http://localhost:8080/#builders/79/builds/0'}) self.sp.buildStarted(('build', 20, 'new'), build) @defer.inlineCallbacks def test_build_finished(self): yield self.createReporter(builder_room_map={'Builder0': '123'}) build = yield self.setupBuildResults() self._http.expect( 'post', '/v2/room/123/notification', params=dict(auth_token='abc'), json={'message': 'Buildbot finished build Builder0 with result success ' 'here: http://localhost:8080/#builders/79/builds/0'}) self.sp.buildFinished(('build', 20, 'finished'), build) 
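    # A subclass could, in principle, extend the notification payload by overriding
    # getExtraParams(), whose return value the reporter merges into the JSON body
    # it posts. A minimal, hypothetical sketch (the exact signature is assumed, not
    # taken from this file):
    #
    #     class HtmlHipChatStatusPush(HipChatStatusPush):
    #         def getExtraParams(self, *args, **kwargs):
    #             return {'format': 'html', 'notify': True}
    #
    # The next test emulates the same effect by mocking getExtraParams directly.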
@defer.inlineCallbacks def test_inject_extra_params(self): yield self.createReporter(builder_room_map={'Builder0': '123'}) self.sp.getExtraParams = Mock() self.sp.getExtraParams.return_value = {'format': 'html'} build = yield self.setupBuildResults() self._http.expect( 'post', '/v2/room/123/notification', params=dict(auth_token='abc'), json={'message': 'Buildbot finished build Builder0 with result success ' 'here: http://localhost:8080/#builders/79/builds/0', 'format': 'html'}) self.sp.buildFinished(('build', 20, 'finished'), build) @defer.inlineCallbacks def test_no_message_sent_empty_message(self): yield self.createReporter() build = yield self.setupBuildResults() self.sp.send(build, 'unknown') @defer.inlineCallbacks def test_no_message_sent_without_id(self): yield self.createReporter() build = yield self.setupBuildResults() self.sp.send(build, 'new') @defer.inlineCallbacks def test_private_message_sent_with_user_id(self): token = 'tok' endpoint = 'example.com' yield self.createReporter(auth_token=token, endpoint=endpoint) self.sp.getBuildDetailsAndSendMessage = Mock() message = {'message': 'hi'} postData = dict(message) postData.update({'id_or_email': '123'}) self.sp.getBuildDetailsAndSendMessage.return_value = postData self._http.expect( 'post', '/v2/user/123/message', params=dict(auth_token=token), json=message) self.sp.send({}, 'test') @defer.inlineCallbacks def test_room_message_sent_with_room_id(self): token = 'tok' endpoint = 'example.com' yield self.createReporter(auth_token=token, endpoint=endpoint) self.sp.getBuildDetailsAndSendMessage = Mock() message = {'message': 'hi'} postData = dict(message) postData.update({'room_id_or_name': '123'}) self.sp.getBuildDetailsAndSendMessage.return_value = postData self._http.expect( 'post', '/v2/room/123/notification', params=dict(auth_token=token), json=message) self.sp.send({}, 'test') @defer.inlineCallbacks def test_private_and_room_message_sent_with_both_ids(self): token = 'tok' endpoint = 'example.com' yield self.createReporter(auth_token=token, endpoint=endpoint) self.sp.getBuildDetailsAndSendMessage = Mock() message = {'message': 'hi'} postData = dict(message) postData.update({'room_id_or_name': '123', 'id_or_email': '456'}) self.sp.getBuildDetailsAndSendMessage.return_value = postData self._http.expect( 'post', '/v2/user/456/message', params=dict(auth_token=token), json=message) self._http.expect( 'post', '/v2/room/123/notification', params=dict(auth_token=token), json=message) self.sp.send({}, 'test') @defer.inlineCallbacks def test_postData_values_passed_through(self): token = 'tok' endpoint = 'example.com' yield self.createReporter(auth_token=token, endpoint=endpoint) self.sp.getBuildDetailsAndSendMessage = Mock() message = {'message': 'hi', 'notify': True, 'message_format': 'html'} postData = dict(message) postData.update({'id_or_email': '123'}) self.sp.getBuildDetailsAndSendMessage.return_value = postData self._http.expect( 'post', '/v2/user/123/message', params=dict(auth_token=token), json=message) self.sp.send({}, 'test') @defer.inlineCallbacks def test_postData_error(self): token = 'tok' endpoint = 'example.com' yield self.createReporter(auth_token=token, endpoint=endpoint) self.sp.getBuildDetailsAndSendMessage = Mock() message = {'message': 'hi', 'notify': True, 'message_format': 'html'} postData = dict(message) postData.update({'id_or_email': '123'}) self.sp.getBuildDetailsAndSendMessage.return_value = postData self._http.expect( 'post', '/v2/user/123/message', params=dict(auth_token=token), json=message, code=404, 
content_json={ "error_description": "This user is unknown to us", "error": "invalid_user"}) self.setUpLogging() self.sp.send({}, 'test') self.assertLogged('404: unable to upload status') buildbot-2.6.0/master/buildbot/test/unit/test_reporters_irc.py000066400000000000000000000444651361162603000246660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import mock from twisted.application import internet from twisted.internet import defer from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process.properties import Interpolate from buildbot.process.results import ALL_RESULTS from buildbot.process.results import SUCCESS from buildbot.reporters import irc from buildbot.reporters import words from buildbot.test.unit.test_reporters_words import ContactMixin from buildbot.test.util import config from buildbot.util import service class TestIrcContact(ContactMixin, unittest.TestCase): channelClass = irc.IRCChannel contactClass = irc.IRCContact def patch_act(self): self.actions = [] def act(msg): self.actions.append(msg) self.contact.act = act @defer.inlineCallbacks def test_op_required_authz(self): self.bot.authz = self.bot.expand_authz({ ('mute', 'unmute'): [self.USER] }) self.bot.getChannelOps = lambda channel: ['channelop'] self.assertFalse((yield self.contact.op_required('mute'))) @defer.inlineCallbacks def test_op_required_operator(self): self.bot.getChannelOps = lambda channel: [self.USER] self.assertFalse((yield self.contact.op_required('command'))) @defer.inlineCallbacks def test_op_required_unauthorized(self): self.bot.getChannelOps = lambda channel: ['channelop'] self.assertTrue((yield self.contact.op_required('command'))) @defer.inlineCallbacks def test_command_mute(self): self.bot.getChannelOps = lambda channel: [self.USER] yield self.do_test_command('mute') self.assertTrue(self.contact.channel.muted) @defer.inlineCallbacks def test_command_mute_unauthorized(self): self.bot.getChannelOps = lambda channel: [] yield self.do_test_command('mute') self.assertFalse(self.contact.channel.muted) self.assertIn("blah, blah", self.sent[0]) @defer.inlineCallbacks def test_command_unmute(self): self.bot.getChannelOps = lambda channel: [self.USER] self.contact.channel.muted = True yield self.do_test_command('unmute') self.assertFalse(self.contact.channel.muted) @defer.inlineCallbacks def test_command_unmute_unauthorized(self): self.bot.getChannelOps = lambda channel: [] self.contact.channel.muted = True yield self.do_test_command('unmute') self.assertTrue(self.contact.channel.muted) @defer.inlineCallbacks def test_command_unmute_not_muted(self): self.bot.getChannelOps = lambda channel: [self.USER] yield self.do_test_command('unmute') self.assertFalse(self.contact.channel.muted) self.assertIn("No one had told me to be quiet", self.sent[0]) @defer.inlineCallbacks def 
test_command_notify(self): self.bot.getChannelOps = lambda channel: [self.USER] self.assertNotIn('success', self.contact.channel.notify_events) yield self.do_test_command('notify', 'on success') self.assertIn('success', self.contact.channel.notify_events) @defer.inlineCallbacks def test_command_notify_unauthorized(self): self.bot.getChannelOps = lambda channel: [] self.assertNotIn('success', self.contact.channel.notify_events) yield self.do_test_command('notify', 'on success') self.assertNotIn('success', self.contact.channel.notify_events) @defer.inlineCallbacks def test_command_destroy(self): self.patch_act() yield self.do_test_command('destroy', exp_usage=False) self.assertEqual(self.actions, ['readies phasers']) @defer.inlineCallbacks def test_command_dance(self): yield self.do_test_command('dance', clock_ticks=[1.0] * 10, exp_usage=False) self.assertTrue(self.sent) # doesn't matter what it sent @defer.inlineCallbacks def test_command_hustle(self): self.patch_act() yield self.do_test_command('hustle', clock_ticks=[1.0] * 2, exp_usage=False) self.assertEqual(self.actions, ['does the hustle']) def test_send(self): events = [] def groupChat(dest, msg): events.append((dest, msg)) self.contact.bot.groupSend = groupChat self.contact.send("unmuted") self.contact.send("unmuted, unicode \N{SNOWMAN}") self.contact.channel.muted = True self.contact.send("muted") self.assertEqual(events, [ ('#buildbot', 'unmuted'), ('#buildbot', 'unmuted, unicode \u2603'), ]) def test_handleAction_ignored(self): self.patch_act() self.contact.handleAction('waves hi') self.assertEqual(self.actions, []) def test_handleAction_kick(self): self.patch_act() self.contact.handleAction('kicks nick') self.assertEqual(self.actions, ['kicks back']) def test_handleAction_stupid(self): self.patch_act() self.contact.handleAction('stupids nick') self.assertEqual(self.actions, ['stupids me too']) def test_act(self): events = [] def groupDescribe(dest, msg): events.append((dest, msg)) self.contact.bot.groupDescribe = groupDescribe self.contact.act("unmuted") self.contact.act("unmuted, unicode \N{SNOWMAN}") self.contact.channel.muted = True self.contact.act("muted") self.assertEqual(events, [ ('#buildbot', 'unmuted'), ('#buildbot', 'unmuted, unicode \u2603'), ]) class FakeContact(service.AsyncService): def __init__(self, user, channel=None): super().__init__() self.user_id = user self.channel = mock.Mock() self.messages = [] self.actions = [] def handleMessage(self, message): self.messages.append(message) def handleAction(self, data): self.actions.append(data) class TestIrcStatusBot(unittest.TestCase): def makeBot(self, *args, **kwargs): if not args: args = ('nick', 'pass', ['#ch'], [], False) bot = irc.IrcStatusBot(*args, **kwargs) bot.parent = mock.Mock() bot.parent.master.db.state.getState = lambda *args, **kwargs: None return bot def test_groupDescribe(self): b = self.makeBot() b.describe = lambda d, m: evts.append(('n', d, m)) evts = [] b.groupDescribe('#chan', 'hi') self.assertEqual(evts, [('n', '#chan', 'hi')]) def test_groupChat(self): b = self.makeBot() b.msg = lambda d, m: evts.append(('n', d, m)) evts = [] b.groupSend('#chan', 'hi') self.assertEqual(evts, [('n', '#chan', 'hi')]) def test_groupChat_notice(self): b = self.makeBot('nick', 'pass', ['#ch'], [], True) b.notice = lambda d, m: evts.append(('n', d, m)) evts = [] b.groupSend('#chan', 'hi') self.assertEqual(evts, [('n', '#chan', 'hi')]) def test_msg(self): b = self.makeBot() b.msg = lambda d, m: evts.append(('m', d, m)) evts = [] b.msg('nick', 'hi') 
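        # msg() was replaced above with a recorder, so the assertion below checks
        # message routing without opening any real IRC connection.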
self.assertEqual(evts, [('m', 'nick', 'hi')]) def test_getContact(self): b = self.makeBot() c1 = b.getContact(user='u1', channel='c1') c2 = b.getContact(user='u1', channel='c2') c1b = b.getContact(user='u1', channel='c1') self.assertIdentical(c1, c1b) self.assertIsInstance(c2, words.Contact) def test_getContact_case_insensitive(self): b = self.makeBot() c1 = b.getContact(user='u1') c1b = b.getContact(user='U1') self.assertIdentical(c1, c1b) def test_getContact_invalid(self): b = self.makeBot() b.authz = {'': None} u = b.getContact(user='u0', channel='c0') self.assertNotIn(('c0', 'u0'), b.contacts) self.assertNotIn('c0', b.channels) self.assertEqual(sys.getrefcount(u), 2) # local, sys c = u.channel self.assertEqual(sys.getrefcount(c), 3) # local, contact, sys del u self.assertEqual(sys.getrefcount(c), 2) # local, sys def test_getContact_valid(self): b = self.makeBot() b.authz = {'': None, 'command': ['u0']} b.getContact(user='u0', channel='c0') self.assertIn(('c0', 'u0'), b.contacts) def test_privmsg_user(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', 'nick', 'hello') c = b.getContact('jimmy') self.assertEqual(c.messages, ['hello']) def test_privmsg_user_uppercase(self): b = self.makeBot('NICK', 'pass', ['#ch'], [], False) b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', 'NICK', 'hello') c = b.getContact('jimmy') self.assertEqual(c.messages, ['hello']) def test_privmsg_channel_unrelated(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', '#ch', 'hello') c = b.getContact('jimmy', '#ch') self.assertEqual(c.messages, []) def test_privmsg_channel_related(self): b = self.makeBot() b.contactClass = FakeContact b.privmsg('jimmy!~foo@bar', '#ch', 'nick: hello') c = b.getContact('jimmy', '#ch') self.assertEqual(c.messages, [' hello']) def test_action_unrelated(self): b = self.makeBot() b.contactClass = FakeContact b.action('jimmy!~foo@bar', '#ch', 'waves') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, []) def test_action_unrelated_buildbot(self): b = self.makeBot() b.contactClass = FakeContact # b.nickname is not 'buildbot' b.action('jimmy!~foo@bar', '#ch', 'waves at buildbot') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, []) def test_action_related(self): b = self.makeBot() b.contactClass = FakeContact b.action('jimmy!~foo@bar', '#ch', 'waves at nick') c = b.getContact('jimmy', '#ch') self.assertEqual(c.actions, ['waves at nick']) def test_signedOn(self): b = self.makeBot('nick', 'pass', ['#ch1', dict(channel='#ch2', password='sekrits')], ['jimmy', 'bobby'], False) evts = [] def msg(d, m): evts.append(('m', d, m)) b.msg = msg def join(channel, key): evts.append(('k', channel, key)) b.join = join b.contactClass = FakeContact b.signedOn() self.assertEqual(sorted(evts), [ ('k', '#ch1', None), ('k', '#ch2', 'sekrits'), ('m', 'Nickserv', 'IDENTIFY pass'), ]) self.assertEqual(sorted(b.contacts.keys()), # channels don't get added until joined() is called sorted([('jimmy', 'jimmy'), ('bobby', 'bobby')])) def test_joined(self): b = self.makeBot() b.joined('#ch1') b.joined('#ch2') self.assertEqual(sorted(b.channels.keys()), sorted(['#ch1', '#ch2'])) def test_userLeft_or_userKicked(self): b = self.makeBot() b.getContact(channel='c', user='u') self.assertIn(('c', 'u'), b.contacts) b.userKicked('u', 'c', 'k', 'm') self.assertNotIn(('c', 'u'), b.contacts) def test_userQuit(self): b = self.makeBot() b.getContact(channel='c1', user='u') b.getContact(channel='c2', user='u') b.getContact(user='u') 
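        # Two channel contacts plus one private contact now exist for user 'u';
        # a single quit event is expected to remove all three.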
self.assertEquals(len(b.contacts), 3) b.userQuit('u', 'm') self.assertEquals(len(b.contacts), 0) def test_other(self): # these methods just log, but let's get them covered anyway b = self.makeBot() b.left('#ch1') b.kickedFrom('#ch1', 'dustin', 'go away!') def test_format_build_status(self): b = self.makeBot() self.assertEquals(b.format_build_status({'results': SUCCESS}), "completed successfully") def test_format_build_status_short(self): b = self.makeBot() self.assertEquals(b.format_build_status({'results': SUCCESS}, True), ", Success") def test_format_build_status_colors(self): b = self.makeBot() b.useColors = True self.assertEqual(b.format_build_status({'results': SUCCESS}), "\x033completed successfully\x0f") colors_used = set() status_texts = set() for result in ALL_RESULTS: status = b.format_build_status({'results': result}) self.assertTrue(status.startswith('\x03')) self.assertTrue(status.endswith('\x0f')) for i, c in enumerate(status[1:-1], start=2): if c.isnumeric(): continue break colors_used.add(status[1:i]) status_texts.add(status[i:-1]) self.assertEqual(len(colors_used), len(ALL_RESULTS)) self.assertEqual(len(status_texts), len(ALL_RESULTS)) def test_getNames(self): b = self.makeBot() b.sendLine = lambda *args: None d = b.getNames('#channel') names = [] def cb(n): names.extend(n) d.addCallback(cb) b.irc_RPL_NAMREPLY('', ('test', '=', '#channel', 'user1 user2')) b.irc_RPL_ENDOFNAMES('', ('test', '#channel')) self.assertEqual(names, ['user1', 'user2']) def test_getChannelOps(self): b = self.makeBot() b.sendLine = lambda *args: None d = b.getChannelOps('#channel') names = [] def cb(n): names.extend(n) d.addCallback(cb) b.irc_RPL_NAMREPLY('', ('test', '=', '#channel', 'user1 @user2')) b.irc_RPL_ENDOFNAMES('', ('test', '#channel')) self.assertEqual(names, ['user2']) class TestIrcStatusFactory(unittest.TestCase): def makeFactory(self, *args, **kwargs): if not args: args = ('nick', 'pass', ['ch'], [], [], {}, {}) return irc.IrcStatusFactory(*args, **kwargs) def test_shutdown(self): # this is kinda lame, but the factory would be better tested # in an integration-test environment f = self.makeFactory() self.assertFalse(f.shuttingDown) f.shutdown() self.assertTrue(f.shuttingDown) class TestIRC(config.ConfigErrorsMixin, unittest.TestCase): def makeIRC(self, **kwargs): kwargs.setdefault('host', 'localhost') kwargs.setdefault('nick', 'russo') kwargs.setdefault('channels', ['#buildbot']) self.factory = None def TCPClient(host, port, factory): client = mock.Mock(name='tcp-client') client.host = host client.port = port client.factory = factory # keep for later self.factory = factory self.client = client return client self.patch(internet, 'TCPClient', TCPClient) return irc.IRC(**kwargs) @defer.inlineCallbacks def test_constr(self): ircStatus = self.makeIRC(host='foo', port=123) yield ircStatus.startService() self.client.setServiceParent.assert_called_with(ircStatus) self.assertEqual(self.client.host, 'foo') self.assertEqual(self.client.port, 123) self.assertIsInstance(self.client.factory, irc.IrcStatusFactory) @defer.inlineCallbacks def test_constr_args(self): # test that the args to IRC(..) 
make it all the way down to # the IrcStatusBot class s = self.makeIRC( host='host', nick='nick', channels=['channels'], pm_to_nicks=['pm', 'to', 'nicks'], noticeOnChannel=True, port=1234, tags=['tags'], password=Interpolate('pass'), notify_events={'successToFailure': 1, }, showBlameList=False, useRevisions=True, useSSL=False, lostDelay=10, failedDelay=20, useColors=False) yield s.startService() # patch it up factory = self.factory proto_obj = mock.Mock(name='proto_obj') factory.protocol = mock.Mock(name='protocol', return_value=proto_obj) # run it p = factory.buildProtocol('address') self.assertIdentical(p, proto_obj) factory.protocol.assert_called_with( 'nick', 'pass', ['channels'], ['pm', 'to', 'nicks'], True, {}, ['tags'], {'successToFailure': 1}, useColors=False, useRevisions=True, showBlameList=False) def test_service(self): irc = self.makeIRC() # just put it through its paces irc.startService() return irc.stopService() # deprecated @defer.inlineCallbacks def test_allowForce_allowShutdown(self): s = self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, allowShutdown=False) yield s.startService() self.assertEqual(words.StatusBot.expand_authz(s.authz), {'FORCE': True, 'STOP': True, 'SHUTDOWN': False}) # deprecated def test_allowForce_with_authz(self): with self.assertRaises(ConfigErrors): self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, authz={'force': [12345]}) # deprecated def test_allowShutdown_with_authz(self): with self.assertRaises(ConfigErrors): self.makeIRC( host='host', nick='nick', channels=['channels'], allowForce=True, authz={'': [12345]}) buildbot-2.6.0/master/buildbot/test/unit/test_reporters_mail.py000066400000000000000000000411351361162603000250220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import base64 import copy import sys from email import charset from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process import properties from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.reporters import mail from buildbot.reporters.mail import ESMTPSenderFactory from buildbot.reporters.mail import MailNotifier from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.notifier import NotifierTestMixin from buildbot.util import bytes2unicode from buildbot.util import ssl py_27 = sys.version_info[0] > 2 or (sys.version_info[0] == 2 and sys.version_info[1] >= 7) class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, NotifierTestMixin): if not ESMTPSenderFactory: skip = ("twisted-mail unavailable, " "see: https://twistedmatrix.com/trac/ticket/8770") def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupMailNotifier(self, *args, **kwargs): mn = MailNotifier(*args, **kwargs) yield mn.setServiceParent(self.master) yield mn.startService() return mn @defer.inlineCallbacks def test_change_name(self): mn = yield self.setupMailNotifier('from@example.org', name="custom_name") self.assertEqual(mn.name, "custom_name") @defer.inlineCallbacks def do_test_createEmail_cte(self, funnyChars, expEncoding): _, builds = yield self.setupBuildResults(SUCCESS) msgdict = create_msgdict(funnyChars) mn = yield self.setupMailNotifier('from@example.org') m = yield mn.createEmail(msgdict, 'builder-name', 'project-name', SUCCESS, builds) cte_lines = [l for l in m.as_string().split("\n") if l.startswith('Content-Transfer-Encoding:')] self.assertEqual(cte_lines, ['Content-Transfer-Encoding: %s' % expEncoding], repr(m.as_string())) def test_createEmail_message_content_transfer_encoding_7bit(self): # buildbot.reporters.mail.ENCODING is 'utf8' # On Python 3, the body_encoding for 'utf8' is base64. # On Python 2, the body_encoding for 'utf8' is None. # If the body_encoding is None, the email package # will try to deduce the 'Content-Transfer-Encoding' # by calling email.encoders.encode_7or8bit(). # If the foo.encode('ascii') works on the body, it # is assumed '7bit'. If it fails, it is assumed '8bit'. input_charset = charset.Charset(mail.ENCODING) if input_charset.body_encoding == charset.BASE64: expEncoding = 'base64' elif input_charset.body_encoding is None: expEncoding = '7bit' return self.do_test_createEmail_cte("old fashioned ascii", expEncoding) def test_createEmail_message_content_transfer_encoding_8bit(self): # buildbot.reporters.mail.ENCODING is 'utf8' # On Python 3, the body_encoding for 'utf8' is base64. # On Python 2, the body_encoding for 'utf8' is None. # If the body_encoding is None, the email package # will try to deduce the 'Content-Transfer-Encoding' # by calling email.encoders.encode_7or8bit(). 
# If the foo.encode('ascii') works on the body, it input_charset = charset.Charset(mail.ENCODING) if input_charset.body_encoding == charset.BASE64: expEncoding = 'base64' elif input_charset.body_encoding is None: expEncoding = '8bit' return self.do_test_createEmail_cte("\U0001F4A7", expEncoding) @defer.inlineCallbacks def test_createEmail_message_without_patch_and_log_contains_unicode(self): _, builds = yield self.setupBuildResults(SUCCESS) msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org') m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', SUCCESS, builds) try: m.as_string() except UnicodeEncodeError: self.fail('Failed to call as_string() on email message.') @defer.inlineCallbacks def test_createEmail_extraHeaders_one_build(self): _, builds = yield self.setupBuildResults(SUCCESS) builds[0]['properties']['hhh'] = ('vvv', 'fake') msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org', extraHeaders=dict(hhh=properties.Property('hhh'))) # add some Unicode to detect encoding problems m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', SUCCESS, builds) txt = m.as_string() # note that the headers *are* rendered self.assertIn('hhh: vvv', txt) @defer.inlineCallbacks def test_createEmail_extraHeaders_two_builds(self): _, builds = yield self.setupBuildResults(SUCCESS) builds.append(copy.deepcopy(builds[0])) builds[1]['builder']['name'] = 'builder2' msgdict = create_msgdict() mn = yield self.setupMailNotifier('from@example.org', extraHeaders=dict(hhh='vvv')) m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', SUCCESS, builds) txt = m.as_string() # note that the headers are *not* rendered self.assertIn('hhh: vvv', txt) @defer.inlineCallbacks def test_createEmail_message_with_patch_and_log_containing_unicode(self): _, builds = yield self.setupBuildResults(SUCCESS) msgdict = create_msgdict() patches = [{'body': '\u00E5\u00E4\u00F6'}] logs = yield self.master.data.get(("steps", 50, 'logs')) for l in logs: l['stepname'] = "fakestep" l['content'] = yield self.master.data.get(("logs", l['logid'], 'contents')) mn = yield self.setupMailNotifier('from@example.org', addLogs=True) m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', SUCCESS, builds, patches, logs) try: s = m.as_string() # python 2.6 default transfer in base64 for utf-8 if "base64" not in s: self.assertIn("Unicode log", s) else: # b64encode and remove '=' padding (hence [:-1]) logStr = bytes2unicode(base64.b64encode(b"Unicode log")[:-1]) self.assertIn(logStr, s) self.assertIn( 'Content-Disposition: attachment; filename="fakestep.stdio"', s) except UnicodeEncodeError: self.fail('Failed to call as_string() on email message.') @defer.inlineCallbacks def setupBuildMessage(self, **mnKwargs): _, builds = yield self.setupBuildResults(SUCCESS) mn = yield self.setupMailNotifier('from@example.org', **mnKwargs) mn.messageFormatter = Mock(spec=mn.messageFormatter) mn.messageFormatter.formatMessageForBuildResults.return_value = {"body": "body", "type": "text", "subject": "subject"} mn.findInterrestedUsersEmails = Mock( spec=mn.findInterrestedUsersEmails) mn.findInterrestedUsersEmails.return_value = "" mn.processRecipients = Mock(spec=mn.processRecipients) mn.processRecipients.return_value = "" mn.createEmail = Mock(spec=mn.createEmail) mn.createEmail.return_value = "" mn.sendMail = Mock(spec=mn.sendMail) yield mn.buildMessage("mybldr", builds, SUCCESS) return (mn, builds) @defer.inlineCallbacks def 
test_buildMessage(self): mn, builds = yield self.setupBuildMessage(mode=("change",)) build = builds[0] mn.messageFormatter.formatMessageForBuildResults.assert_called_with( ('change',), 'mybldr', build['buildset'], build, self.master, None, ['me@foo']) mn.findInterrestedUsersEmails.assert_called_with(['me@foo']) mn.processRecipients.assert_called_with('', '') mn.sendMail.assert_called_with('', '') self.assertEqual(mn.createEmail.call_count, 1) @defer.inlineCallbacks def do_test_sendToInterestedUsers(self, lookup=None, extraRecipients=None, sendToInterestedUsers=True, exp_called_with=None, exp_TO=None, exp_CC=None): if extraRecipients is None: extraRecipients = [] _, builds = yield self.setupBuildResults(SUCCESS) mn = yield self.setupMailNotifier('from@example.org', lookup=lookup, extraRecipients=extraRecipients, sendToInterestedUsers=sendToInterestedUsers) recipients = yield mn.findInterrestedUsersEmails(['Big Bob ', 'narrator']) m = {'To': None, 'CC': None} all_recipients = mn.processRecipients(recipients, m) self.assertEqual(sorted(all_recipients), sorted(exp_called_with)) self.assertEqual(m['To'], exp_TO) self.assertEqual(m['CC'], exp_CC) def test_sendToInterestedUsers_lookup(self): return self.do_test_sendToInterestedUsers( lookup="example.org", exp_called_with=['Big Bob ', 'narrator@example.org'], exp_TO='"=?utf-8?q?Big_Bob?=" , ' 'narrator@example.org') def test_buildMessage_sendToInterestedUsers_no_lookup(self): return self.do_test_sendToInterestedUsers( exp_called_with=['Big Bob '], exp_TO='"=?utf-8?q?Big_Bob?=" ') def test_buildMessage_sendToInterestedUsers_extraRecipients(self): return self.do_test_sendToInterestedUsers( extraRecipients=["marla@mayhem.net"], exp_called_with=['Big Bob ', 'marla@mayhem.net'], exp_TO='"=?utf-8?q?Big_Bob?=" ', exp_CC="marla@mayhem.net") def test_sendToInterestedUsers_False(self): return self.do_test_sendToInterestedUsers( extraRecipients=["marla@mayhem.net"], sendToInterestedUsers=False, exp_called_with=['marla@mayhem.net'], exp_TO="marla@mayhem.net") def test_valid_emails(self): valid_emails = [ 'foo+bar@example.com', # + comment in local part 'nobody@example.com.', # root dot 'My Name ', # With full name '', # With <> 'My Name ', # With full name (root dot) 'egypt@example.xn--wgbh1c'] # IDN TLD (.misr, Egypt) # If any of these email addresses fail, the test fails by # yield self.setupMailNotifier raising a ConfigErrors exception. MailNotifier('foo@example.com', extraRecipients=valid_emails) def test_invalid_email(self): for invalid in ['@', 'foo', 'foo@', '@example.com', 'foo@invalid', 'foobar@ex+ample.com', # + in domain part # whitespace in local part 'foo bar@example.net', 'Foo\nBar ', # newline in name 'test@example..invalid']: # empty label (..) 
with self.assertRaises(ConfigErrors): MailNotifier('foo@example.com', extraRecipients=[invalid]) @defer.inlineCallbacks def test_sendMail_real_name_addresses(self): fakeSenderFactory = Mock() fakeSenderFactory.side_effect = lambda *args, **kwargs: args[ 5].callback(True) self.patch(mail, 'ESMTPSenderFactory', fakeSenderFactory) self.patch(mail, 'reactor', Mock()) msg = Mock() msg.as_string = Mock(return_value='') mn = yield self.setupMailNotifier('John Doe ') yield mn.sendMail(msg, ['Jane Doe ']) self.assertIsInstance(fakeSenderFactory.call_args, tuple) self.assertTrue(len(fakeSenderFactory.call_args) > 0) self.assertTrue(len(fakeSenderFactory.call_args[0]) > 3) self.assertEquals(fakeSenderFactory.call_args[0][2], 'john.doe@domain.tld') self.assertEquals(fakeSenderFactory.call_args[0][3], ['jane.doe@domain.tld']) @defer.inlineCallbacks def do_test_sendMessage(self, **mnKwargs): fakeSenderFactory = Mock() fakeSenderFactory.side_effect = lambda *args, **kwargs: args[ 5].callback(True) self.patch(mail, 'ESMTPSenderFactory', fakeSenderFactory) _, builds = yield self.setupBuildResults(SUCCESS) mn = yield self.setupMailNotifier('from@example.org', **mnKwargs) mn.messageFormatter = Mock(spec=mn.messageFormatter) mn.messageFormatter.formatMessageForBuildResults.return_value = {"body": "body", "type": "text", "subject": "subject"} mn.findInterrestedUsersEmails = Mock( spec=mn.findInterrestedUsersEmails) mn.findInterrestedUsersEmails.return_value = list("") mn.processRecipients = Mock(spec=mn.processRecipients) mn.processRecipients.return_value = list("") mn.createEmail = Mock(spec=mn.createEmail) mn.createEmail.return_value.as_string = Mock(return_value="") yield mn.buildMessage("mybldr", builds, SUCCESS) return (mn, builds) @defer.inlineCallbacks def test_sendMessageOverTcp(self): fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, builds = yield self.do_test_sendMessage() self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectTCP', ('localhost', 25, None), {}), fakereactor.method_calls) @defer.inlineCallbacks def test_sendMessageWithInterpolatedConfig(self): """Test that the secrets parameters are properly interpolated at reconfig stage Note: in the unit test, we don't test that it is interpolated with secret. That would require setting up secret manager. We just test that the interpolation works. """ fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, builds = yield self.do_test_sendMessage(smtpUser=Interpolate("u$er"), smtpPassword=Interpolate("pa$$word")) self.assertEqual(mn.smtpUser, "u$er") self.assertEqual(mn.smtpPassword, "pa$$word") self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectTCP', ('localhost', 25, None), {}), fakereactor.method_calls) @ssl.skipUnless @defer.inlineCallbacks def test_sendMessageOverSsl(self): fakereactor = Mock() self.patch(mail, 'reactor', fakereactor) mn, builds = yield self.do_test_sendMessage(useSmtps=True) self.assertEqual(1, len(fakereactor.method_calls)) self.assertIn(('connectSSL', ('localhost', 25, None, fakereactor.connectSSL.call_args[ 0][3]), {}), fakereactor.method_calls) def create_msgdict(funny_chars='\u00E5\u00E4\u00F6'): unibody = 'Unicode body with non-ascii (%s).' % funny_chars msg_dict = dict(body=unibody, type='plain') return msg_dict buildbot-2.6.0/master/buildbot/test/unit/test_reporters_message.py000066400000000000000000000136301361162603000255230ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters import message from buildbot.reporters import utils from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin class TestMessage(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) self.message = message.MessageFormatter() self.messageMissing = message.MessageFormatterMissingWorker() def setupDb(self, results1, results2): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrkr'), fakedb.Buildset(id=98, results=results1, reason="testReason1"), fakedb.Buildset(id=99, results=results2, reason="testReason2"), fakedb.Builder(id=80, name='Builder1'), fakedb.BuildRequest(id=11, buildsetid=98, builderid=80), fakedb.BuildRequest(id=12, buildsetid=99, builderid=80), fakedb.Build(id=20, number=0, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=results1), fakedb.Build(id=21, number=1, builderid=80, buildrequestid=12, workerid=13, masterid=92, results=results1), ]) for _id in (20, 21): self.db.insertTestData([ fakedb.BuildProperty( buildid=_id, name="workername", value="wrkr"), fakedb.BuildProperty( buildid=_id, name="reason", value="because"), ]) @defer.inlineCallbacks def doOneTest(self, lastresults, results, mode="all"): self.setupDb(results, lastresults) res = yield utils.getDetailsForBuildset(self.master, 99, wantProperties=True) build = res['builds'][0] buildset = res['buildset'] res = yield self.message.formatMessageForBuildResults( mode, "Builder1", buildset, build, self.master, lastresults, ["him@bar", "me@foo"]) return res @defer.inlineCallbacks def test_message_success(self): res = yield self.doOneTest(SUCCESS, SUCCESS) self.assertEqual(res['type'], "plain") self.assertEqual(res['body'], textwrap.dedent('''\ The Buildbot has detected a passing build on builder Builder1 while building Buildbot. Full details are available at: http://localhost:8080/#builders/80/builds/1 Buildbot URL: http://localhost:8080/ Worker for this Build: wrkr Build Reason: because Blamelist: him@bar, me@foo Build succeeded! 
Sincerely, -The Buildbot''')) self.assertTrue('subject' not in res) @defer.inlineCallbacks def test_inline_template(self): self.message = message.MessageFormatter(template="URL: {{ build_url }} -- {{ summary }}") res = yield self.doOneTest(SUCCESS, SUCCESS) self.assertEqual(res['type'], "plain") self.assertEqual(res['body'], "URL: http://localhost:8080/#builders/80/builds/1 -- Build succeeded!") @defer.inlineCallbacks def test_inline_subject(self): self.message = message.MessageFormatter(subject="subject") res = yield self.doOneTest(SUCCESS, SUCCESS) self.assertEqual(res['subject'], "subject") @defer.inlineCallbacks def test_message_failure(self): res = yield self.doOneTest(SUCCESS, FAILURE) self.assertIn( "The Buildbot has detected a failed build on builder", res['body']) @defer.inlineCallbacks def test_message_failure_change(self): res = yield self.doOneTest(SUCCESS, FAILURE, "change") self.assertIn( "The Buildbot has detected a new failure on builder", res['body']) @defer.inlineCallbacks def test_message_success_change(self): res = yield self.doOneTest(FAILURE, SUCCESS, "change") self.assertIn( "The Buildbot has detected a restored build on builder", res['body']) @defer.inlineCallbacks def test_message_success_nochange(self): res = yield self.doOneTest(SUCCESS, SUCCESS, "change") self.assertIn( "The Buildbot has detected a passing build on builder", res['body']) @defer.inlineCallbacks def test_message_failure_nochange(self): res = yield self.doOneTest(FAILURE, FAILURE, "change") self.assertIn( "The Buildbot has detected a failed build on builder", res['body']) @defer.inlineCallbacks def test_missing_worker(self): self.setupDb(SUCCESS, SUCCESS) workers = yield self.master.data.get(('workers',)) worker = workers[0] worker['notify'] = ['e@mail'] worker['last_connection'] = ['yesterday'] res = yield self.messageMissing.formatMessageForMissingWorker(self.master, worker) text = res['body'] self.assertIn("has noticed that the worker named wrkr went away", text) buildbot-2.6.0/master/buildbot/test/unit/test_reporters_notifier.py000066400000000000000000000343431361162603000257220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
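#
# Illustrative sketch, not part of this module: the MessageFormatter tests
# above exercise the `template` and `subject` arguments; in a master.cfg the
# same formatter would typically be handed to a reporter such as MailNotifier,
# roughly as below (addresses and relay host are made-up placeholders):
#
#     from buildbot.plugins import reporters
#
#     formatter = reporters.MessageFormatter(
#         template="URL: {{ build_url }} -- {{ summary }}",
#         subject="Buildbot: {{ summary }}")
#
#     c['services'] = [
#         reporters.MailNotifier(fromaddr="builds@example.org",
#                                relayhost="smtp.example.org",
#                                extraRecipients=["dev@example.org"],
#                                messageFormatter=formatter),
#     ]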
# # Copyright Buildbot Team Members import copy import sys from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters.notifier import NotifierBase from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.notifier import NotifierTestMixin py_27 = sys.version_info[0] > 2 or (sys.version_info[0] == 2 and sys.version_info[1] >= 7) class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase, NotifierTestMixin): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) @defer.inlineCallbacks def setupNotifier(self, *args, **kwargs): mn = NotifierBase(*args, **kwargs) mn.sendMessage = Mock(spec=mn.sendMessage) mn.sendMessage.return_value = "" yield mn.setServiceParent(self.master) yield mn.startService() return mn def test_init_enforces_tags_and_builders_are_mutually_exclusive(self): with self.assertRaises(config.ConfigErrors): NotifierBase(tags=['fast', 'slow'], builders=['a', 'b']) def test_init_warns_notifier_mode_all_in_iter(self): with self.assertRaisesConfigError( "mode 'all' is not valid in an iterator and must be passed in as a separate string"): NotifierBase(mode=['all']) @defer.inlineCallbacks def test_buildsetComplete_sends_message(self): _, builds = yield self.setupBuildResults(SUCCESS) mn = yield self.setupNotifier(buildSetSummary=True, mode=("failing", "passing", "warnings"), builders=["Builder1", "Builder2"]) mn.buildMessage = Mock() yield mn.buildsetComplete('buildset.98.complete', dict(bsid=98)) mn.buildMessage.assert_called_with( "whole buildset", builds, SUCCESS) self.assertEqual(mn.buildMessage.call_count, 1) @defer.inlineCallbacks def test_buildsetComplete_doesnt_send_message(self): _, builds = yield self.setupBuildResults(SUCCESS) # disable passing... 
mn = yield self.setupNotifier(buildSetSummary=True, mode=("failing", "warnings"), builders=["Builder1", "Builder2"]) mn.buildMessage = Mock() yield mn.buildsetComplete('buildset.98.complete', dict(bsid=98)) self.assertFalse(mn.buildMessage.called) @defer.inlineCallbacks def test_isMessageNeeded_ignores_unspecified_tags(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags build['builder']['tags'] = ['slow'] mn = yield self.setupNotifier(tags=["fast"]) self.assertFalse(mn.isMessageNeeded(build)) @defer.inlineCallbacks def test_isMessageNeeded_tags(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags build['builder']['tags'] = ['fast'] mn = yield self.setupNotifier(tags=["fast"]) self.assertTrue(mn.isMessageNeeded(build)) @defer.inlineCallbacks def test_isMessageNeeded_schedulers_sends_mail(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags mn = yield self.setupNotifier(schedulers=['checkin']) self.assertTrue(mn.isMessageNeeded(build)) @defer.inlineCallbacks def test_isMessageNeeded_schedulers_doesnt_send_mail(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags mn = yield self.setupNotifier(schedulers=['some-random-scheduler']) self.assertFalse(mn.isMessageNeeded(build)) @defer.inlineCallbacks def test_isMessageNeeded_branches_sends_mail(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags mn = yield self.setupNotifier(branches=['master']) self.assertTrue(mn.isMessageNeeded(build)) @defer.inlineCallbacks def test_isMessageNeeded_branches_doesnt_send_mail(self): _, builds = yield self.setupBuildResults(SUCCESS) build = builds[0] # force tags mn = yield self.setupNotifier(branches=['some-random-branch']) self.assertFalse(mn.isMessageNeeded(build)) @defer.inlineCallbacks def run_simple_test_sends_message_for_mode(self, mode, result, shouldSend=True): _, builds = yield self.setupBuildResults(result) mn = yield self.setupNotifier(mode=mode) self.assertEqual(mn.isMessageNeeded(builds[0]), shouldSend) def run_simple_test_ignores_message_for_mode(self, mode, result): return self.run_simple_test_sends_message_for_mode(mode, result, False) def test_isMessageNeeded_mode_all_for_success(self): return self.run_simple_test_sends_message_for_mode("all", SUCCESS) def test_isMessageNeeded_mode_all_for_failure(self): return self.run_simple_test_sends_message_for_mode("all", FAILURE) def test_isMessageNeeded_mode_all_for_warnings(self): return self.run_simple_test_sends_message_for_mode("all", WARNINGS) def test_isMessageNeeded_mode_all_for_exception(self): return self.run_simple_test_sends_message_for_mode("all", EXCEPTION) def test_isMessageNeeded_mode_all_for_cancelled(self): return self.run_simple_test_sends_message_for_mode("all", CANCELLED) def test_isMessageNeeded_mode_failing_for_success(self): return self.run_simple_test_ignores_message_for_mode("failing", SUCCESS) def test_isMessageNeeded_mode_failing_for_failure(self): return self.run_simple_test_sends_message_for_mode("failing", FAILURE) def test_isMessageNeeded_mode_failing_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("failing", WARNINGS) def test_isMessageNeeded_mode_failing_for_exception(self): return self.run_simple_test_ignores_message_for_mode("failing", EXCEPTION) def test_isMessageNeeded_mode_exception_for_success(self): return self.run_simple_test_ignores_message_for_mode("exception", SUCCESS) def 
test_isMessageNeeded_mode_exception_for_failure(self): return self.run_simple_test_ignores_message_for_mode("exception", FAILURE) def test_isMessageNeeded_mode_exception_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("exception", WARNINGS) def test_isMessageNeeded_mode_exception_for_exception(self): return self.run_simple_test_sends_message_for_mode("exception", EXCEPTION) def test_isMessageNeeded_mode_warnings_for_success(self): return self.run_simple_test_ignores_message_for_mode("warnings", SUCCESS) def test_isMessageNeeded_mode_warnings_for_failure(self): return self.run_simple_test_sends_message_for_mode("warnings", FAILURE) def test_isMessageNeeded_mode_warnings_for_warnings(self): return self.run_simple_test_sends_message_for_mode("warnings", WARNINGS) def test_isMessageNeeded_mode_warnings_for_exception(self): return self.run_simple_test_ignores_message_for_mode("warnings", EXCEPTION) def test_isMessageNeeded_mode_passing_for_success(self): return self.run_simple_test_sends_message_for_mode("passing", SUCCESS) def test_isMessageNeeded_mode_passing_for_failure(self): return self.run_simple_test_ignores_message_for_mode("passing", FAILURE) def test_isMessageNeeded_mode_passing_for_warnings(self): return self.run_simple_test_ignores_message_for_mode("passing", WARNINGS) def test_isMessageNeeded_mode_passing_for_exception(self): return self.run_simple_test_ignores_message_for_mode("passing", EXCEPTION) @defer.inlineCallbacks def run_sends_message_for_problems(self, mode, results1, results2, shouldSend=True): _, builds = yield self.setupBuildResults(results2) mn = yield self.setupNotifier(mode=mode) build = builds[0] if results1 is not None: build['prev_build'] = copy.deepcopy(builds[0]) build['prev_build']['results'] = results1 else: build['prev_build'] = None self.assertEqual(mn.isMessageNeeded(builds[0]), shouldSend) def test_isMessageNeeded_mode_problem_sends_on_problem(self): return self.run_sends_message_for_problems("problem", SUCCESS, FAILURE, True) def test_isMessageNeeded_mode_problem_ignores_successful_build(self): return self.run_sends_message_for_problems("problem", SUCCESS, SUCCESS, False) def test_isMessageNeeded_mode_problem_ignores_two_failed_builds_in_sequence(self): return self.run_sends_message_for_problems("problem", FAILURE, FAILURE, False) def test_isMessageNeeded_mode_change_sends_on_change(self): return self.run_sends_message_for_problems("change", FAILURE, SUCCESS, True) def test_isMessageNeeded_mode_change_sends_on_failure(self): return self.run_sends_message_for_problems("change", SUCCESS, FAILURE, True) def test_isMessageNeeded_mode_change_ignores_first_build(self): return self.run_sends_message_for_problems("change", None, FAILURE, False) def test_isMessageNeeded_mode_change_ignores_first_build2(self): return self.run_sends_message_for_problems("change", None, SUCCESS, False) def test_isMessageNeeded_mode_change_ignores_same_result_in_sequence(self): return self.run_sends_message_for_problems("change", SUCCESS, SUCCESS, False) def test_isMessageNeeded_mode_change_ignores_same_result_in_sequence2(self): return self.run_sends_message_for_problems("change", FAILURE, FAILURE, False) @defer.inlineCallbacks def setupBuildMessage(self, **mnKwargs): _, builds = yield self.setupBuildResults(SUCCESS) mn = yield self.setupNotifier(**mnKwargs) mn.messageFormatter = Mock(spec=mn.messageFormatter) mn.messageFormatter.formatMessageForBuildResults.return_value = {"body": "body", "type": "text", "subject": "subject"} yield mn.buildMessage("mybldr", 
builds, SUCCESS) return (mn, builds) @defer.inlineCallbacks def test_buildMessage_nominal(self): mn, builds = yield self.setupBuildMessage(mode=("change",)) build = builds[0] mn.messageFormatter.formatMessageForBuildResults.assert_called_with( ('change',), 'mybldr', build['buildset'], build, self.master, None, ['me@foo']) self.assertEqual(mn.sendMessage.call_count, 1) mn.sendMessage.assert_called_with('body', 'subject', 'text', 'mybldr', SUCCESS, builds, ['me@foo'], [], []) @defer.inlineCallbacks def test_buildMessage_addLogs(self): mn, builds = yield self.setupBuildMessage(mode=("change",), addLogs=True) self.assertEqual(mn.sendMessage.call_count, 1) # make sure the logs are send self.assertEqual(mn.sendMessage.call_args[0][8][0]['logid'], 60) # make sure the log has content self.assertIn( "log with", mn.sendMessage.call_args[0][8][0]['content']['content']) @defer.inlineCallbacks def test_buildMessage_addPatch(self): mn, builds = yield self.setupBuildMessage(mode=("change",), addPatch=True) self.assertEqual(mn.sendMessage.call_count, 1) # make sure the patch are sent self.assertEqual(mn.sendMessage.call_args[0][7], [{'author': 'him@foo', 'body': b'hello, world', 'comment': 'foo', 'level': 3, 'patchid': 99, 'subdir': '/foo'}]) @defer.inlineCallbacks def test_buildMessage_addPatchNoPatch(self): SourceStamp = fakedb.SourceStamp class NoPatchSourcestamp(SourceStamp): def __init__(self, id, patchid): super().__init__(id=id) self.patch(fakedb, 'SourceStamp', NoPatchSourcestamp) mn, builds = yield self.setupBuildMessage(mode=("change",), addPatch=True) self.assertEqual(mn.sendMessage.call_count, 1) # make sure no patches are sent self.assertEqual(mn.sendMessage.call_args[0][7], []) @defer.inlineCallbacks def test_workerMissingSendMessage(self): mn = yield self.setupNotifier(watchedWorkers=['myworker']) yield mn.workerMissing('worker.98.complete', dict(name='myworker', notify=["workeradmin@example.org"], workerinfo=dict(admin="myadmin"), last_connection="yesterday")) self.assertEqual(mn.sendMessage.call_count, 1) text = mn.sendMessage.call_args[0][0] recipients = mn.sendMessage.call_args[1]['users'] self.assertEqual(recipients, ['workeradmin@example.org']) self.assertIn( b"has noticed that the worker named myworker went away", text) buildbot-2.6.0/master/buildbot/test/unit/test_reporters_pushjet.py000066400000000000000000000073541361162603000255670ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
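#
# Illustrative sketch, not part of this module: the notifier tests above
# assert that buildMessage() calls sendMessage() with the arguments
# (body, subject, type, builderName, results, builds, users, patches, logs),
# so a custom reporter can be outlined as a NotifierBase subclass along these
# lines (`post_to_dashboard` is a made-up placeholder):
#
#     from twisted.internet import defer
#
#     from buildbot.reporters.notifier import NotifierBase
#
#     class DashboardNotifier(NotifierBase):
#
#         @defer.inlineCallbacks
#         def sendMessage(self, body, subject=None, type=None,
#                         builderName=None, results=None, builds=None,
#                         users=None, patches=None, logs=None):
#             # forward the already-formatted message to some external sink
#             yield post_to_dashboard(builderName, results, body)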
#
# Copyright Buildbot Team Members

import os
from unittest import SkipTest

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process.properties import Interpolate
from buildbot.process.results import SUCCESS
from buildbot.reporters.pushjet import PushjetNotifier
from buildbot.test.fake import fakemaster
from buildbot.test.fake import httpclientservice as fakehttpclientservice
from buildbot.test.util.config import ConfigErrorsMixin
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import httpclientservice


class TestPushjetNotifier(ConfigErrorsMixin, TestReactorMixin,
                          unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self, wantData=True,
                                             wantDb=True, wantMq=True)

    def setupFakeHttp(self, base_url='https://api.pushjet.io'):
        return self.successResultOf(fakehttpclientservice.HTTPClientService.getFakeService(
            self.master, self, base_url))

    @defer.inlineCallbacks
    def setupPushjetNotifier(self, secret=Interpolate("1234"), **kwargs):
        pn = PushjetNotifier(secret, **kwargs)
        yield pn.setServiceParent(self.master)
        yield pn.startService()
        return pn

    @defer.inlineCallbacks
    def test_sendMessage(self):
        _http = self.setupFakeHttp()
        pn = yield self.setupPushjetNotifier(levels={'passing': 2})
        _http.expect("post", "/message",
                     data={'secret': "1234", 'level': 2,
                           'message': "Test", 'title': "Tee"},
                     content_json={'status': 'ok'})
        n = yield pn.sendMessage(body="Test", subject="Tee", results=SUCCESS)
        j = yield n.json()
        self.assertEqual(j['status'], 'ok')

    @defer.inlineCallbacks
    def test_sendNotification(self):
        _http = self.setupFakeHttp('https://tests.io')
        pn = yield self.setupPushjetNotifier(base_url='https://tests.io')
        _http.expect("post", "/message",
                     data={'secret': "1234", 'message': "Test"},
                     content_json={'status': 'ok'})
        n = yield pn.sendNotification({'message': "Test"})
        j = yield n.json()
        self.assertEqual(j['status'], 'ok')

    @defer.inlineCallbacks
    def test_sendRealNotification(self):
        secret = os.environ.get('TEST_PUSHJET_SECRET')
        if secret is None:
            raise SkipTest("real pushjet test runs only if the variable "
                           "TEST_PUSHJET_SECRET is defined")
        _http = yield httpclientservice.HTTPClientService.getService(
            self.master, 'https://api.pushjet.io')
        yield _http.startService()
        pn = yield self.setupPushjetNotifier(secret=secret)
        n = yield pn.sendNotification({'message': "Buildbot Pushjet test passed!"})
        j = yield n.json()
        self.assertEqual(j['status'], 'ok')

# Test with:
# TEST_PUSHJET_SECRET=edcfaf21ab1bbad7b12bd7602447e6cb
# https://api.pushjet.io/message?uuid=b8b8b8b8-0000-b8b8-0000-b8b8b8b8b8b8

buildbot-2.6.0/master/buildbot/test/unit/test_reporters_pushover.py

# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import os
from unittest import SkipTest

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process.properties import Interpolate
from buildbot.process.results import SUCCESS
from buildbot.reporters.pushover import PushoverNotifier
from buildbot.test.fake import fakemaster
from buildbot.test.fake import httpclientservice as fakehttpclientservice
from buildbot.test.util.config import ConfigErrorsMixin
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import httpclientservice


class TestPushoverNotifier(ConfigErrorsMixin, TestReactorMixin,
                           unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self, wantData=True,
                                             wantDb=True, wantMq=True)

    def setupFakeHttp(self):
        return self.successResultOf(fakehttpclientservice.HTTPClientService.getFakeService(
            self.master, self, 'https://api.pushover.net'))

    @defer.inlineCallbacks
    def setupPushoverNotifier(self, user_key="1234",
                              api_token=Interpolate("abcd"), **kwargs):
        pn = PushoverNotifier(user_key, api_token, **kwargs)
        yield pn.setServiceParent(self.master)
        yield pn.startService()
        return pn

    @defer.inlineCallbacks
    def test_sendMessage(self):
        _http = self.setupFakeHttp()
        pn = yield self.setupPushoverNotifier(priorities={'passing': 2})
        _http.expect("post", "/1/messages.json",
                     params={'user': "1234", 'token': "abcd",
                             'message': "Test", 'title': "Tee", 'priority': 2},
                     content_json={'status': 1, 'request': '98765'})
        n = yield pn.sendMessage(body="Test", subject="Tee", results=SUCCESS)
        j = yield n.json()
        self.assertEqual(j['status'], 1)
        self.assertEqual(j['request'], '98765')

    @defer.inlineCallbacks
    def test_sendNotification(self):
        _http = self.setupFakeHttp()
        pn = yield self.setupPushoverNotifier(otherParams={'sound': "silent"})
        _http.expect("post", "/1/messages.json",
                     params={'user': "1234", 'token': "abcd",
                             'sound': "silent", 'message': "Test"},
                     content_json={'status': 1, 'request': '98765'})
        n = yield pn.sendNotification({'message': "Test"})
        j = yield n.json()
        self.assertEqual(j['status'], 1)
        self.assertEqual(j['request'], '98765')

    @defer.inlineCallbacks
    def test_sendRealNotification(self):
        creds = os.environ.get('TEST_PUSHOVER_CREDENTIALS')
        if creds is None:
            raise SkipTest("real pushover test runs only if the variable "
                           "TEST_PUSHOVER_CREDENTIALS is defined")
        user, token = creds.split(':')
        _http = yield httpclientservice.HTTPClientService.getService(
            self.master, 'https://api.pushover.net')
        yield _http.startService()
        pn = yield self.setupPushoverNotifier(user_key=user, api_token=token)
        n = yield pn.sendNotification({'message': "Buildbot Pushover test passed!"})
        j = yield n.json()
        self.assertEqual(j['status'], 1)

buildbot-2.6.0/master/buildbot/test/unit/test_reporters_telegram.py

# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
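#
# Illustrative sketch, not part of this module: the constructor arguments
# exercised by the Pushover tests above map onto a master.cfg entry roughly
# as below (the user key and API token values are made-up placeholders):
#
#     from buildbot.plugins import reporters
#
#     c['services'] = [
#         reporters.PushoverNotifier(user_key="uQiRzpo4DXghDmr9Qzzf",
#                                    api_token="azGDORePK8gMaC0QOYAM",
#                                    priorities={'passing': 2},
#                                    otherParams={'sound': "silent"}),
#     ]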
# # Copyright Buildbot Team Members import json import sys from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.reporters import telegram from buildbot.reporters import words from buildbot.schedulers import forcesched from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.web import FakeRequest from buildbot.test.unit.test_reporters_words import ContactMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service from buildbot.util import unicode2bytes class FakeChannel(service.AsyncService): pass class FakeContact: def __init__(self, user=None, channel=None): super().__init__() self.user_id = user['id'] self.user_info = user self.channel = FakeChannel self.channel.chat_info = channel.chat_info self.template = None self.messages = [] def handleMessage(self, message, **kwargs): self.messages.append(message) return defer.succeed(message) class TestTelegramContact(ContactMixin, unittest.TestCase): channelClass = telegram.TelegramChannel contactClass = telegram.TelegramContact class botClass(words.StatusBot): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.query_cache = {} def send_message(self, chat, message, **kwargs): return {'message_id': 123} def edit_message(bot, chat, msgid, message, **kwargs): return {'message_id': 123} def delete_message(bot, chat, msgid): pass def send_sticker(bot, chat, sticker, **kwargs): pass def edit_keyboard(self, chat, msg, keyboard=None): pass def getChannel(self, channel): return self.channelClass(self, channel) USER = { "id": 123456789, "first_name": "Harry", "last_name": "Potter", "username": "harrypotter", } CHANNEL = { "id": -12345678, "title": "Hogwards", "type": "group" } PRIVATE = { "id": 123456789, "type": "private" } def patch_send(self): self.sent = [] self.stickers = 0 def send_message(chat, message, **kwargs): self.sent.append((chat, message, kwargs)) return {'message_id': 123} self.bot.send_message = send_message def send_sticker(chat, sticker, **kwargs): self.stickers += 1 self.bot.send_sticker = send_sticker @defer.inlineCallbacks def setUp(self): ContactMixin.setUp(self) self.contact1 = self.contactClass(user=self.USER, channel=self.channelClass(self.bot, self.PRIVATE)) yield self.contact1.channel.setServiceParent(self.master) @defer.inlineCallbacks def test_list_notified_events(self): self.patch_send() channel = telegram.TelegramChannel(self.bot, self.CHANNEL) channel.notify_events = {'success'} yield channel.list_notified_events() self.assertEquals(self.sent[0][1], "The following events are being notified:\n🔔 **success**") @defer.inlineCallbacks def test_list_notified_events_empty(self): self.patch_send() channel = telegram.TelegramChannel(self.bot, self.CHANNEL) channel.notify_events = set() yield channel.list_notified_events() self.assertEquals(self.sent[0][1], "🔕 No events are being notified.") def testDescribeUser(self): self.assertEquals(self.contact1.describeUser(), "Harry Potter (@harrypotter)") def testDescribeUserInGroup(self): self.assertEquals(self.contact.describeUser(), "Harry Potter (@harrypotter) on 'Hogwards'") @defer.inlineCallbacks def test_access_denied(self): self.patch_send() self.contact1.ACCESS_DENIED_MESSAGES = ["ACCESS DENIED"] yield 
self.contact1.access_denied(tmessage={'message_id': 123}) self.assertEqual("ACCESS DENIED", self.sent[0][1]) @defer.inlineCallbacks def test_access_denied_group(self): self.patch_send() self.contact.ACCESS_DENIED_MESSAGES = ["ACCESS DENIED"] yield self.contact.access_denied(tmessage={'message_id': 123}) self.assertEqual("ACCESS DENIED", self.sent[0][1]) def test_query_button_short(self): result = self.contact.query_button("Hello", "hello") self.assertEquals(result, {'text': "Hello", 'callback_data': "hello"}) def test_query_button_long(self): payload = 16 * "1234567890" key = hash(repr(payload)) result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(self.bot.query_cache[key], payload) def test_query_button_non_str(self): payload = {'data': "good"} key = hash(repr(payload)) result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(self.bot.query_cache[key], payload) def test_query_button_cache(self): payload = 16 * "1234567890" key = hash(repr(payload)) self.bot.query_cache[key] = payload result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key}) self.assertEquals(len(self.bot.query_cache), 1) def test_query_button_cache_conflict(self): payload = 16 * "1234567890" key = hash(repr(payload)) self.bot.query_cache[key] = "something other" result = self.contact.query_button("Hello", payload) self.assertEquals(result, {'text': "Hello", 'callback_data': key + 1}) self.assertEquals(self.bot.query_cache[key + 1], payload) @defer.inlineCallbacks def test_command_start(self): yield self.do_test_command('start', exp_usage=False) self.assertEqual(self.sent[0][0], self.CHANNEL['id']) @defer.inlineCallbacks def test_command_nay(self): yield self.do_test_command('nay', contact=self.contact1, tmessage={}) @defer.inlineCallbacks def test_command_nay_reply_markup(self): yield self.do_test_command('nay', tmessage={ 'reply_to_message': { 'message_id': 1234, 'reply_markup': {}, }}) @defer.inlineCallbacks def test_commmand_commands(self): yield self.do_test_command('commands') self.assertEqual(self.sent[0][0], self.CHANNEL['id']) @defer.inlineCallbacks def test_commmand_commands_botfather(self): yield self.do_test_command('commands', 'botfather') self.assertEqual(self.sent[0][0], self.CHANNEL['id']) self.assertRegex(self.sent[0][1], r"^\w+ - \S+") @defer.inlineCallbacks def test_command_getid_private(self): yield self.do_test_command('getid', contact=self.contact1) self.assertEqual(len(self.sent), 1) self.assertIn(str(self.USER['id']), self.sent[0][1]) @defer.inlineCallbacks def test_command_getid_group(self): yield self.do_test_command('getid') self.assertIn(str(self.USER['id']), self.sent[0][1]) self.assertIn(str(self.CHANNEL['id']), self.sent[1][1]) def assertButton(self, data, pos=None, sent=0): keyboard = self.sent[sent][2]['reply_markup']['inline_keyboard'] if pos is not None: r, c = pos self.assertEquals(keyboard[r][c]['callback_data'], data) else: dataset = [b['callback_data'] for row in keyboard for b in row] self.assertIn(data, dataset) @defer.inlineCallbacks def test_command_list(self): yield self.do_test_command('list') self.assertButton('/list builders') self.assertButton('/list workers') self.assertButton('/list changes') @defer.inlineCallbacks def test_command_list_builders(self): yield self.do_test_command('list', 'all builders') self.assertEqual(len(self.sent), 1) for builder 
in self.BUILDER_NAMES: self.assertIn('`%s` ❌' % builder, self.sent[0][1]) @defer.inlineCallbacks def test_command_list_workers(self): workers = ['worker1', 'worker2'] for worker in workers: self.master.db.workers.db.insertTestData([ fakedb.Worker(name=worker) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) for worker in workers: self.assertIn('`%s` ❌' % worker, self.sent[0][1]) @defer.inlineCallbacks def test_command_list_workers_online(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) self.assertNotIn('`linux1` ⚠️', self.sent[0][1]) self.assertIn('`linux2` ⚠️', self.sent[0][1]) @defer.inlineCallbacks def test_command_list_changes(self): self.master.db.workers.db.insertTestData([ fakedb.Change() ]) yield self.do_test_command('list', args='2 changes') self.assertEqual(len(self.sent), 2) @defer.inlineCallbacks def test_command_list_changes_long(self): self.master.db.workers.db.insertTestData([ fakedb.Change() for i in range(200) ]) yield self.do_test_command('list', args='all changes') self.assertIn('reply_markup', self.sent[1][2]) @defer.inlineCallbacks def test_command_watch(self): self.setupSomeBuilds() yield self.do_test_command('watch') self.assertButton('/watch builder1') @defer.inlineCallbacks def test_command_watch_no_builds(self): yield self.do_test_command('watch') @defer.inlineCallbacks def test_command_stop_no_args(self): self.setupSomeBuilds() yield self.do_test_command('stop') self.assertButton('/stop build builder1') @defer.inlineCallbacks def test_command_stop_ask_reason(self): self.patch_send() self.setupSomeBuilds() yield self.do_test_command('stop', 'build builder1') self.assertIn("give me the reason", self.sent[0][1]) self.assertEquals(self.contact.template, "/stop build builder1 {}") def test_ask_reply_group(self): self.patch_send() self.contact.ask_for_reply("test") self.assertEqual(self.sent[0][1], "Ok @harrypotter, now test...") def test_ask_reply_group_no_username(self): self.patch_send() self.contact.user_info = self.USER.copy() del self.contact.user_info['username'] self.contact.ask_for_reply("test") self.assertEqual(self.sent[0][1], "Ok, now reply to this message and test...") def test_ask_reply_group_no_username_no_greeting(self): self.patch_send() self.contact.user_info = self.USER.copy() del self.contact.user_info['username'] self.contact.ask_for_reply("test", None) self.assertEqual(self.sent[0][1], "Reply to this message and test...") def test_ask_reply_private_no_greeting(self): self.patch_send() self.contact1.ask_for_reply("test", None) self.assertEqual(self.sent[0][1], "Test...") @defer.inlineCallbacks def test_command_notify_no_args(self): self.patch_send() self.contact.channel.notify_events = {'success', 'failure'} yield self.do_test_command('notify') self.assertButton('/notify on-quiet finished') self.assertButton('/notify off-quiet success') self.assertButton('/notify list') @defer.inlineCallbacks def test_command_notify_list_with_query(self): self.patch_send() def delete_message(chat, msg): delete_message.msg = msg delete_message.msg = None self.bot.delete_message = delete_message yield self.do_test_command('notify', 'list', tquery={ 'message': {'message_id': 2345} }) self.assertEqual(delete_message.msg, 2345) @defer.inlineCallbacks def test_command_notify_toggle(self): self.patch_send() def edit_keyboard(chat, 
msg, keyboard): self.sent.append((chat, None, { 'reply_markup': {'inline_keyboard': keyboard}})) self.bot.edit_keyboard = edit_keyboard self.contact.channel.notify_events = {'success', 'failure'} yield self.do_test_command('notify', 'on-quiet finished', tquery={ 'message': {'message_id': 2345} }) self.assertIn('finished', self.contact.channel.notify_events) self.assertButton('/notify off-quiet finished') @defer.inlineCallbacks def test_command_shutdown(self): yield self.do_test_command('shutdown') self.assertButton('/shutdown start') self.assertButton('/shutdown now') @defer.inlineCallbacks def test_command_shutdown_shutting_down(self): yield self.do_test_command('shutdown', shuttingDown=True) self.assertButton('/shutdown stop') self.assertButton('/shutdown now') def allSchedulers(self): return self.schedulers def make_forcescheduler(self, two=False): scheduler = forcesched.ForceScheduler( name='force1', builderNames=['builder1', 'builder2'], codebases=[ forcesched.CodebaseParameter('', branch=forcesched.StringParameter( name='branch', default="master"), repository=forcesched.FixedParameter( name="repository", default="repository.git")), forcesched.CodebaseParameter('second', branch=forcesched.StringParameter( name='branch', default="master"), repository=forcesched.FixedParameter( name="repository", default="repository2.git"))], reason=forcesched.StringParameter( name='reason', required=True)) self.schedulers = [scheduler] if two: scheduler2 = forcesched.ForceScheduler( name='force2', builderNames=['builder2']) self.schedulers.append(scheduler2) self.bot.master.allSchedulers = self.allSchedulers @defer.inlineCallbacks def test_command_force_no_schedulers(self): yield self.do_test_command('force', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_noargs_multiple_schedulers(self): self.make_forcescheduler(two=True) yield self.do_test_command('force') self.assertButton('/force force1') self.assertButton('/force force2') @defer.inlineCallbacks def test_command_force_noargs(self): self.make_forcescheduler() yield self.do_test_command('force') self.assertButton('/force force1 config builder1') self.assertButton('/force force1 config builder2') @defer.inlineCallbacks def test_command_force_only_scheduler(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1') self.assertButton('/force force1 config builder1') self.assertButton('/force force1 config builder2') @defer.inlineCallbacks def test_command_force_bad_scheduler(self): self.make_forcescheduler(two=True) yield self.do_test_command('force', 'force3', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_bad_builder(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder0', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_bad_command(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 bad builder1', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_only_bad_command(self): self.make_forcescheduler() yield self.do_test_command('force', 'bad builder1', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_config(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1') self.assertButton('/force force1 ask reason builder1 ') self.assertButton('/force force1 ask branch builder1 ') self.assertButton('/force force1 ask project builder1 ') self.assertButton('/force force1 ask revision builder1 ') self.assertButton('/force force1 ask second_branch 
builder1 ') self.assertButton('/force force1 ask second_project builder1 ') self.assertButton('/force force1 ask second_revision builder1 ') @defer.inlineCallbacks def test_command_force_config_more(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1 branch=master') self.assertButton('/force force1 ask reason builder1 branch=master') @defer.inlineCallbacks def test_command_force_config_nothing_missing(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 config builder1 reason=Ok') self.assertButton('/force force1 build builder1 reason=Ok') @defer.inlineCallbacks def test_command_force_ask(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 ask reason builder1 branch=master') self.assertEqual(self.contact.template, '/force force1 config builder1 branch=master reason={}') @defer.inlineCallbacks def test_command_force_build_missing(self): self.make_forcescheduler() yield self.do_test_command('force', 'force1 build builder1') self.assertButton('/force force1 ask reason builder1 ') @defer.inlineCallbacks def test_command_force_build(self): self.make_forcescheduler() force_args = {} def force(**kwargs): force_args.update(kwargs) self.schedulers[0].force = force yield self.do_test_command('force', 'force1 build builder1 reason=Good') self.assertEqual(self.sent[0][1], "Force build successfully requested.") expected = { 'builderid': 23, 'owner': "Harry Potter (@harrypotter) on 'Hogwards'", 'reason': 'Good', 'repository': 'repository.git', # fixed param 'second_repository': 'repository2.git' # fixed param } self.assertEqual(force_args, expected) class TestPollingBot(telegram.TelegramPollingBot): def __init__(self, updates, *args, **kwargs): self.__updates = updates super().__init__(*args, **kwargs) def process_update(self, update): self.__updates -= 1 if not self.__updates: self._polling_continue = False return super().process_update(update) class TestTelegramService(TestReactorMixin, unittest.TestCase): USER = TestTelegramContact.USER CHANNEL = TestTelegramContact.CHANNEL PRIVATE = TestTelegramContact.PRIVATE def setUp(self): self.setUpTestReactor() self.patch(reactor, 'callLater', self.reactor.callLater) self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) def setupFakeHttp(self): return self.successResultOf(fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://api.telegram.org/bot12345:secret')) def makeBot(self, chat_ids=None, authz=None, *args, **kwargs): if chat_ids is None: chat_ids = [] http = self.setupFakeHttp() return telegram.TelegramWebhookBot('12345:secret', http, chat_ids, authz, *args, **kwargs) def test_getContact(self): bot = self.makeBot() c1 = bot.getContact(self.USER, self.PRIVATE) c2 = bot.getContact(self.USER, self.CHANNEL) c1b = bot.getContact(self.USER, self.PRIVATE) self.assertIs(c1, c1b) self.assertIsInstance(c2, words.Contact) self.assertIn((-12345678, 123456789), bot.contacts) self.assertEqual({123456789, -12345678}, set(bot.channels.keys())) def test_getContact_update(self): try: bot = self.makeBot() contact = bot.getContact(self.USER, self.CHANNEL) updated_user = self.USER.copy() updated_user['username'] = "dirtyharry" self.assertEquals(contact.user_info['username'], "harrypotter") bot.getContact(updated_user, self.CHANNEL) self.assertEquals(contact.user_info['username'], "dirtyharry") finally: self.USER['username'] = "harrypotter" def test_getContact_invalid(self): bot = self.makeBot() bot.authz = {'': None} u = 
bot.getContact(user=self.USER, channel=self.CHANNEL) self.assertNotIn((-12345678, 123456789), bot.contacts) self.assertNotIn(-12345678, bot.channels) self.assertEqual(sys.getrefcount(u), 2) # local, sys c = u.channel self.assertEqual(sys.getrefcount(c), 3) # local, contact, sys del u self.assertEqual(sys.getrefcount(c), 2) # local, sys def test_getContact_valid(self): bot = self.makeBot() bot.authz = {'': None, 'command': 123456789} bot.getContact(user=self.USER, channel=self.CHANNEL) self.assertIn((-12345678, 123456789), bot.contacts) @defer.inlineCallbacks def test_set_webhook(self): bot = self.makeBot() bot.http_client.expect("post", "/setWebhook", json={'url': 'our.webhook'}, content_json={'ok': 1}) yield bot.set_webhook('our.webhook') @defer.inlineCallbacks def test_set_webhook_cert(self): bot = self.makeBot() bot.http_client.expect("post", "/setWebhook", data={'url': 'our.webhook'}, files={'certificate': b"this is certificate"}, content_json={'ok': 1}) yield bot.set_webhook('our.webhook', "this is certificate") @defer.inlineCallbacks def test_send_message(self): bot = self.makeBot() bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': 'Hello', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.send_message(1234, 'Hello') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_send_message_long(self): bot = self.makeBot() text1 = '\n'.join("{:039d}".format(i + 1) for i in range(102)) text2 = '\n'.join("{:039d}".format(i + 1) for i in range(102, 204)) text3 = '\n'.join("{:039d}".format(i + 1) for i in range(204, 250)) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text1, 'parse_mode': 'Markdown', 'reply_to_message_id': 1000}, content_json={'ok': 1, 'result': {'message_id': 1001}}) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text2, 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 1002}}) bot.http_client.expect("post", "/sendMessage", json={'chat_id': 1234, 'text': text3, 'parse_mode': 'Markdown', 'reply_markup': {'inline_keyboard': 'keyboard'}}, content_json={'ok': 1, 'result': {'message_id': 1003}}) text = '\n'.join("{:039d}".format(i + 1) for i in range(250)) m = yield bot.send_message(1234, text, reply_markup={'inline_keyboard': 'keyboard'}, reply_to_message_id=1000) self.assertEqual(m['message_id'], 1003) @defer.inlineCallbacks def test_edit_message(self): bot = self.makeBot() bot.http_client.expect("post", "/editMessageText", json={'chat_id': 1234, 'message_id': 9876, 'text': 'Hello', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.edit_message(1234, 9876, 'Hello') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_delete_message(self): bot = self.makeBot() bot.http_client.expect("post", "/deleteMessage", json={'chat_id': 1234, 'message_id': 9876}, content_json={'ok': 1}) yield bot.delete_message(1234, 9876) @defer.inlineCallbacks def test_send_sticker(self): bot = self.makeBot() bot.http_client.expect("post", "/sendSticker", json={'chat_id': 1234, 'sticker': 'xxxxx'}, content_json={'ok': 1, 'result': {'message_id': 9876}}) m = yield bot.send_sticker(1234, 'xxxxx') self.assertEqual(m['message_id'], 9876) @defer.inlineCallbacks def test_set_nickname(self): bot = self.makeBot() self.assertIsNone(bot.nickname) bot.http_client.expect("post", "/getMe", content_json={'ok': 1, 'result': {'username': 'testbot'}}) yield bot.set_nickname() 
self.assertEqual(bot.nickname, 'testbot') def prepare_request(self, **kwargs): payload = {"update_id": 12345} payload.update(kwargs) content = unicode2bytes(json.dumps(payload)) request = FakeRequest(content=content) request.uri = b"/bot12345:secret" request.method = b"POST" request.received_headers[b'Content-Type'] = b"application/json" return request def request_message(self, text): return self.prepare_request(message={ "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": text, }) def request_query(self, data): return self.prepare_request(callback_query={ "id": 123456, "from": self.USER, "data": data, "message": { "message_id": 12345, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, }}) def test_get_update(self): bot = self.makeBot() request = self.request_message("test") update = bot.get_update(request) self.assertEquals(update['message']['from'], self.USER) self.assertEquals(update['message']['chat'], self.CHANNEL) def test_get_update_bad_content_type(self): bot = self.makeBot() request = self.request_message("test") request.received_headers[b'Content-Type'] = b"application/data" with self.assertRaises(ValueError): bot.get_update(request) def test_render_POST(self): # This actually also tests process_incoming bot = self.makeBot() bot.contactClass = FakeContact request = self.request_message("test") bot.webhook.render_POST(request) contact = bot.getContact(self.USER, self.CHANNEL) self.assertEquals(contact.messages, ["test"]) def test_parse_query_cached(self): bot = self.makeBot() bot.contactClass = FakeContact bot.query_cache.update({ 100: "good" }) bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456}, content_json={'ok': 1}) request = self.request_query("100") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, self.CHANNEL).messages, ["good"]) def test_parse_query_cached_dict(self): bot = self.makeBot() bot.contactClass = FakeContact bot.query_cache = { 100: {'command': "good", 'notify': "hello"} } bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456, 'text': "hello"}, content_json={'ok': 1}) request = self.request_query("100") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, self.CHANNEL).messages, ["good"]) def test_parse_query_explicit(self): bot = self.makeBot() bot.contactClass = FakeContact bot.query_cache = { 100: "bad" } bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456}, content_json={'ok': 1}) request = self.request_query("good") bot.process_webhook(request) self.assertEquals(bot.getContact(self.USER, self.CHANNEL).messages, ["good"]) def test_parse_query_bad(self): bot = self.makeBot() bot.contactClass = FakeContact bot.query_cache.update({ 100: "bad" }) bot.http_client.expect("post", "/editMessageReplyMarkup", json={'chat_id': -12345678, 'message_id': 12345}, content_json={'ok': 1}) bot.http_client.expect("post", "/answerCallbackQuery", json={'callback_query_id': 123456, 'text': "Sorry, button is no longer valid!"}, content_json={'ok': 1}) request = self.request_query("101") bot.process_webhook(request) def makePollingBot(self, updates, chat_ids=None, authz=None, *args, **kwargs): if chat_ids is None: chat_ids = [] http = self.setupFakeHttp() return TestPollingBot(updates, '12345:secret', http, chat_ids, authz, *args, **kwargs) @defer.inlineCallbacks def test_polling(self): bot = self.makePollingBot(2) bot._polling_continue = True bot.http_client.expect("post", 
"/deleteWebhook", content_json={"ok": 1}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout}, content_json={ 'ok': 1, 'result': [{ "update_id": 10000, "message": { "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": "ignore"}}]}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout, "offset": 10001}, content_json={ 'ok': 1, 'result': [{ "update_id": 10001, "message": { "message_id": 124, "from": self.USER, "chat": self.CHANNEL, "date": 1566688889, "text": "/nay"}}]}) bot.http_client.expect( "post", "/sendMessage", json={'chat_id': -12345678, 'text': 'Never mind, Harry...', 'parse_mode': 'Markdown'}, content_json={'ok': 1, 'result': {'message_id': 125}}) yield bot.do_polling() def test_format_build_status(self): bot = self.makeBot() build = {'results': SUCCESS} self.assertEqual(bot.format_build_status(build), "completed successfully ✅") def test_format_build_status_short(self): bot = self.makeBot() build = {'results': WARNINGS} self.assertEqual(bot.format_build_status(build, short=True), " ⚠️") class HttpServiceWithErrors(fakehttpclientservice.HTTPClientService): def __init__(self, skip, errs, *args, **kwargs): self.__skip = skip self.__errs = errs self.succeeded = False super().__init__(*args, **kwargs) def post(self, ep, **kwargs): if self.__skip: self.__skip -= 1 else: if self.__errs: self.__errs -= 1 raise RuntimeError("{}".format(self.__errs + 1)) self.succeeded = True return super().post(ep, **kwargs) def setupFakeHttpWithErrors(self, skip, errs): return self.successResultOf(self.HttpServiceWithErrors.getFakeService( self.master, self, skip, errs, 'https://api.telegram.org/bot12345:secret')) @defer.inlineCallbacks def test_post_not_ok(self): bot = self.makeBot() bot.http_client.expect( "post", "/post", content_json={'ok': 0}) def log(msg): logs.append(msg) logs = [] bot.log = log yield bot.post("/post") self.assertIn("ERROR", logs[0]) def test_post_need_repeat(self): bot = self.makeBot() bot.http_client = self.setupFakeHttpWithErrors(0, 2) bot.http_client.expect( "post", "/post", content_json={'ok': 1}) def log(msg): logs.append(msg) logs = [] bot.log = log bot.post("/post") self.assertIn("ERROR", logs[0]) self.reactor.pump(3 * [30.]) self.assertTrue(bot.http_client.succeeded) def test_polling_need_repeat(self): bot = self.makePollingBot(1) bot.reactor = self.reactor bot.http_client = self.setupFakeHttpWithErrors(1, 2) bot._polling_continue = True bot.http_client.expect("post", "/deleteWebhook", content_json={"ok": 1}) bot.http_client.expect( "post", "/getUpdates", json={'timeout': bot.poll_timeout}, content_json={ 'ok': 1, 'result': [{ "update_id": 10000, "message": { "message_id": 123, "from": self.USER, "chat": self.CHANNEL, "date": 1566688888, "text": "ignore"}}]}) def log(msg): logs.append(msg) logs = [] bot.log = log bot.do_polling() self.assertIn("ERROR", logs[0]) self.reactor.pump(3 * [30.]) self.assertTrue(bot.http_client.succeeded) buildbot-2.6.0/master/buildbot/test/unit/test_reporters_utils.py000066400000000000000000000177121361162603000252440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.reporters import utils from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin class TestDataUtils(TestReactorMixin, unittest.TestCase, logging.LoggingMixin): LOGCONTENT = textwrap.dedent("""\ line zero line 1 """) def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantData=True, wantDb=True, wantMq=True) def setupDb(self): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=98, results=SUCCESS, reason="testReason1"), fakedb.Builder(id=80, name='Builder1'), fakedb.BuildRequest(id=9, buildsetid=97, builderid=80), fakedb.BuildRequest(id=10, buildsetid=97, builderid=80), fakedb.BuildRequest(id=11, buildsetid=98, builderid=80), fakedb.BuildRequest(id=12, buildsetid=98, builderid=80), fakedb.Build(id=18, number=0, builderid=80, buildrequestid=9, workerid=13, masterid=92, results=FAILURE), fakedb.Build(id=19, number=1, builderid=80, buildrequestid=10, workerid=13, masterid=92, results=RETRY), fakedb.Build(id=20, number=2, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=SUCCESS), fakedb.Build(id=21, number=3, builderid=80, buildrequestid=12, workerid=13, masterid=92, results=SUCCESS), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp(id=234), fakedb.Change(changeid=13, branch='trunk', revision='9283', author='me@foo', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=234), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='him@foo', patch_comment='foo', subdir='/foo', patchlevel=3), fakedb.SourceStamp(id=235, patchid=99), ]) for _id in (20, 21): self.db.insertTestData([ fakedb.BuildProperty( buildid=_id, name="workername", value="wrk"), fakedb.BuildProperty( buildid=_id, name="reason", value="because"), fakedb.BuildProperty( buildid=_id, name="owner", value="him"), fakedb.Step(id=100 + _id, buildid=_id, name="step1"), fakedb.Step(id=200 + _id, buildid=_id, name="step2"), fakedb.Log(id=60 + _id, stepid=100 + _id, name='stdio', slug='stdio', type='s', num_lines=2), fakedb.LogChunk(logid=60 + _id, first_line=0, last_line=1, compressed=0, content=self.LOGCONTENT), ]) @defer.inlineCallbacks def getChangesForBuild(buildid): assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild @defer.inlineCallbacks def test_getDetailsForBuildset(self): self.setupDb() res = yield utils.getDetailsForBuildset(self.master, 98, wantProperties=True, wantSteps=True, wantPreviousBuild=True) self.assertEqual(len(res['builds']), 2) build1 = res['builds'][0] build2 = res['builds'][1] buildset = res['buildset'] self.assertEqual(build1['properties'], {'reason': ('because', 'fakedb'), 'owner': ('him', 'fakedb'), 'workername': ('wrk', 'fakedb')}) self.assertEqual(len(build1['steps']), 2) 
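        # buildset 98 owns builds 20 and 21 (inserted in setupDb above); the
        # assertions below rely on them coming back in that order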
self.assertEqual(build1['buildid'], 20) self.assertEqual(build2['buildid'], 21) self.assertEqual(buildset['bsid'], 98) # make sure prev_build was computed self.assertEqual(build1['prev_build']['buildid'], 18) self.assertEqual(build2['prev_build']['buildid'], 20) @defer.inlineCallbacks def test_getDetailsForBuildsetWithLogs(self): self.setupDb() res = yield utils.getDetailsForBuildset(self.master, 98, wantProperties=True, wantSteps=True, wantPreviousBuild=True, wantLogs=True) build1 = res['builds'][0] self.assertEqual( build1['steps'][0]['logs'][0]['content']['content'], self.LOGCONTENT) @defer.inlineCallbacks def test_getResponsibleUsers(self): self.setupDb() res = yield utils.getResponsibleUsersForSourceStamp(self.master, 234) self.assertEqual(res, ["me@foo"]) @defer.inlineCallbacks def test_getResponsibleUsersFromPatch(self): self.setupDb() res = yield utils.getResponsibleUsersForSourceStamp(self.master, 235) self.assertEqual(res, ["him@foo"]) @defer.inlineCallbacks def test_getResponsibleUsersForBuild(self): self.setupDb() res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertEqual(sorted(res), sorted(["me@foo", "him"])) @defer.inlineCallbacks def test_getResponsibleUsersForBuildWithBadOwner(self): self.setUpLogging() self.setupDb() self.db.insertTestData([ fakedb.BuildProperty( buildid=20, name="owner", value=["him"]), ]) res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertLogged("Please report a bug") self.assertEqual(sorted(res), sorted(["me@foo", "him"])) @defer.inlineCallbacks def test_getResponsibleUsersForBuildWithOwners(self): self.setupDb() self.db.insertTestData([ fakedb.BuildProperty( buildid=20, name="owners", value=["him", "her"]), ]) res = yield utils.getResponsibleUsersForBuild(self.master, 20) self.assertEqual(sorted(res), sorted(["me@foo", "him", "her"])) @defer.inlineCallbacks def test_getPreviousBuild(self): self.setupDb() build = yield self.master.data.get(("builds", 21)) res = yield utils.getPreviousBuild(self.master, build) self.assertEqual(res['buildid'], 20) @defer.inlineCallbacks def test_getPreviousBuildWithRetry(self): self.setupDb() build = yield self.master.data.get(("builds", 20)) res = yield utils.getPreviousBuild(self.master, build) self.assertEqual(res['buildid'], 18) class TestURLUtils(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) def test_UrlForBuild(self): self.assertEqual(utils.getURLForBuild(self.master, 1, 3), 'http://localhost:8080/#builders/1/builds/3') buildbot-2.6.0/master/buildbot/test/unit/test_reporters_words.py000066400000000000000000000744021361162603000252410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import re import mock from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.reporters import words from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import datetime2epoch class ContactMixin(TestReactorMixin): botClass = words.StatusBot channelClass = words.Channel contactClass = words.Contact USER = "me" CHANNEL = "#buildbot" BUILDER_NAMES = ['builder1', 'builder2'] BUILDER_IDS = [23, 45] @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.patch(reactor, 'callLater', self.reactor.callLater) self.patch(reactor, 'seconds', self.reactor.seconds) self.patch(reactor, 'stop', self.reactor.stop) self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) for builderid, name in zip(self.BUILDER_IDS, self.BUILDER_NAMES): self.master.db.builders.addTestBuilder( builderid=builderid, name=name) self.bot = self.botClass(notify_events={'success': 1, 'failure': 1}) self.bot.channelClass = self.channelClass self.bot.contactClass = self.contactClass self.bot.nickname = 'nick' self.missing_workers = set() # fake out subscription/unsubscription self.subscribed = False # fake out clean shutdown self.bot.parent = self self.bot.master.botmaster = mock.Mock( name='StatusBot-instance.master.botmaster') self.bot.master.botmaster.shuttingDown = False def cleanShutdown(): self.bot.master.botmaster.shuttingDown = True self.bot.master.botmaster.cleanShutdown = cleanShutdown def cancelCleanShutdown(): self.bot.master.botmaster.shuttingDown = False self.bot.master.botmaster.cancelCleanShutdown = cancelCleanShutdown self.contact = self.contactClass(user=self.USER, channel=self.bot.getChannel(self.CHANNEL)) yield self.contact.channel.setServiceParent(self.master) yield self.master.startService() def patch_send(self): self.sent = [] def send(msg): if not isinstance(msg, (list, tuple)): msg = (msg,) for m in msg: self.sent.append(m) self.contact.channel.send = send @defer.inlineCallbacks def do_test_command(self, command, args='', contact=None, clock_ticks=None, exp_usage=True, exp_UsageError=False, shuttingDown=False, **kwargs): if contact is None: contact = self.contact cmd = getattr(contact, 'command_' + command.upper()) if exp_usage: self.assertTrue(hasattr(cmd, 'usage')) self.patch_send() self.bot.master.botmaster.shuttingDown = shuttingDown if exp_UsageError: try: yield cmd(args, **kwargs) except words.UsageError: return else: self.fail("no UsageError") else: yield cmd(args, **kwargs) if clock_ticks: self.reactor.pump(clock_ticks) def setupSomeBuilds(self): self.master.db.insertTestData([ # Three builds on builder#0, One build on builder#1 fakedb.Build(id=13, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=82, number=3), fakedb.Build(id=14, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=83, number=4), fakedb.Build(id=15, masterid=88, workerid=13, builderid=self.BUILDER_IDS[1], buildrequestid=84, number=5), fakedb.Build(id=16, masterid=88, workerid=13, builderid=self.BUILDER_IDS[0], buildrequestid=85, number=6), ]) self.master.db.builds.finishBuild(buildid=14, results=SUCCESS) def setup_multi_builders(self): # Make first builder configured, but not connected # Make second builder configured and connected self.master.db.insertTestData([ 
fakedb.Worker(id=1, name='linux1', info={}), # connected one fakedb.Worker(id=2, name='linux2', info={}), # disconnected one fakedb.BuilderMaster( id=4012, masterid=13, builderid=self.BUILDER_IDS[0]), fakedb.BuilderMaster( id=4013, masterid=13, builderid=self.BUILDER_IDS[1]), fakedb.ConfiguredWorker(id=14013, workerid=2, buildermasterid=4012), fakedb.ConfiguredWorker(id=14013, workerid=1, buildermasterid=4013), ]) class TestContact(ContactMixin, unittest.TestCase): def test_channel_service(self): self.assertTrue(self.contact.channel.running) self.contact.channel.stopService() @defer.inlineCallbacks def test_command_notify0(self): yield self.do_test_command('notify', exp_UsageError=True) yield self.do_test_command('notify', args="invalid arg", exp_UsageError=True) yield self.do_test_command('notify', args="on") self.assertEqual( self.sent, ["The following events are being notified: finished, started."]) yield self.do_test_command('notify', args="off") self.assertEqual( self.sent, ['No events are being notified.']) yield self.do_test_command('notify', args="on started") self.assertEqual( self.sent, ["The following events are being notified: started."]) yield self.do_test_command('notify', args="off started") self.assertEqual( self.sent, ['No events are being notified.']) yield self.assertFailure( self.do_test_command('notify', args="off finished"), KeyError) yield self.do_test_command('notify', args="list") self.assertEqual( self.sent, ['No events are being notified.']) @defer.inlineCallbacks def notify_build_test(self, notify_args): self.bot.tags = None yield self.test_command_watch_builder0() yield self.do_test_command('notify', args=notify_args) buildStarted = self.contact.channel.subscribed[0].callback buildFinished = self.contact.channel.subscribed[1].callback for buildid in (13, 14, 16): self.master.db.builds.finishBuild(buildid=buildid, results=SUCCESS) build = yield self.master.db.builds.getBuild(buildid) buildStarted("somekey", build) buildFinished("somekey", build) def test_command_notify_build_started(self): self.notify_build_test("on started") def test_command_notify_build_finished(self): self.notify_build_test("on finished") def test_command_notify_build_better(self): self.notify_build_test("on better") def test_command_notify_build_worse(self): self.notify_build_test("on worse") def test_command_notify_build_problem(self): self.notify_build_test("on problem") def test_command_notify_build_recovery(self): self.notify_build_test("on recovery") def test_command_notify_build_started_finished(self): self.notify_build_test("on") @defer.inlineCallbacks def test_notify_missing_worker(self): self.patch_send() yield self.do_test_command('notify', args='on worker') missing_worker = self.contact.channel.subscribed[2].callback missing_worker((None, None, 'missing'), dict(workerid=1, name="work", last_connection="sometime")) self.assertEquals(self.sent[1], "Worker `work` is missing. 
It was seen last on sometime.") self.assertIn(1, self.contact.channel.missing_workers) @defer.inlineCallbacks def test_notify_worker_is_back(self): self.patch_send() yield self.do_test_command('notify', args='on worker') self.contact.channel.missing_workers.add(1) missing_worker = self.contact.channel.subscribed[2].callback missing_worker((None, None, 'connected'), dict(workerid=1, name="work", last_connection="sometime")) self.assertEquals(self.sent[1], "Worker `work` is back online.") self.assertNotIn(1, self.contact.channel.missing_workers) @defer.inlineCallbacks def test_command_help_noargs(self): yield self.do_test_command('help') self.assertIn('help - ', '\n'.join(self.sent)) @defer.inlineCallbacks def test_command_help_arg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = 'foo - bar' yield self.do_test_command('help', args='foo') self.assertIn('Usage: foo - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_no_usage(self): self.contact.command_FOO = lambda: None yield self.do_test_command('help', args='foo') self.assertIn('No usage info for', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { None: 'foo - bar' } yield self.do_test_command('help', args='foo') self.assertIn('Usage: foo - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_no_usage(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = {} yield self.do_test_command('help', args='foo') self.assertIn("No usage info for 'foo'", self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { 'this': 'foo this - bar' } yield self.do_test_command('help', args='foo this') self.assertIn('Usage: foo this - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_no_usage(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { # nothing for arg 'this' ('this', 'first'): 'foo this first - bar' } yield self.do_test_command('help', args='foo this') self.assertIn("No usage info for 'foo' 'this'", self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_subarg(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { ('this', 'first'): 'foo this first - bar' } yield self.do_test_command('help', args='foo this first') self.assertIn('Usage: foo this first - bar', self.sent[0]) @defer.inlineCallbacks def test_command_help_dict_command_arg_subarg_no_usage(self): self.contact.command_FOO = lambda: None self.contact.command_FOO.usage = { None: 'foo - bar', 'this': 'foo this - bar', ('this', 'first'): 'foo this first - bar' # nothing for subarg 'missing' } yield self.do_test_command('help', args='foo this missing') self.assertIn("No usage info for 'foo' 'this' 'missing'", self.sent[0]) @defer.inlineCallbacks def test_command_help_nosuch(self): yield self.do_test_command('help', args='foo', exp_UsageError=True) @defer.inlineCallbacks def test_command_shutdown(self): yield self.do_test_command('shutdown', exp_UsageError=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) @defer.inlineCallbacks def test_command_shutdown_check_running(self): yield self.do_test_command('shutdown', args='check', shuttingDown=False) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) self.assertIn('buildbot is running', self.sent[0]) @defer.inlineCallbacks 
def test_command_shutdown_check_shutting_down(self): yield self.do_test_command('shutdown', args='check', shuttingDown=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, True) self.assertIn('buildbot is shutting down', self.sent[0]) @defer.inlineCallbacks def test_command_shutdown_start(self): yield self.do_test_command('shutdown', args='start', shuttingDown=False) self.assertEqual(self.bot.master.botmaster.shuttingDown, True) @defer.inlineCallbacks def test_command_shutdown_stop(self): yield self.do_test_command('shutdown', args='stop', shuttingDown=True) self.assertEqual(self.bot.master.botmaster.shuttingDown, False) @defer.inlineCallbacks def test_command_shutdown_now(self): yield self.do_test_command('shutdown', args='now') self.assertEqual(self.bot.master.botmaster.shuttingDown, False) self.assertTrue(self.reactor.stop_called) @defer.inlineCallbacks def test_command_source(self): yield self.do_test_command('source') self.assertIn('My source', self.sent[0]) @defer.inlineCallbacks def test_command_commands(self): yield self.do_test_command('commands') self.assertIn('Buildbot commands', self.sent[0]) @defer.inlineCallbacks def test_command_hello(self): yield self.do_test_command('hello', exp_usage=False) self.assertIn(self.sent[0], words.GREETINGS) @defer.inlineCallbacks def test_command_list(self): yield self.do_test_command('list', exp_UsageError=True) @defer.inlineCallbacks def test_command_list_builders(self): yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) for builder in self.BUILDER_NAMES: self.assertIn('%s [offline]' % builder, self.sent[0]) @defer.inlineCallbacks def test_command_list_workers(self): workers = ['worker1', 'worker2'] for worker in workers: self.master.db.workers.db.insertTestData([ fakedb.Worker(name=worker) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) for worker in workers: self.assertIn('%s [offline]' % worker, self.sent[0]) @defer.inlineCallbacks def test_command_list_workers_online(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all workers') self.assertEqual(len(self.sent), 1) self.assertNotIn('linux1 [disconnected]', self.sent[0]) self.assertIn('linux2 [disconnected]', self.sent[0]) @defer.inlineCallbacks def test_command_list_changes(self): self.master.db.workers.db.insertTestData([ fakedb.Change() ]) yield self.do_test_command('list', args='2 changes') self.assertEqual(len(self.sent), 1) @defer.inlineCallbacks def test_command_list_builders_not_connected(self): self.setup_multi_builders() yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) self.assertIn('%s [offline]' % self.BUILDER_NAMES[0], self.sent[0]) self.assertIn('%s [offline]' % self.BUILDER_NAMES[1], self.sent[0]) @defer.inlineCallbacks def test_command_list_builders_connected(self): self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=1) ]) yield self.do_test_command('list', args='all builders') self.assertEqual(len(self.sent), 1) self.assertIn('%s [offline]' % self.BUILDER_NAMES[0], self.sent[0]) self.assertNotIn('%s [offline]' % self.BUILDER_NAMES[1], self.sent[0]) @defer.inlineCallbacks def test_command_status(self): yield self.do_test_command('status') @defer.inlineCallbacks def test_command_status_online(self): # we 
are online and we have some finished builds self.setup_multi_builders() self.master.db.insertTestData([ fakedb.ConfiguredWorker(id=14012, workerid=1, buildermasterid=4013), fakedb.ConnectedWorker(id=114, masterid=13, workerid=1) ]) self.setupSomeBuilds() self.master.db.builds.finishBuild(buildid=13, results=FAILURE) self.master.db.builds.finishBuild(buildid=15, results=SUCCESS) self.master.db.builds.finishBuild(buildid=16, results=FAILURE) yield self.do_test_command('status') @defer.inlineCallbacks def test_command_status_all(self): yield self.do_test_command('status', args='all') @defer.inlineCallbacks def test_command_status_builder0_offline(self): yield self.do_test_command('status', args=self.BUILDER_NAMES[0]) self.assertEqual(self.sent, ['`%s`: offline' % self.BUILDER_NAMES[0]]) @defer.inlineCallbacks def test_command_status_builder0_running(self): self.setupSomeBuilds() yield self.do_test_command('status', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) self.assertIn('`builder1`: running', self.sent[0]) self.assertRegex(self.sent[0], r' build \[#3\].* \(no current step\)') self.assertRegex(self.sent[0], r' build \[#6\].* \(no current step\)') @defer.inlineCallbacks def test_command_status_bogus(self): yield self.do_test_command('status', args='bogus_builder', exp_UsageError=True) def sub_seconds(self, strings): # sometimes timing works out wrong, so just call it "n seconds" return [re.sub(r'\d seconds|a moment', 'N seconds', s) for s in strings] @defer.inlineCallbacks def test_command_last(self): self.setupSomeBuilds() self.setup_multi_builders() # Also set the connectedness: self.master.db.insertTestData([ fakedb.ConnectedWorker(id=113, masterid=13, workerid=2) ]) yield self.do_test_command('last') self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent) @defer.inlineCallbacks def test_command_last_all(self): self.setupSomeBuilds() yield self.do_test_command('last', args='all') self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent[0]) self.assertIn( '`builder2`: no builds run since last restart', sent[0]) @defer.inlineCallbacks def test_command_last_builder_bogus(self): yield self.do_test_command('last', args="BOGUS", exp_UsageError=True) @defer.inlineCallbacks def test_command_last_builder0(self): self.setupSomeBuilds() yield self.do_test_command('last', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) sent = self.sub_seconds(self.sent) self.assertIn( '`builder1`: last build completed successfully (N seconds ago)', sent) @defer.inlineCallbacks def test_command_last_builder1(self): self.setupSomeBuilds() yield self.do_test_command('last', args=self.BUILDER_NAMES[1]) self.assertEqual(len(self.sent), 1) self.assertIn( '`builder2`: no builds run since last restart', self.sent) @defer.inlineCallbacks def test_command_watch(self): yield self.do_test_command('watch', exp_UsageError=True) @defer.inlineCallbacks def test_command_watch_builder0_no_builds(self): yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) self.assertIn('There are no currently running builds.', self.sent[0]) @defer.inlineCallbacks def test_command_watch_builder0(self): self.setupSomeBuilds() yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 2) self.assertIn( 'Watching build 
[#3](http://localhost:8080/#builders/23/builds/3) of `builder1` until it finishes...', self.sent) self.assertIn( 'Watching build [#6](http://localhost:8080/#builders/23/builds/6) of `builder1` until it finishes...', self.sent) @defer.inlineCallbacks def test_command_watch_builder0_get_notifications(self): # (continue from the prior test) self.bot.tags = None yield self.test_command_watch_builder0() del self.sent[:] yield self.sendBuildFinishedMessage(16) self.assertEqual(len(self.sent), 1) self.assertIn( "Build [#6](http://localhost:8080/#builders/23/builds/6) of `builder1` completed successfully.", self.sent) @defer.inlineCallbacks def test_command_watch_builder1(self): self.setupSomeBuilds() yield self.do_test_command('watch', args=self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 2) self.assertIn( 'Watching build [#3](http://localhost:8080/#builders/23/builds/3) of `builder1` until it finishes...', self.sent) self.assertIn( 'Watching build [#6](http://localhost:8080/#builders/23/builds/6) of `builder1` until it finishes...', self.sent) @defer.inlineCallbacks def sendBuildFinishedMessage(self, buildid, results=0): self.master.db.builds.finishBuild(buildid=buildid, results=SUCCESS) build = yield self.master.db.builds.getBuild(buildid) self.master.mq.callConsumer(('builds', str(buildid), 'complete'), dict( buildid=buildid, number=build['number'], builderid=build['builderid'], buildrequestid=build['buildrequestid'], workerid=build['workerid'], masterid=build['masterid'], started_at=datetime2epoch( build['started_at']), complete=True, complete_at=datetime2epoch( build['complete_at']), state_string='', results=results, )) @defer.inlineCallbacks def test_command_stop(self): yield self.do_test_command('stop', exp_UsageError=True) @defer.inlineCallbacks def test_command_stop_bogus_builder(self): yield self.do_test_command('stop', args="build BOGUS 'i have a reason'", exp_UsageError=True) @defer.inlineCallbacks def test_command_stop_builder0_no_builds(self): yield self.do_test_command('stop', args="build %s 'i have a reason'" % self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 1) self.assertIn('no build is', self.sent[0]) @defer.inlineCallbacks def test_command_stop_builder0_1_builds(self): self.setupSomeBuilds() yield self.do_test_command('stop', args="build %s 'i have a reason'" % self.BUILDER_NAMES[0]) self.assertEqual(len(self.sent), 2) self.assertRegex(self.sent[0], r'Build \[#[36]\].* of `builder1` interrupted') self.assertRegex(self.sent[1], r'Build \[#[63]\].* of `builder1` interrupted') @defer.inlineCallbacks def test_command_force_no_args(self): yield self.do_test_command('force', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_wrong_first_arg(self): yield self.do_test_command('force', args='notbuild', exp_UsageError=True) @defer.inlineCallbacks def test_command_force_build_no_args(self): yield self.do_test_command('force', args='build', exp_UsageError=True) # TODO: missing tests for: # - bad args # - arg validation failure (self.master.config.validation) @defer.inlineCallbacks def test_command_force(self): yield self.do_test_command( 'force', args='build --branch BRANCH1 --revision REV1 --props=PROP1=VALUE1 {} REASON' .format(self.BUILDER_NAMES[0])) @defer.inlineCallbacks def test_handleMessage_short_command(self): self.contact.command_TESTY = mock.Mock() yield self.contact.handleMessage('testy') self.contact.command_TESTY.assert_called_with('') @defer.inlineCallbacks def test_handleMessage_long_command(self): self.contact.command_TESTY = mock.Mock() 
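        # only the first word selects the command; everything after it is
        # passed through verbatim as the argument string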
yield self.contact.handleMessage('testy westy boo') self.contact.command_TESTY.assert_called_with('westy boo') @defer.inlineCallbacks def test_handleMessage_excited(self): self.patch_send() yield self.contact.handleMessage('hi!') self.assertEqual(len(self.sent), 1) # who cares what it says.. @defer.inlineCallbacks def test_handleMessage_exception(self): self.patch_send() def command_TESTY(msg): raise RuntimeError("FAIL") self.contact.command_TESTY = command_TESTY yield self.contact.handleMessage('testy boom') self.assertEqual(self.sent, ["Something bad happened (see logs)"]) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_handleMessage_UsageError(self): self.patch_send() def command_TESTY(msg): raise words.UsageError("oh noes") self.contact.command_TESTY = command_TESTY yield self.contact.handleMessage('testy boom') self.assertEqual(self.sent, ["oh noes"]) @defer.inlineCallbacks def test_unclosed_quote(self): yield self.do_test_command('list', args='args\'', exp_UsageError=True) yield self.do_test_command('status', args='args\'', exp_UsageError=True) yield self.do_test_command('notify', args='args\'', exp_UsageError=True) yield self.do_test_command('watch', args='args\'', exp_UsageError=True) yield self.do_test_command('force', args='args\'', exp_UsageError=True) yield self.do_test_command('stop', args='args\'', exp_UsageError=True) yield self.do_test_command('last', args='args\'', exp_UsageError=True) yield self.do_test_command('help', args='args\'', exp_UsageError=True) @defer.inlineCallbacks def test_buildStarted(self): self.setupSomeBuilds() self.patch_send() build = yield self.master.db.builds.getBuild(13) self.bot.tags = None self.contact.channel.notify_for = lambda _: True self.contact.useRevisions = False self.contact.channel.buildStarted(build) self.assertEqual( self.sent.pop(), "Build [#3](http://localhost:8080/#builders/23/builds/3) of `builder1` started.") def test_getCommandMethod_authz_default(self): self.bot.authz = words.StatusBot.expand_authz(None) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, self.contact.access_denied) authz1 = { 'force': ['me'], 'shutdown': ['notme', 'someone'], ('dance', 'notify'): True, '': False} def test_getCommandMethod_explicit_allow(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('force') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_disallow(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_multi(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) self.assertIn('DANCE', self.bot.authz) meth = self.contact.getCommandMethod('dance') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_explicit_default(self): self.bot.authz = words.StatusBot.expand_authz(self.authz1) meth = self.contact.getCommandMethod('help') self.assertEqual(meth, self.contact.access_denied) authz2 = { 'shutdown': False, '': False, '*': True} def test_getCommandMethod_exclamation(self): self.bot.authz = words.StatusBot.expand_authz(self.authz2) meth = self.contact.getCommandMethod('help') self.assertNotEqual(meth, self.contact.access_denied) def test_getCommandMethod_exclamation_override(self): self.bot.authz = words.StatusBot.expand_authz(self.authz2) meth = self.contact.getCommandMethod('shutdown') self.assertEqual(meth, 
self.contact.access_denied) def test_access_denied(self): self.patch_send() self.contact.access_denied() self.assertIn("not pass", self.sent[0]) @defer.inlineCallbacks def test_bot_loadState(self): boid = yield self.bot._get_object_id() self.master.db.insertTestData([ fakedb.ObjectState(objectid=boid, name='notify_events', value_json='[["#channel1", ["warnings"]]]'), ]) yield self.bot.loadState() self.assertEqual(self.bot.channels['#channel1'].notify_events, {'warnings'}) buildbot-2.6.0/master/buildbot/test/unit/test_revlinks.py000066400000000000000000000122641361162603000236310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.revlinks import GithubRevlink from buildbot.revlinks import GitwebMatch from buildbot.revlinks import RevlinkMatch from buildbot.revlinks import SourceforgeGitRevlink from buildbot.revlinks import SourceforgeGitRevlink_AlluraPlatform from buildbot.revlinks import default_revlink_matcher class TestGithubRevlink(unittest.TestCase): revision = 'b6874701b54e0043a78882b020afc86033133f91' url = 'https://github.com/buildbot/buildbot/commit/b6874701b54e0043a78882b020afc86033133f91' def testHTTPS(self): self.assertEqual(GithubRevlink(self.revision, 'https://github.com/buildbot/buildbot.git'), self.url) def testGIT(self): self.assertEqual(GithubRevlink(self.revision, 'git://github.com/buildbot/buildbot.git'), self.url) def testSSH(self): self.assertEqual(GithubRevlink(self.revision, 'git@github.com:buildbot/buildbot.git'), self.url) def testSSHuri(self): self.assertEqual(GithubRevlink(self.revision, 'ssh://git@github.com/buildbot/buildbot.git'), self.url) class TestSourceforgeGitRevlink(unittest.TestCase): revision = 'b99c89a2842d386accea8072ae5bb6e24aa7cf29' url = 'http://gemrb.git.sourceforge.net/git/gitweb.cgi?p=gemrb/gemrb;a=commit;h=b99c89a2842d386accea8072ae5bb6e24aa7cf29' def testGIT(self): self.assertEqual(SourceforgeGitRevlink(self.revision, 'git://gemrb.git.sourceforge.net/gitroot/gemrb/gemrb'), self.url) def testSSH(self): self.assertEqual(SourceforgeGitRevlink(self.revision, 'somebody@gemrb.git.sourceforge.net:gitroot/gemrb/gemrb'), self.url) def testSSHuri(self): self.assertEqual(SourceforgeGitRevlink(self.revision, 'ssh://somebody@gemrb.git.sourceforge.net/gitroot/gemrb/gemrb'), self.url) class TestSourceforgeGitRevlink_AlluraPlatform(unittest.TestCase): revision = '6f9b1470bae497c6ce47e4cf8c9195d864d2ba2f' url = 'https://sourceforge.net/p/klusters/klusters/ci/6f9b1470bae497c6ce47e4cf8c9195d864d2ba2f/' def testGIT(self): self.assertEqual(SourceforgeGitRevlink_AlluraPlatform(self.revision, 'git://git.code.sf.net/p/klusters/klusters'), self.url) def testSSHuri(self): self.assertEqual(SourceforgeGitRevlink_AlluraPlatform(self.revision, 'ssh://somebody@git.code.sf.net/p/klusters/klusters'), self.url) class TestRevlinkMatch(unittest.TestCase): def 
testNotmuch(self):
        revision = 'f717d2ece1836c863f9cc02abd1ff2539307cd1d'
        matcher = RevlinkMatch(['git://notmuchmail.org/git/(.*)'],
                               r'http://git.notmuchmail.org/git/\1/commit/%s')
        self.assertEqual(matcher(revision, 'git://notmuchmail.org/git/notmuch'),
                         'http://git.notmuchmail.org/git/notmuch/commit/f717d2ece1836c863f9cc02abd1ff2539307cd1d')

    def testSingleString(self):
        revision = 'rev'
        matcher = RevlinkMatch('test', 'out%s')
        self.assertEqual(matcher(revision, 'test'), 'outrev')

    def testSingleUnicode(self):
        revision = 'rev'
        matcher = RevlinkMatch('test', 'out%s')
        self.assertEqual(matcher(revision, 'test'), 'outrev')

    def testTwoCaptureGroups(self):
        revision = 'rev'
        matcher = RevlinkMatch('([A-Z]*)Z([0-9]*)', r'\2-\1-%s')
        self.assertEqual(matcher(revision, 'ABCZ43'), '43-ABC-rev')


class TestGitwebMatch(unittest.TestCase):

    def testOrgmode(self):
        revision = '490d6ace10e0cfe74bab21c59e4b7bd6aa3c59b8'
        matcher = GitwebMatch(
            'git://orgmode.org/(?P<repo>.*)', 'http://orgmode.org/w/')
        self.assertEqual(matcher(revision, 'git://orgmode.org/org-mode.git'),
                         'http://orgmode.org/w/?p=org-mode.git;a=commit;h=490d6ace10e0cfe74bab21c59e4b7bd6aa3c59b8')


class TestDefaultRevlinkMultiPlexer(unittest.TestCase):
    revision = "0"

    def testAllRevlinkMatchers(self):
        # GithubRevlink
        self.assertTrue(default_revlink_matcher(
            self.revision, 'https://github.com/buildbot/buildbot.git'))
        # SourceforgeGitRevlink
        self.assertTrue(default_revlink_matcher(
            self.revision, 'git://gemrb.git.sourceforge.net/gitroot/gemrb/gemrb'))
        # SourceforgeGitRevlink_AlluraPlatform
        self.assertTrue(default_revlink_matcher(
            self.revision, 'git://git.code.sf.net/p/klusters/klusters'))
buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_base.py000066400000000000000000000730531361162603000251300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.changes import changes from buildbot.process import properties from buildbot.process.properties import Interpolate from buildbot.schedulers import base from buildbot.test.fake import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class BaseScheduler(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 19 SCHEDULERID = 9 exp_bsid_brids = (123, {'b': 456}) def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, name='testsched', builderNames=None, properties=None, codebases=None): if builderNames is None: builderNames = ['a', 'b'] if properties is None: properties = {} if codebases is None: codebases = {'': {}} if isinstance(builderNames, list): dbBuilder = list() builderid = 0 for builderName in builderNames: builderid += 1 dbBuilder.append(fakedb.Builder(id=builderid, name=builderName)) self.master.db.insertTestData(dbBuilder) sched = self.attachScheduler( base.BaseScheduler(name=name, builderNames=builderNames, properties=properties, codebases=codebases), self.OBJECTID, self.SCHEDULERID) self.master.data.updates.addBuildset = mock.Mock( name='data.addBuildset', side_effect=lambda *args, **kwargs: defer.succeed(self.exp_bsid_brids)) return sched # tests def test_constructor_builderNames(self): with self.assertRaises(config.ConfigErrors): self.makeScheduler(builderNames='xxx') def test_constructor_builderNames_unicode(self): self.makeScheduler(builderNames=['a']) def test_constructor_builderNames_renderable(self): @properties.renderer def names(props): return ['a'] self.makeScheduler(builderNames=names) def test_constructor_codebases_valid(self): codebases = {"codebase1": {"repository": "", "branch": "", "revision": ""}} self.makeScheduler(codebases=codebases) def test_constructor_codebases_valid_list(self): codebases = ['codebase1'] self.makeScheduler(codebases=codebases) def test_constructor_codebases_invalid(self): # scheduler only accepts codebases with at least repository set codebases = {"codebase1": {"dictionary": "", "that": "", "fails": ""}} with self.assertRaises(config.ConfigErrors): self.makeScheduler(codebases=codebases) @defer.inlineCallbacks def test_getCodebaseDict(self): sched = self.makeScheduler( codebases={'lib': {'repository': 'librepo'}}) cbd = yield sched.getCodebaseDict('lib') self.assertEqual(cbd, {'repository': 'librepo'}) @defer.inlineCallbacks def test_getCodebaseDict_constructedFromList(self): sched = self.makeScheduler(codebases=['lib', 'lib2']) cbd = yield sched.getCodebaseDict('lib') self.assertEqual(cbd, {}) def test_getCodebaseDict_not_found(self): sched = self.makeScheduler( codebases={'lib': {'repository': 'librepo'}}) return self.assertFailure(sched.getCodebaseDict('app'), KeyError) def test_listBuilderNames(self): sched = self.makeScheduler(builderNames=['x', 'y']) self.assertEqual(sched.listBuilderNames(), ['x', 'y']) @defer.inlineCallbacks def test_startConsumingChanges_fileIsImportant_check(self): sched = self.makeScheduler() try: yield sched.startConsumingChanges(fileIsImportant="maybe") except AssertionError: pass else: self.fail("didn't assert") @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler() expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': 
not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def do_test_change_consumption(self, kwargs, expected_result): # (expected_result should be True (important), False (unimportant), or # None (ignore the change)) sched = self.makeScheduler() sched.startService() self.addCleanup(sched.stopService) # set up a change message, a changedict, a change, and convince # getChange and fromChdict to convert one to the other msg = dict(changeid=12934) chdict = dict(changeid=12934, is_chdict=True) def getChange(changeid): assert changeid == 12934 return defer.succeed(chdict) self.db.changes.getChange = getChange change = self.makeFakeChange() change.number = 12934 def fromChdict(cls, master, chdict): assert chdict['changeid'] == 12934 and chdict['is_chdict'] return defer.succeed(change) self.patch(changes.Change, 'fromChdict', classmethod(fromChdict)) change_received = [None] def gotChange(got_change, got_important): # check that we got the expected change object self.assertIdentical(got_change, change) change_received[0] = got_important return defer.succeed(None) sched.gotChange = gotChange yield sched.startConsumingChanges(**kwargs) # check that it registered callbacks self.assertEqual(len(self.mq.qrefs), 2) qref = self.mq.qrefs[1] self.assertEqual(qref.filter, ('changes', None, 'new')) # invoke the callback with the change, and check the result qref.callback('change.12934.new', msg) self.assertEqual(change_received[0], expected_result) def test_change_consumption_defaults(self): # all changes are important by default return self.do_test_change_consumption( dict(), True) def test_change_consumption_fileIsImportant_True(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: True), True) def test_change_consumption_fileIsImportant_False(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: False), False) @defer.inlineCallbacks def test_change_consumption_fileIsImportant_exception(self): yield self.do_test_change_consumption( dict(fileIsImportant=lambda c: 1 / 0), None) self.assertEqual(1, len(self.flushLoggedErrors(ZeroDivisionError))) def test_change_consumption_change_filter_True(self): cf = mock.Mock() cf.filter_change = lambda c: True return self.do_test_change_consumption( dict(change_filter=cf), True) def test_change_consumption_change_filter_False(self): cf = mock.Mock() cf.filter_change = lambda c: False return self.do_test_change_consumption( dict(change_filter=cf), None) def test_change_consumption_fileIsImportant_False_onlyImportant(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: False, onlyImportant=True), None) def test_change_consumption_fileIsImportant_True_onlyImportant(self): return self.do_test_change_consumption( dict(fileIsImportant=lambda c: True, onlyImportant=True), True) @defer.inlineCallbacks def test_activation(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() sched.activate = mock.Mock(return_value=defer.succeed(None)) sched.deactivate = mock.Mock(return_value=defer.succeed(None)) # set the schedulerid, and claim the scheduler on another master yield self.setSchedulerToMaster(self.OTHER_MASTER_ID) yield sched.startService() sched.clock.advance(sched.POLL_INTERVAL_SEC / 2) sched.clock.advance(sched.POLL_INTERVAL_SEC / 5) sched.clock.advance(sched.POLL_INTERVAL_SEC / 5) 
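        # the scheduler is still claimed by OTHER_MASTER_ID, so polling on
        # this master must not activate it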
self.assertFalse(sched.activate.called) self.assertFalse(sched.deactivate.called) self.assertFalse(sched.isActive()) # objectid is attached by the test helper self.assertEqual(sched.serviceid, self.SCHEDULERID) # clear that masterid yield sched.stopService() self.setSchedulerToMaster(None) yield sched.startService() sched.clock.advance(sched.POLL_INTERVAL_SEC) self.assertTrue(sched.activate.called) self.assertFalse(sched.deactivate.called) self.assertTrue(sched.isActive()) # stop the service and see that deactivate is called yield sched.stopService() self.assertTrue(sched.activate.called) self.assertTrue(sched.deactivate.called) self.assertFalse(sched.isActive()) def test_activation_claim_raises(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() # set the schedulerid, and claim the scheduler on another master self.setSchedulerToMaster(RuntimeError()) sched.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertFalse(sched.isActive()) def test_activation_activate_fails(self): sched = self.makeScheduler(name='n', builderNames=['a']) sched.clock = task.Clock() def activate(): raise RuntimeError('oh noes') sched.activate = activate sched.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) @defer.inlineCallbacks def do_addBuildsetForSourceStampsWithDefaults(self, codebases, sourcestamps, exp_sourcestamps): sched = self.makeScheduler(name='n', builderNames=['b'], codebases=codebases) bsid, brids = yield sched.addBuildsetForSourceStampsWithDefaults( reason='power', sourcestamps=sourcestamps, waited_for=False) self.assertEqual((bsid, brids), self.exp_bsid_brids) call = self.master.data.updates.addBuildset.mock_calls[0] def sourceStampKey(sourceStamp): repository = sourceStamp.get('repository', '') if repository is None: repository = '' branch = sourceStamp.get('branch', '') if not None else '' if branch is None: branch = '' return (repository, branch) self.assertEqual(sorted(call[2]['sourcestamps'], key=sourceStampKey), sorted(exp_sourcestamps, key=sourceStampKey)) def test_addBuildsetForSourceStampsWithDefaults(self): codebases = { 'cbA': dict(repository='svn://A..', branch='stable', revision='13579'), 'cbB': dict(repository='svn://B..', branch='stable', revision='24680') } sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, {'codebase': 'cbB', 'revision': 'BB'}, ] exp_sourcestamps = [ {'repository': 'svn://B..', 'branch': 'stable', 'revision': 'BB', 'codebase': 'cbB', 'project': ''}, {'repository': 'svn://A..', 'branch': 'AA', 'project': '', 'revision': '13579', 'codebase': 'cbA'}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) def test_addBuildsetForSourceStampsWithDefaults_fill_in_codebases(self): codebases = { 'cbA': dict(repository='svn://A..', branch='stable', revision='13579'), 'cbB': dict(repository='svn://B..', branch='stable', revision='24680') } sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, ] exp_sourcestamps = [ {'repository': 'svn://B..', 'branch': 'stable', 'revision': '24680', 'codebase': 'cbB', 'project': ''}, {'repository': 'svn://A..', 'branch': 'AA', 'project': '', 'revision': '13579', 'codebase': 'cbA'}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) def test_addBuildsetForSourceStampsWithDefaults_no_repository(self): exp_sourcestamps = [ {'repository': '', 'branch': None, 'revision': None, 'codebase': '', 'project': ''}, ] return 
self.do_addBuildsetForSourceStampsWithDefaults( {'': {}}, [], exp_sourcestamps) def test_addBuildsetForSourceStamps_unknown_codbases(self): codebases = {} sourcestamps = [ {'codebase': 'cbA', 'branch': 'AA'}, {'codebase': 'cbB', 'revision': 'BB'}, ] exp_sourcestamps = [ {'branch': None, 'revision': 'BB', 'codebase': 'cbB', 'project': '', 'repository': ''}, {'branch': 'AA', 'revision': None, 'codebase': 'cbA', 'project': '', 'repository': ''}, ] return self.do_addBuildsetForSourceStampsWithDefaults( codebases, sourcestamps, exp_sourcestamps) @defer.inlineCallbacks def test_addBuildsetForChanges_one_change(self): sched = self.makeScheduler(name='n', builderNames=['b']) self.db.insertTestData([ fakedb.Change(changeid=13, sourcestampid=234), ]) bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=False, changeids=[13]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='power', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_properties(self): sched = self.makeScheduler(name='n', builderNames=['c']) self.db.insertTestData([ fakedb.Change(changeid=14, sourcestampid=234), ]) bsid, brids = yield sched.addBuildsetForChanges(reason='downstream', waited_for=False, changeids=[14]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='downstream', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_properties_with_virtual_builders(self): sched = self.makeScheduler(name='n', builderNames=['c'], properties={ 'virtual_builder_name': Interpolate("myproject-%(src::branch)s") }) self.db.insertTestData([ fakedb.SourceStamp(id=234, branch='dev1', project="linux"), fakedb.Change(changeid=14, sourcestampid=234, branch="dev1"), ]) bsid, brids = yield sched.addBuildsetForChanges(reason='downstream', waited_for=False, changeids=[14]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'virtual_builder_name': ("myproject-dev1", "Scheduler"), 'scheduler': ('n', 'Scheduler'), }, reason='downstream', scheduler='n', sourcestamps=[234]) @defer.inlineCallbacks def test_addBuildsetForChanges_multiple_changes_same_codebase(self): # This is a test for backwards compatibility # Changes from different repositories come together in one build sched = self.makeScheduler(name='n', builderNames=['b', 'c'], codebases={'cb': {'repository': 'http://repo'}}) # No codebaseGenerator means all changes have codebase == '' self.db.insertTestData([ fakedb.Change(changeid=13, codebase='cb', sourcestampid=12), fakedb.Change(changeid=14, codebase='cb', sourcestampid=11), fakedb.Change(changeid=15, codebase='cb', sourcestampid=10), ]) # note that the changeids are given out of order here; it should still # use the most recent bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=False, changeids=[14, 15, 13]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), }, reason='power', scheduler='n', 
sourcestamps=[10]) # sourcestampid from greatest changeid @defer.inlineCallbacks def test_addBuildsetForChanges_codebases_set_multiple_codebases(self): codebases = {'cbA': dict(repository='svn://A..', branch='stable', revision='13579'), 'cbB': dict( repository='svn://B..', branch='stable', revision='24680'), 'cbC': dict( repository='svn://C..', branch='stable', revision='12345'), 'cbD': dict( repository='svn://D..')} # Scheduler gets codebases that can be used to create extra sourcestamps # for repositories that have no changes sched = self.makeScheduler(name='n', builderNames=['b', 'c'], codebases=codebases) self.db.insertTestData([ fakedb.Change(changeid=12, codebase='cbA', sourcestampid=912), fakedb.Change(changeid=13, codebase='cbA', sourcestampid=913), fakedb.Change(changeid=14, codebase='cbA', sourcestampid=914), fakedb.Change(changeid=15, codebase='cbB', sourcestampid=915), fakedb.Change(changeid=16, codebase='cbB', sourcestampid=916), fakedb.Change(changeid=17, codebase='cbB', sourcestampid=917), # note: no changes for cbC or cbD ]) # note that the changeids are given out of order here; it should still # use the most recent for each codebase bsid, brids = yield sched.addBuildsetForChanges(reason='power', waited_for=True, changeids=[14, 12, 17, 16, 13, 15]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=True, builderids=[1, 2], external_idstring=None, reason='power', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[914, 917, dict(branch='stable', repository='svn://C..', codebase='cbC', project='', revision='12345'), dict(branch=None, repository='svn://D..', codebase='cbD', project='', revision=None) ] ) @defer.inlineCallbacks def test_addBuildsetForSourceStamp(self): sched = self.makeScheduler(name='n', builderNames=['b']) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[91, {'sourcestamp': True}]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, reason='whynot', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[91, {'sourcestamp': True}]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_explicit_builderNames(self): sched = self.makeScheduler(name='n', builderNames=['b', 'x', 'y']) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=True, sourcestamps=[ 91, {'sourcestamp': True}], builderNames=['x', 'y']) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=True, builderids=[2, 3], external_idstring=None, reason='whynot', scheduler='n', properties={ 'scheduler': ('n', 'Scheduler'), }, sourcestamps=[91, {'sourcestamp': True}]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_properties(self): props = properties.Properties(xxx="yyy") sched = self.makeScheduler(name='n', builderNames=['b']) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[91], properties=props) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1], external_idstring=None, properties={ 'xxx': ('yyy', 'TEST'), 'scheduler': ('n', 'Scheduler')}, reason='whynot', scheduler='n', sourcestamps=[91]) @defer.inlineCallbacks def 
test_addBuildsetForSourceStamp_combine_change_properties(self): sched = self.makeScheduler() self.master.db.insertTestData([ fakedb.SourceStamp(id=98, branch='stable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), fakedb.ChangeProperty(changeid=25, property_name='color', property_value='["pink","Change"]'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('testsched', 'Scheduler'), 'color': ('pink', 'Change')}, reason='whynot', scheduler='testsched', sourcestamps=[98]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_renderable_builderNames(self): @properties.renderer def names(props): if props.changes[0]['branch'] == 'stable': return ['c'] elif props.changes[0]['branch'] == 'unstable': return ['a', 'b'] sched = self.makeScheduler(name='n', builderNames=names) self.master.db.insertTestData([ fakedb.Builder(id=1, name='a'), fakedb.Builder(id=2, name='b'), fakedb.Builder(id=3, name='c'), fakedb.SourceStamp(id=98, branch='stable'), fakedb.SourceStamp(id=99, branch='unstable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), fakedb.Change(changeid=26, sourcestampid=99, branch='unstable'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[3], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler')}, reason='whynot', scheduler='n', sourcestamps=[98]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='because', waited_for=False, sourcestamps=[99]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler')}, reason='because', scheduler='n', sourcestamps=[99]) @defer.inlineCallbacks def test_addBuildsetForSourceStamp_list_of_renderable_builderNames(self): names = ['a', 'b', properties.Interpolate('%(prop:extra_builder)s')] sched = self.makeScheduler(name='n', builderNames=names) self.master.db.insertTestData([ fakedb.Builder(id=1, name='a'), fakedb.Builder(id=2, name='b'), fakedb.Builder(id=3, name='c'), fakedb.SourceStamp(id=98, branch='stable'), fakedb.Change(changeid=25, sourcestampid=98, branch='stable'), fakedb.ChangeProperty(changeid=25, property_name='extra_builder', property_value='["c","Change"]'), ]) bsid, brids = yield sched.addBuildsetForSourceStamps(reason='whynot', waited_for=False, sourcestamps=[98]) self.assertEqual((bsid, brids), self.exp_bsid_brids) self.master.data.updates.addBuildset.assert_called_with( waited_for=False, builderids=[1, 2, 3], external_idstring=None, properties={ 'scheduler': ('n', 'Scheduler'), 'extra_builder': ('c', 'Change')}, reason='whynot', scheduler='n', sourcestamps=[98]) def test_signature_addBuildsetForChanges(self): sched = self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForChanges, # Real self.fake_addBuildsetForChanges, # Real ) def addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): pass def test_signature_addBuildsetForSourceStamps(self): sched = 
self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForSourceStamps, # Real self.fake_addBuildsetForSourceStamps, # Fake ) def addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): pass def test_signature_addBuildsetForSourceStampsWithDefaults(self): sched = self.makeScheduler(builderNames=['xxx']) @self.assertArgSpecMatches( sched.addBuildsetForSourceStampsWithDefaults, # Real self.fake_addBuildsetForSourceStampsWithDefaults, # Fake ) def addBuildsetForSourceStampsWithDefaults(self, reason, sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): pass buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_basic.py000066400000000000000000000570651361162603000253060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.schedulers import basic from buildbot.test.fake import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class CommonStuffMixin: def makeScheduler(self, klass, **kwargs_override): kwargs = dict(name="tsched", treeStableTimer=60, builderNames=['tbuild']) kwargs.update(kwargs_override) self.master.db.insertTestData( [fakedb.Builder(name=builderName) for builderName in kwargs['builderNames']]) sched = self.attachScheduler( klass(**kwargs), self.OBJECTID, self.SCHEDULERID) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() # keep track of builds in self.events self.events = [] @self.assertArgSpecMatches(sched.addBuildsetForChanges) def addBuildsetForChanges( waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): self.assertEqual(external_idstring, None) self.assertEqual(reason, sched.reason) self.events.append('B%s@%d' % (repr(changeids).replace(' ', ''), self.clock.seconds())) return defer.succeed(None) sched.addBuildsetForChanges = addBuildsetForChanges # see self.assertConsumingChanges self.consumingChanges = None def startConsumingChanges(**kwargs): self.consumingChanges = kwargs return defer.succeed(None) sched.startConsumingChanges = startConsumingChanges return sched def assertConsumingChanges(self, **kwargs): self.assertEqual(self.consumingChanges, kwargs) class BaseBasicScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 244 SCHEDULERID = 4 # a custom subclass since we're testing the base class. 
This basically # re-implements SingleBranchScheduler, but with more asserts class Subclass(basic.BaseBasicScheduler): timer_started = False def getChangeFilter(self, *args, **kwargs): return kwargs.get('change_filter') def getTimerNameForChange(self, change): self.timer_started = True return "xxx" def getChangeClassificationsForTimer(self, schedulerid, timer_name): assert timer_name == "xxx" assert schedulerid == BaseBasicScheduler.SCHEDULERID return self.master.db.schedulers.getChangeClassifications(schedulerid) def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() # tests def test_constructor_positional_exception(self): with self.assertRaises(config.ConfigErrors): self.Subclass("tsched", "master", 60) @defer.inlineCallbacks def test_activate_no_treeStableTimer(self): cf = mock.Mock('cf') fII = mock.Mock('fII') sched = self.makeScheduler(self.Subclass, treeStableTimer=None, change_filter=cf, fileIsImportant=fII) self.db.schedulers.fakeClassifications(self.SCHEDULERID, {20: True}) yield sched.activate() # check that the scheduler has started to consume changes, and the # classifications *have* been flushed, since they will not be used self.assertConsumingChanges(fileIsImportant=fII, change_filter=cf, onlyImportant=False) self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) yield sched.deactivate() def test_subclass_fileIsImportant(self): class Subclass(self.Subclass): def fileIsImportant(self, change): return False sched = self.makeScheduler(Subclass, onlyImportant=True) self.assertEqual( Subclass.fileIsImportant.__get__(sched), sched.fileIsImportant) @defer.inlineCallbacks def test_activate_treeStableTimer(self): cf = mock.Mock() sched = self.makeScheduler( self.Subclass, treeStableTimer=10, change_filter=cf) self.db.schedulers.fakeClassifications(self.SCHEDULERID, {20: True}) self.master.db.insertTestData([ fakedb.Change(changeid=20), fakedb.SchedulerChange(schedulerid=self.SCHEDULERID, changeid=20, important=1) ]) yield sched.activate() # check that the scheduler has started to consume changes, and no # classifications have been flushed. 
Furthermore, the existing # classification should have been acted on, so the timer should be # running self.assertConsumingChanges(fileIsImportant=None, change_filter=cf, onlyImportant=False) self.db.schedulers.assertClassifications( self.SCHEDULERID, {20: True}) self.assertTrue(sched.timer_started) self.clock.advance(10) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_no_treeStableTimer_unimportant(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=None, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), False) self.assertEqual(self.events, []) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_no_treeStableTimer_important(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=None, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.assertEqual(self.events, ['B[13]@0']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_unimportant(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=10, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), False) self.assertEqual(self.events, []) self.clock.advance(10) self.assertEqual(self.events, []) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_important(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=10, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.clock.advance(10) self.assertEqual(self.events, ['B[13]@10']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_sequence(self): sched = self.makeScheduler( self.Subclass, treeStableTimer=9, branch='master') self.master.db.insertTestData([ fakedb.Change(changeid=1, branch='master', when_timestamp=1110), fakedb.ChangeFile(changeid=1, filename='readme.txt'), fakedb.Change(changeid=2, branch='master', when_timestamp=2220), fakedb.ChangeFile(changeid=2, filename='readme.txt'), fakedb.Change(changeid=3, branch='master', when_timestamp=3330), fakedb.ChangeFile(changeid=3, filename='readme.txt'), fakedb.Change(changeid=4, branch='master', when_timestamp=4440), fakedb.ChangeFile(changeid=4, filename='readme.txt'), ]) sched.activate() self.clock.advance(2220) # this important change arrives at 2220, so the stable timer will last # until 2229 yield sched.gotChange( self.makeFakeChange(branch='master', number=1, when=2220), True) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications(self.SCHEDULERID, {1: True}) # but another (unimportant) change arrives before then self.clock.advance(6) # to 2226 self.assertEqual(self.events, []) yield sched.gotChange( self.makeFakeChange(branch='master', number=2, when=2226), False) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications( self.SCHEDULERID, {1: True, 2: False}) self.clock.advance(3) # to 2229 self.assertEqual(self.events, []) self.clock.advance(3) # to 2232 self.assertEqual(self.events, []) # another important change arrives at 2232 yield sched.gotChange( self.makeFakeChange(branch='master', number=3, when=2232), True) self.assertEqual(self.events, []) self.db.schedulers.assertClassifications( self.SCHEDULERID, {1: True, 2: False, 3: True}) self.clock.advance(3) # to 2235 self.assertEqual(self.events, []) # finally, time to start the build! 
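# The clock advance that follows fires the stable timer and collapses changes
# 1-3 into a single buildset.  That is the behaviour an end-user configuration
# relies on: important changes arm (or re-arm) the timer, and one build covers
# everything that accumulated while the tree was unstable.  A minimal sketch of
# such a configuration, assuming the standard buildbot.plugins entry points
# (the builder name, branch and 60-second timer are illustrative values, not
# taken from these tests):
from buildbot.plugins import schedulers, util

on_commit = schedulers.SingleBranchScheduler(
    name="on-master-commit",
    change_filter=util.ChangeFilter(branch="master"),
    treeStableTimer=60,         # wait for 60 quiet seconds before building
    builderNames=["runtests"],  # assumed to match a builder configured elsewhere
)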
self.clock.advance(6) # to 2241 self.assertEqual(self.events, ['B[1,2,3]@2241']) self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) yield sched.deactivate() @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(self.Subclass) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(self.Subclass) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(self.Subclass) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) class SingleBranchScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): SCHEDULERID = 245 OBJECTID = 224455 codebases = {'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}} def makeFullScheduler(self, **kwargs): self.master.db.insertTestData( [fakedb.Builder(name=builderName) for builderName in kwargs['builderNames']]) sched = self.attachScheduler(basic.SingleBranchScheduler(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() return sched def mkbs(self, **kwargs): # create buildset for expected_buildset in assertBuildset. bs = dict(reason=self.sched.reason, external_idstring=None, sourcestampsetid=100, properties=[('scheduler', ('test', 'Scheduler'))]) bs.update(kwargs) return bs def mkss(self, **kwargs): # create sourcestamp for expected_sourcestamps in assertBuildset. ss = dict( branch='master', project='', repository='', sourcestampsetid=100) ss.update(kwargs) return ss def mkch(self, **kwargs): # create changeset and insert in database. 
chd = dict(branch='master', project='', repository='') chd.update(kwargs) ch = self.makeFakeChange(**chd) # fakedb.Change requires changeid instead of number chd['changeid'] = chd['number'] del chd['number'] self.db.insertTestData([fakedb.Change(**chd)]) return ch def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def test_constructor_no_reason(self): sched = self.makeScheduler( basic.SingleBranchScheduler, branch="master") self.assertEqual( sched.reason, "The SingleBranchScheduler scheduler named 'tsched' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( basic.SingleBranchScheduler, branch="master", reason="Changeset") self.assertEqual(sched.reason, "Changeset") def test_constructor_branch_mandatory(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60) def test_constructor_no_branch_but_filter(self): # this shouldn't fail basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, builderNames=['a', 'b'], change_filter=mock.Mock()) def test_constructor_branches_forbidden(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, branches='x') @defer.inlineCallbacks def test_gotChange_treeStableTimer_important(self): # this looks suspiciously like the same test above, because SingleBranchScheduler # is about the same as the test subclass used above sched = self.makeScheduler(basic.SingleBranchScheduler, treeStableTimer=10, branch='master') sched.activate() yield sched.gotChange( self.makeFakeChange(branch='master', number=13), True) self.clock.advance(10) self.assertEqual(self.events, ['B[13]@10']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_createAbsoluteSourceStamps_saveCodebase(self): # check codebase is stored after receiving change. 
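# The createAbsoluteSourceStamps tests that start here mirror a configuration
# in which the scheduler tracks two codebases and remembers the last change it
# saw for each one, so that a build triggered by one codebase still pins the
# other to a known revision.  A sketch of that configuration, assuming the
# standard buildbot.plugins entry points (repository URLs and builder names
# are illustrative):
from buildbot.plugins import schedulers

multi_codebase = schedulers.SingleBranchScheduler(
    name="multi-codebase",
    branch="master",
    treeStableTimer=None,
    builderNames=["runtests"],            # assumed builder name
    codebases={
        "a": {"repository": "https://example.org/a.git", "branch": "master"},
        "b": {"repository": "https://example.org/b.git", "branch": "master"},
    },
    createAbsoluteSourceStamps=True,      # remember the latest change per codebase
)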
sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='SingleBranchScheduler')]) yield sched.activate() yield sched.gotChange(self.mkch(codebase='a', revision='1234:abc', repository='A', number=0), True) yield sched.gotChange(self.mkch(codebase='b', revision='2345:bcd', repository='B', number=1), True) self.db.state.assertState(self.OBJECTID, lastCodebases={ 'a': dict(branch='master', repository='A', revision='1234:abc', lastChange=0), 'b': dict(branch='master', repository='B', revision='2345:bcd', lastChange=1)}) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_createAbsoluteSourceStamps_older_change(self): # check codebase is not stored if it's older than the most recent sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='SingleBranchScheduler'), fakedb.ObjectState(objectid=self.OBJECTID, name='lastCodebases', value_json='{"a": {"branch": "master", "repository": "A", ' '"revision": "5555:def", "lastChange": 20}}')]) yield sched.activate() # this change is not recorded, since it's older than # change 20 yield sched.gotChange(self.mkch(codebase='a', revision='1234:abc', repository='A', number=10), True) self.db.state.assertState(self.OBJECTID, lastCodebases={ 'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)}) yield sched.deactivate() @defer.inlineCallbacks def test_getCodebaseDict(self): sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=True) sched._lastCodebases = {'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)} cbd = yield sched.getCodebaseDict('a') self.assertEqual(cbd, dict(branch='master', repository='A', revision='5555:def', lastChange=20)) @defer.inlineCallbacks def test_getCodebaseDict_no_createAbsoluteSourceStamps(self): sched = self.makeFullScheduler(name='test', builderNames=['test'], treeStableTimer=None, branch='master', codebases=self.codebases, createAbsoluteSourceStamps=False) sched._lastCodebases = {'a': dict(branch='master', repository='A', revision='5555:def', lastChange=20)} cbd = yield sched.getCodebaseDict('a') # _lastCodebases is ignored self.assertEqual(cbd, {'branch': 'master', 'repository': ''}) class AnyBranchScheduler(CommonStuffMixin, scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): SCHEDULERID = 6 OBJECTID = 246 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def test_constructor_branch_forbidden(self): with self.assertRaises(config.ConfigErrors): basic.SingleBranchScheduler(name="tsched", treeStableTimer=60, branch='x') @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_branches(self): """Two changes with different branches get different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master', 'devel', 'boring']) sched.activate() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', number=13), True) yield self.clock.advance(1) # time is now 1 yield 
sched.gotChange(mkch(branch='master', number=14), False) yield sched.gotChange(mkch(branch='boring', number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='devel', number=16), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[16]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_repositories(self): """Two repositories, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) yield sched.activate() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', repository="repo", number=13), True) yield self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', repository="repo", number=14), False) yield sched.gotChange(mkch(branch='master', repository="other_repo", number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', repository="other_repo", number=17), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_projects(self): """Two projects, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) sched.startService() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', project="proj", number=13), True) yield self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', project="proj", number=14), False) yield sched.gotChange(mkch(branch='master', project="other_proj", number=15), False) yield self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', project="other_proj", number=17), True) yield self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() @defer.inlineCallbacks def test_gotChange_treeStableTimer_multiple_codebases(self): """Two codebases, even with the same branch name, have different treeStableTimers""" sched = self.makeScheduler(basic.AnyBranchScheduler, treeStableTimer=10, branches=['master']) sched.startService() def mkch(**kwargs): ch = self.makeFakeChange(**kwargs) self.db.changes.fakeAddChangeInstance(ch) return ch yield sched.gotChange(mkch(branch='master', codebase="base", number=13), True) self.clock.advance(1) # time is now 1 yield sched.gotChange(mkch(branch='master', codebase="base", number=14), False) yield sched.gotChange(mkch(branch='master', codebase="other_base", number=15), False) self.clock.pump([1] * 4) # time is now 5 yield sched.gotChange(mkch(branch='master', codebase="other_base", number=17), True) self.clock.pump([1] * 10) # time is now 15 self.assertEqual(self.events, ['B[13,14]@11', 'B[15,17]@15']) yield sched.deactivate() buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_dependent.py000066400000000000000000000207021361162603000261570ustar00rootroot00000000000000# This file is part of Buildbot. 
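# The AnyBranchScheduler tests above check that a separate stable timer is kept
# per branch, and likewise per repository, project and codebase, so unrelated
# streams of changes never collapse into one buildset.  A sketch of the
# corresponding configuration, assuming the standard buildbot.plugins entry
# points (branch list and builder name are illustrative):
from buildbot.plugins import schedulers

per_branch = schedulers.AnyBranchScheduler(
    name="per-branch",
    branches=["master", "devel"],  # one independent timer per branch
    treeStableTimer=10,
    builderNames=["runtests"],
)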
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.schedulers import base from buildbot.schedulers import dependent from buildbot.test.fake import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin SUBMITTED_AT_TIME = 111111111 COMPLETE_AT_TIME = 222222222 OBJECTID = 33 SCHEDULERID = 133 UPSTREAM_NAME = 'uppy' class Dependent(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, upstream=None): # build a fake upstream scheduler class Upstream(base.BaseScheduler): def __init__(self, name): self.name = name if not upstream: upstream = Upstream(UPSTREAM_NAME) sched = dependent.Dependent(name='n', builderNames=['b'], upstream=upstream) self.attachScheduler(sched, OBJECTID, SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) return sched def assertBuildsetSubscriptions(self, bsids=None): self.db.state.assertState(OBJECTID, upstream_bsids=bsids) # tests # NOTE: these tests take advantage of the fact that all of the fake # scheduler operations are synchronous, and thus do not return a Deferred. # The Deferred from trigger() is completely processed before this test # method returns. def test_constructor_string_arg(self): with self.assertRaises(config.ConfigErrors): self.makeScheduler(upstream='foo') @defer.inlineCallbacks def test_activate(self): sched = self.makeScheduler() sched.activate() self.assertEqual( sorted([q.filter for q in sched.master.mq.qrefs]), [('buildsets', None, 'complete',), ('buildsets', None, 'new',), ('schedulers', '133', 'updated')]) yield sched.deactivate() self.assertEqual([q.filter for q in sched.master.mq.qrefs], [('schedulers', '133', 'updated')]) def sendBuildsetMessage(self, scheduler_name=None, results=-1, complete=False): """Call callConsumer with a buildset message. Most of the values here are hard-coded to correspond to those in do_test.""" msg = dict( bsid=44, sourcestamps=[], # blah blah blah submitted_at=SUBMITTED_AT_TIME, complete=complete, complete_at=COMPLETE_AT_TIME if complete else None, external_idstring=None, reason='Because', results=results if complete else -1, parent_buildid=None, parent_relationship=None, ) if not complete: msg['scheduler'] = scheduler_name self.master.mq.callConsumer( ('buildsets', '44', 'complete' if complete else 'new'), msg) def do_test(self, scheduler_name, expect_subscription, results, expect_buildset): """Test the dependent scheduler by faking a buildset and subsequent completion from an upstream scheduler. 
@param scheduler_name: upstream scheduler's name @param expect_subscription: whether to expect the dependent to subscribe to the buildset @param results: results of the upstream scheduler's buildset @param expect_buidlset: whether to expect the dependent to generate a new buildset in response """ sched = self.makeScheduler() sched.activate() # announce a buildset with a matching name.. self.db.insertTestData([ fakedb.SourceStamp(id=93, revision='555', branch='master', project='proj', repository='repo', codebase='cb'), fakedb.Buildset( id=44, submitted_at=SUBMITTED_AT_TIME, complete=False, complete_at=None, external_idstring=None, reason='Because', results=-1, ), fakedb.BuildsetSourceStamp(buildsetid=44, sourcestampid=93), ]) self.sendBuildsetMessage(scheduler_name=scheduler_name, complete=False) # check whether scheduler is subscribed to that buildset if expect_subscription: self.assertBuildsetSubscriptions([44]) else: self.assertBuildsetSubscriptions([]) # pretend that the buildset is finished self.db.buildsets.fakeBuildsetCompletion(bsid=44, result=results) self.sendBuildsetMessage(results=results, complete=True) # and check whether a buildset was added in response if expect_buildset: self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=None, # defaults external_idstring=None, properties=None, reason='downstream', sourcestamps=[93])), ]) else: self.assertEqual(self.addBuildsetCalls, []) def test_related_buildset_SUCCESS(self): return self.do_test(UPSTREAM_NAME, True, SUCCESS, True) def test_related_buildset_WARNINGS(self): return self.do_test(UPSTREAM_NAME, True, WARNINGS, True) def test_related_buildset_FAILURE(self): return self.do_test(UPSTREAM_NAME, True, FAILURE, False) def test_unrelated_buildset(self): return self.do_test('unrelated', False, SUCCESS, False) @defer.inlineCallbacks def test_getUpstreamBuildsets_missing(self): sched = self.makeScheduler() # insert some state, with more bsids than exist self.db.insertTestData([ fakedb.SourceStamp(id=1234), fakedb.Buildset(id=11), fakedb.Buildset(id=13), fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=1234), fakedb.Object(id=OBJECTID), fakedb.ObjectState(objectid=OBJECTID, name='upstream_bsids', value_json='[11,12,13]'), ]) # check return value (missing 12) self.assertEqual((yield sched._getUpstreamBuildsets()), [(11, [], False, -1), (13, [1234], False, -1)]) # and check that it wrote the correct value back to the state self.db.state.assertState(OBJECTID, upstream_bsids=[11, 13]) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler() expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler() yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler() yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_forcesched.py000066400000000000000000001055011361162603000263170ustar00rootroot00000000000000# 
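# The Dependent scheduler tests above model the usual two-stage pipeline: the
# downstream scheduler subscribes to the upstream scheduler's buildsets and
# only triggers once an upstream buildset completes with SUCCESS or WARNINGS
# (a FAILURE produces no downstream build).  A sketch of that pairing,
# assuming the standard buildbot.plugins entry points (scheduler and builder
# names are illustrative):
from buildbot.plugins import schedulers, util

tests = schedulers.SingleBranchScheduler(
    name="tests",
    change_filter=util.ChangeFilter(branch="master"),
    treeStableTimer=None,
    builderNames=["runtests"],
)
package = schedulers.Dependent(
    name="package",
    upstream=tests,              # fires only after a successful "tests" buildset
    builderNames=["package"],
)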
This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.schedulers.forcesched import AnyPropertyParameter from buildbot.schedulers.forcesched import BaseParameter from buildbot.schedulers.forcesched import BooleanParameter from buildbot.schedulers.forcesched import ChoiceStringParameter from buildbot.schedulers.forcesched import CodebaseParameter from buildbot.schedulers.forcesched import CollectedValidationError from buildbot.schedulers.forcesched import FileParameter from buildbot.schedulers.forcesched import FixedParameter from buildbot.schedulers.forcesched import ForceScheduler from buildbot.schedulers.forcesched import IntParameter from buildbot.schedulers.forcesched import NestedParameter from buildbot.schedulers.forcesched import PatchParameter from buildbot.schedulers.forcesched import StringParameter from buildbot.schedulers.forcesched import UserNameParameter from buildbot.schedulers.forcesched import oneCodebase from buildbot.test.util import scheduler from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class TestForceScheduler(scheduler.SchedulerMixin, ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 19 SCHEDULERID = 9 maxDiff = None def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, name='testsched', builderNames=None, **kw): if builderNames is None: builderNames = ['a', 'b'] sched = self.attachScheduler( ForceScheduler(name=name, builderNames=builderNames, **kw), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) sched.master.config = config.MasterConfig() self.assertEqual(sched.name, name) return sched # tests def test_compare_branch(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[]), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( branch=FixedParameter("branch", "fishing/pole")))) def test_compare_reason(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], reason=FixedParameter("reason", "no fish for you!")), ForceScheduler(name="testched", builderNames=[], reason=FixedParameter("reason", "thanks for the fish!"))) def test_compare_revision(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( revision=FixedParameter("revision", "fish-v1"))), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( revision=FixedParameter("revision", "fish-v2")))) def test_compare_repository(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( repository=FixedParameter("repository", "git://pond.org/fisher.git"))), ForceScheduler( name="testched", builderNames=[], 
codebases=oneCodebase( repository=FixedParameter("repository", "svn://ocean.com/trawler/")))) def test_compare_project(self): self.assertNotEqual( ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( project=FixedParameter("project", "fisher"))), ForceScheduler( name="testched", builderNames=[], codebases=oneCodebase( project=FixedParameter("project", "trawler")))) def test_compare_username(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[]), ForceScheduler(name="testched", builderNames=[], username=FixedParameter("username", "The Fisher King "))) def test_compare_properties(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], properties=[]), ForceScheduler(name="testched", builderNames=[], properties=[FixedParameter("prop", "thanks for the fish!")])) def test_compare_codebases(self): self.assertNotEqual( ForceScheduler(name="testched", builderNames=[], codebases=['bar']), ForceScheduler(name="testched", builderNames=[], codebases=['foo'])) @defer.inlineCallbacks def test_basicForce(self): sched = self.makeScheduler() res = yield sched.force('user', builderNames=['a'], branch='a', reason='because', revision='c', repository='d', project='p' ) # only one builder forced, so there should only be one brid self.assertEqual(res, (500, {1000: 100})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) @defer.inlineCallbacks def test_basicForce_reasonString(self): """Same as above, but with a reasonString""" sched = self.makeScheduler( reasonString='%(owner)s wants it %(reason)s') res = yield sched.force('user', builderNames=['a'], branch='a', reason='because', revision='c', repository='d', project='p' ) bsid, brids = res # only one builder forced, so there should only be one brid self.assertEqual(len(brids), 1) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': ['a'], 'properties': {'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form')}, 'reason': 'user wants it because', 'sourcestamps': [{'branch': 'a', 'codebase': '', 'project': 'p', 'repository': 'd', 'revision': 'c'}], 'waited_for': False}), ]) (bsid, dict(reason="user wants it because", brids=brids, external_idstring=None, properties=[('owner', ('user', 'Force Build Form')), ('reason', ('because', 'Force Build Form')), ('scheduler', ('testsched', 'Scheduler')), ], sourcestampsetid=100), {'': dict(branch='a', revision='c', repository='d', codebase='', project='p', sourcestampsetid=100) }) @defer.inlineCallbacks def test_force_allBuilders(self): sched = self.makeScheduler() res = yield sched.force('user', branch='a', reason='because', revision='c', repository='d', project='p', ) self.assertEqual(res, (500, {1000: 100, 1001: 101})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a', 'b'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) @defer.inlineCallbacks def test_force_someBuilders(self): sched = 
self.makeScheduler(builderNames=['a', 'b', 'c']) res = yield sched.force('user', builderNames=['a', 'b'], branch='a', reason='because', revision='c', repository='d', project='p', ) self.assertEqual(res, (500, {1000: 100, 1001: 101})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a', 'b'], waited_for=False, properties={ 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), }, reason="A build was forced by 'user': because", sourcestamps=[ {'codebase': '', 'branch': 'a', 'revision': 'c', 'repository': 'd', 'project': 'p'}, ])), ]) def test_bad_codebases(self): # codebases must be a list of either string or BaseParameter types with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' must be a " "list of strings or CodebaseParameter objects:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[123],) with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' must be a " "list of strings or CodebaseParameter objects:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[IntParameter('foo')]) # codebases cannot be empty with self.assertRaisesConfigError( "ForceScheduler 'foo': 'codebases' cannot be " "empty; use [CodebaseParameter(codebase='', hide=True)] if needed:"): ForceScheduler(name='foo', builderNames=['bar'], codebases=[]) # codebases cannot be a dictionary # dictType on Python 3 is: "" # dictType on Python 2 is: "" dictType = str(type({})) errMsg = ("ForceScheduler 'foo': 'codebases' should be a list " "of strings or CodebaseParameter, " "not {}".format(dictType)) with self.assertRaisesConfigError(errMsg): ForceScheduler(name='foo', builderNames=['bar'], codebases={'cb': {'branch': 'trunk'}}) @defer.inlineCallbacks def test_good_codebases(self): sched = self.makeScheduler(codebases=['foo', CodebaseParameter('bar')]) res = yield sched.force('user', builderNames=['a'], reason='because', foo_branch='a', foo_revision='c', foo_repository='d', foo_project='p', bar_branch='a2', bar_revision='c2', bar_repository='d2', bar_project='p2' ) bsid, brids = res expProperties = { 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), } self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expProperties, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': 'a2', 'project': 'p2', 'repository': 'd2', 'revision': 'c2', 'codebase': 'bar'}, {'branch': 'a', 'project': 'p', 'repository': 'd', 'revision': 'c', 'codebase': 'foo'}, ])), ]) @defer.inlineCallbacks def test_codebase_with_patch(self): sched = self.makeScheduler(codebases=['foo', CodebaseParameter('bar', patch=PatchParameter())]) res = yield sched.force('user', builderNames=['a'], reason='because', foo_branch='a', foo_revision='c', foo_repository='d', foo_project='p', bar_branch='a2', bar_revision='c2', bar_repository='d2', bar_project='p2', bar_patch_body="xxx" ) bsid, brids = res expProperties = { 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), } self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expProperties, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': 'a2', 'project': 'p2', 'repository': 'd2', 'revision': 'c2', 'codebase': 'bar', 'patch_body': 'xxx', 'patch_author': '', 'patch_subdir': '.', 'patch_comment': '', 'patch_level': 1}, 
{'branch': 'a', 'project': 'p', 'repository': 'd', 'revision': 'c', 'codebase': 'foo'}, ])), ]) def formatJsonForTest(self, gotJson): ret = "" linestart = "expectJson='" spaces = 7 * 4 + 2 while len(gotJson) > (90 - spaces): gotJson = " " * spaces + linestart + gotJson pos = gotJson[:100].rfind(",") if pos > 0: pos += 2 ret += gotJson[:pos] + "'\n" gotJson = gotJson[pos:] linestart = "'" ret += " " * spaces + linestart + gotJson + "')\n" return ret # value = the value to be sent with the parameter (ignored if req is set) # expect = the expected result (can be an exception type) # klass = the parameter class type # req = use this request instead of the auto-generated one based on value @defer.inlineCallbacks def do_ParameterTest(self, expect, klass, # None=one prop, Exception=exception, dict=many props expectKind=None, owner='user', value=None, req=None, expectJson=None, **kwargs): name = kwargs.setdefault('name', 'p1') # construct one if needed if isinstance(klass, type): prop = klass(**kwargs) else: prop = klass self.assertEqual(prop.name, name) self.assertEqual(prop.label, kwargs.get('label', prop.name)) if expectJson is not None: gotSpec = prop.getSpec() gotJson = json.dumps(gotSpec) expectSpec = json.loads(expectJson) if gotSpec != expectSpec: try: import xerox # pylint: disable=import-outside-toplevel formatted = self.formatJsonForTest(gotJson) print( "You may update the test with (copied to clipboard):\n" + formatted) xerox.copy(formatted) input() except ImportError: print("Note: for quick fix, pip install xerox") self.assertEqual(gotSpec, expectSpec) sched = self.makeScheduler(properties=[prop]) if not req: req = {name: value, 'reason': 'because'} try: bsid, brids = yield sched.force(owner, builderNames=['a'], **req) except Exception as e: if expectKind is not Exception: # an exception is not expected raise if not isinstance(e, expect): # the exception is the wrong kind raise return None # success expect_props = { 'owner': ('user', 'Force Build Form'), 'reason': ('because', 'Force Build Form'), } if expectKind is None: expect_props[name] = (expect, 'Force Build Form') elif expectKind is dict: for k, v in expect.items(): expect_props[k] = (v, 'Force Build Form') else: self.fail("expectKind is wrong type!") # only forced on 'a' self.assertEqual((bsid, brids), (500, {1000: 100})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=['a'], waited_for=False, properties=expect_props, reason="A build was forced by 'user': because", sourcestamps=[ {'branch': '', 'project': '', 'repository': '', 'revision': '', 'codebase': ''}, ])), ]) def test_StringParameter(self): self.do_ParameterTest(value="testedvalue", expect="testedvalue", klass=StringParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "text", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"size": 10, "autopopulate": null}') def test_StringParameter_Required(self): self.do_ParameterTest(value=" ", expect=CollectedValidationError, expectKind=Exception, klass=StringParameter, required=True) def test_StringParameter_maxsize(self): self.do_ParameterTest(value="xx" * 20, expect=CollectedValidationError, expectKind=Exception, klass=StringParameter, maxsize=10) def test_FileParameter_maxsize(self): self.do_ParameterTest(value="xx" * 20, expect=CollectedValidationError, expectKind=Exception, klass=FileParameter, maxsize=10) def test_FileParameter(self): self.do_ParameterTest(value="xx", 
expect="xx", klass=FileParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "file", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, ' '"maxsize": 10485760, "autopopulate": null}') def test_PatchParameter(self): self.do_ParameterTest(req=dict(p1_author='me', reason="because"), expect={ 'author': 'me', 'body': '', 'comment': '', 'level': 1, 'subdir': '.'}, klass=PatchParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", "autopopulate": null, ' '"tablabel": "p1", "type": "nested", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"layout": "vertical", "columns": 1, "fields": [{"name": "body", ' '"fullName": "p1_body", "label": "body", "tablabel": "body", "autopopulate": null, ' '"type": "file", "default": "", "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": 10485760}, {"name": "level", ' '"fullName": "p1_level", "label": "level", "tablabel": "level", ' '"type": "int", "default": 1, "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10, "autopopulate": null}, ' '{"name": "author", "fullName": "p1_author", "label": "author", ' '"tablabel": "author", "type": "text", "default": "", "autopopulate": null, ' '"required": false, "multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "size": 10}, {"name": "comment", "autopopulate": null, ' '"fullName": "p1_comment", "label": "comment", "tablabel": "comment", ' '"type": "text", "default": "", "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10}, ' '{"name": "subdir", "fullName": "p1_subdir", "label": "subdir", ' '"tablabel": "subdir", "type": "text", "default": ".", "autopopulate": null, ' '"required": false, "multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "size": 10}]}') def test_IntParameter(self): self.do_ParameterTest(value="123", expect=123, klass=IntParameter, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "int", "default": 0, "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"size": 10, "autopopulate": null}') def test_FixedParameter(self): self.do_ParameterTest(value="123", expect="321", klass=FixedParameter, default="321", expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "fixed", "default": "321", ' '"required": false, "multiple": false, "regex": null, "hide": true, ' '"maxsize": null, "autopopulate": null}') def test_BooleanParameter_True(self): req = dict(p1=True, reason='because') self.do_ParameterTest(value="123", expect=True, klass=BooleanParameter, req=req, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "bool", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, ' '"maxsize": null, "autopopulate": null}') def test_BooleanParameter_False(self): req = dict(p2=True, reason='because') self.do_ParameterTest(value="123", expect=False, klass=BooleanParameter, req=req) def test_UserNameParameter(self): email = "test " self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson='{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", ' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": 
false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}') def test_UserNameParameterIsValidMail(self): email = "test@buildbot.net" self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson='{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", ' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}') def test_UserNameParameterIsValidMailBis(self): email = "" self.do_ParameterTest(value=email, expect=email, klass=UserNameParameter(), name="username", label="Your name:", expectJson='{"name": "username", "fullName": "username", ' '"label": "Your name:", "tablabel": "Your name:", "type": "username", ' '"default": "", "required": false, "multiple": false, "regex": null, ' '"hide": false, "maxsize": null, "size": 30, ' '"need_email": true, "autopopulate": null}') def test_ChoiceParameter(self): self.do_ParameterTest(value='t1', expect='t1', klass=ChoiceStringParameter, choices=[ 't1', 't2'], expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "list", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"choices": ["t1", "t2"], "strict": true, "autopopulate": null}') def test_ChoiceParameterError(self): self.do_ParameterTest(value='t3', expect=CollectedValidationError, expectKind=Exception, klass=ChoiceStringParameter, choices=[ 't1', 't2'], debug=False) def test_ChoiceParameterError_notStrict(self): self.do_ParameterTest(value='t1', expect='t1', strict=False, klass=ChoiceStringParameter, choices=['t1', 't2']) def test_ChoiceParameterMultiple(self): self.do_ParameterTest(value=['t1', 't2'], expect=['t1', 't2'], klass=ChoiceStringParameter, choices=['t1', 't2'], multiple=True, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", ' '"tablabel": "p1", "type": "list", "default": "", "required": false, ' '"multiple": true, "regex": null, "hide": false, "maxsize": null, ' '"choices": ["t1", "t2"], "strict": true, "autopopulate": null}') def test_ChoiceParameterMultipleError(self): self.do_ParameterTest(value=['t1', 't3'], expect=CollectedValidationError, expectKind=Exception, klass=ChoiceStringParameter, choices=[ 't1', 't2'], multiple=True, debug=False) def test_NestedParameter(self): fields = [ IntParameter(name="foo") ] self.do_ParameterTest(req=dict(p1_foo='123', reason="because"), expect=dict(foo=123), klass=NestedParameter, fields=fields, expectJson='{"name": "p1", "fullName": "p1", "label": "p1", "autopopulate": null, ' '"tablabel": "p1", "type": "nested", "default": "", "required": false, ' '"multiple": false, "regex": null, "hide": false, "maxsize": null, ' '"layout": "vertical", "columns": 1, "fields": [{"name": "foo", ' '"fullName": "p1_foo", "label": "foo", "tablabel": "foo", "autopopulate": null, ' '"type": "int", "default": 0, "required": false, "multiple": false, ' '"regex": null, "hide": false, "maxsize": null, "size": 10}]}') def test_NestedNestedParameter(self): fields = [ NestedParameter(name="inner", fields=[ StringParameter(name='str'), AnyPropertyParameter(name='any') ]), IntParameter(name="foo") ] self.do_ParameterTest(req=dict(p1_foo='123', p1_inner_str="bar", p1_inner_any_name="hello", p1_inner_any_value="world", reason="because"), expect=dict( foo=123, inner=dict(str="bar", hello="world")), klass=NestedParameter, 
fields=fields) def test_NestedParameter_nullname(self): # same as above except "p1" and "any" are skipped fields = [ NestedParameter(name="inner", fields=[ StringParameter(name='str'), AnyPropertyParameter(name='') ]), IntParameter(name="foo"), NestedParameter(name='bar', fields=[ NestedParameter( name='', fields=[AnyPropertyParameter(name='a')]), NestedParameter( name='', fields=[AnyPropertyParameter(name='b')]) ]) ] self.do_ParameterTest(req=dict(foo='123', inner_str="bar", inner_name="hello", inner_value="world", reason="because", bar_a_name="a", bar_a_value="7", bar_b_name="b", bar_b_value="8"), expect=dict(foo=123, inner=dict(str="bar", hello="world"), bar={'a': '7', 'b': '8'}), expectKind=dict, klass=NestedParameter, fields=fields, name='') def test_bad_reason(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': reason must be a StringParameter"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], reason="foo") def test_bad_username(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': username must be a StringParameter"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo") def test_notstring_name(self): with self.assertRaisesConfigError( "ForceScheduler name must be a unicode string:"): ForceScheduler(name=1234, builderNames=[], codebases=['bar'], username="foo") def test_notidentifier_name(self): # FIXME: this test should be removed eventually when bug 3460 gets a # real fix with self.assertRaisesConfigError( "ForceScheduler name must be an identifier: 'my scheduler'"): ForceScheduler(name='my scheduler', builderNames=[], codebases=['bar'], username="foo") def test_emptystring_name(self): with self.assertRaisesConfigError( "ForceScheduler name must not be empty:"): ForceScheduler(name='', builderNames=[], codebases=['bar'], username="foo") def test_integer_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=1234, codebases=['bar'], username="foo") def test_listofints_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=[1234], codebases=['bar'], username="foo") def test_listofunicode_builderNames(self): ForceScheduler(name='testsched', builderNames=['a', 'b']) def test_listofmixed_builderNames(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': builderNames must be a list of strings:"): ForceScheduler(name='testsched', builderNames=['test', 1234], codebases=['bar'], username="foo") def test_integer_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo", properties=1234) def test_listofints_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo", properties=[1234, 2345]) def test_listofmixed_properties(self): with self.assertRaisesConfigError( "ForceScheduler 'testsched': properties must be a list of BaseParameters:"): ForceScheduler(name='testsched', builderNames=[], codebases=['bar'], username="foo", properties=[BaseParameter(name="test",), 4567]) def test_novalue_to_parameter(self): with self.assertRaisesConfigError( "Use 
default='1234' instead of value=... to give a default Parameter value"): BaseParameter(name="test", value="1234") buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_manager.py000066400000000000000000000157701361162603000256340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers import base from buildbot.schedulers import manager class SchedulerManager(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.next_objectid = 13 self.objectids = {} self.master = mock.Mock() self.master.master = self.master def getObjectId(sched_name, class_name): k = (sched_name, class_name) try: rv = self.objectids[k] except KeyError: rv = self.objectids[k] = self.next_objectid self.next_objectid += 1 return defer.succeed(rv) self.master.db.state.getObjectId = getObjectId def getScheduler(sched_id): return defer.succeed(dict(enabled=True)) self.master.db.schedulers.getScheduler = getScheduler self.new_config = mock.Mock() self.sm = manager.SchedulerManager() yield self.sm.setServiceParent(self.master) yield self.sm.startService() def tearDown(self): if self.sm.running: return self.sm.stopService() class Sched(base.BaseScheduler): # changing sch.attr should make a scheduler look "updated" compare_attrs = ('attr', ) already_started = False reconfig_count = 0 def startService(self): assert not self.already_started assert self.master is not None assert self.objectid is not None self.already_started = True return super().startService() @defer.inlineCallbacks def stopService(self): yield super().stopService() assert self.master is not None assert self.objectid is not None def __repr__(self): return "{}(attr={})".format(self.__class__.__name__, self.attr) class ReconfigSched(Sched): def reconfigServiceWithSibling(self, new_config): self.reconfig_count += 1 self.attr = new_config.attr return super().reconfigServiceWithSibling(new_config) class ReconfigSched2(ReconfigSched): pass def makeSched(self, cls, name, attr='alpha'): sch = cls(name=name, builderNames=['x'], properties={}) sch.attr = attr return sch # tests @defer.inlineCallbacks def test_reconfigService_add_and_change_and_remove(self): sch1 = self.makeSched(self.ReconfigSched, 'sch1', attr='alpha') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.reconfig_count, 1) sch1_new = self.makeSched(self.ReconfigSched, 'sch1', attr='beta') sch2 = self.makeSched(self.ReconfigSched, 'sch2', attr='alpha') self.new_config.schedulers = dict(sch1=sch1_new, sch2=sch2) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 is still the active scheduler, and has been reconfig'd, # and has the correct 
attribute self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.attr, 'beta') self.assertEqual(sch1.reconfig_count, 2) self.assertIdentical(sch1_new.parent, None) self.assertIdentical(sch1_new.master, None) self.assertIdentical(sch2.parent, self.sm) self.assertIdentical(sch2.master, self.master) self.new_config.schedulers = {} self.assertEqual(sch1.running, True) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertEqual(sch1.running, False) @defer.inlineCallbacks def test_reconfigService_class_name_change(self): sch1 = self.makeSched(self.ReconfigSched, 'sch1') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) self.assertEqual(sch1.reconfig_count, 1) sch1_new = self.makeSched(self.ReconfigSched2, 'sch1') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had its class name change, so sch1_new is now the active # instance self.assertIdentical(sch1_new.parent, self.sm) self.assertIdentical(sch1_new.master, self.master) @defer.inlineCallbacks def test_reconfigService_not_reconfigurable(self): sch1 = self.makeSched(self.Sched, 'sch1', attr='beta') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) sch1_new = self.makeSched(self.Sched, 'sch1', attr='alpha') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had parameter change but is not reconfigurable, so sch1_new is now the active # instance self.assertEqual(sch1_new.running, True) self.assertEqual(sch1.running, False) self.assertIdentical(sch1_new.parent, self.sm) self.assertIdentical(sch1_new.master, self.master) @defer.inlineCallbacks def test_reconfigService_not_reconfigurable_no_change(self): sch1 = self.makeSched(self.Sched, 'sch1', attr='beta') self.new_config.schedulers = dict(sch1=sch1) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) self.assertIdentical(sch1.parent, self.sm) self.assertIdentical(sch1.master, self.master) sch1_new = self.makeSched(self.Sched, 'sch1', attr='beta') self.new_config.schedulers = dict(sch1=sch1_new) yield self.sm.reconfigServiceWithBuildbotConfig(self.new_config) # sch1 had its class name change, so sch1_new is now the active # instance self.assertIdentical(sch1_new.parent, None) self.assertEqual(sch1_new.running, False) self.assertIdentical(sch1_new.master, None) self.assertEqual(sch1.running, True) buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_timed_Nightly.py000066400000000000000000000460111361162603000270120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
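# The Nightly scheduler tests that follow drive the scheduler against a fake
# clock; in a real master the same scheduler is declared with cron-like fields
# and, optionally, onlyIfChanged.  A sketch of such a configuration, assuming
# the standard buildbot.plugins entry points (branch, builder name and the
# 03:00 start time are illustrative):
from buildbot.plugins import schedulers

nightly = schedulers.Nightly(
    name="nightly",
    branch="default",
    builderNames=["full-rebuild"],   # assumed builder name
    hour=3, minute=0,                # 03:00 in the master's local time
    onlyIfChanged=True,              # skip the run if nothing changed
)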
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import time import mock from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.changes import filter from buildbot.schedulers import timed from buildbot.test.fake import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class Nightly(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): try: datetime.datetime.fromtimestamp(1) except OSError: skip = ("Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") OBJECTID = 132 SCHEDULERID = 32 # not all timezones are even multiples of 1h from GMT. This variable # holds the number of seconds ahead of the hour for the current timezone. # This is then added to the clock before each test is run (to get to 0 # minutes past the hour) and subtracted before the time offset is reported. localtime_offset = time.timezone % 3600 def makeScheduler(self, **kwargs): sched = self.attachScheduler(timed.Nightly(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True) self.master.db.insertTestData( [fakedb.Builder(name=bname) for bname in kwargs.get("builderNames", [])]) # add a Clock to help checking timing issues sched._reactor = self.reactor self.reactor.advance(self.localtime_offset) # get to 0 min past the hour self.addBuildsetCallTimes = [] def recordTimes(timeList, method): def timedMethod(**kw): timeList.append(self.reactor.seconds() - self.localtime_offset) return method(**kw) return timedMethod sched.addBuildsetForSourceStampsWithDefaults = recordTimes( self.addBuildsetCallTimes, sched.addBuildsetForSourceStampsWithDefaults) sched.addBuildsetForChanges = recordTimes( self.addBuildsetCallTimes, sched.addBuildsetForChanges) # see self.assertConsumingChanges self.consumingChanges = None def startConsumingChanges(**kwargs): self.consumingChanges = kwargs return defer.succeed(None) sched.startConsumingChanges = startConsumingChanges return sched def mkbs(self, **kwargs): # create buildset for expected_buildset in assertBuildset. bs = dict(reason="The Nightly scheduler named 'test' triggered this build", external_idstring='', sourcestampsetid=100, properties=[('scheduler', ('test', 'Scheduler'))]) bs.update(kwargs) return bs def mkss(self, **kwargs): # create sourcestamp for expected_sourcestamps in assertBuildset. ss = dict( branch='master', project='', repository='', sourcestampsetid=100) ss.update(kwargs) return ss def mkch(self, **kwargs): # create changeset and insert in database. 
chd = dict(branch='master', project='', repository='') chd.update(kwargs) ch = self.makeFakeChange(**chd) # fakedb.Change requires changeid instead of number chd['changeid'] = chd['number'] del chd['number'] self.db.insertTestData([fakedb.Change(**chd)]) return ch def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def assertConsumingChanges(self, **kwargs): self.assertEqual(self.consumingChanges, kwargs) # Tests def test_constructor_no_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') self.assertEqual( sched.reason, "The Nightly scheduler named 'test' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default', reason="hourly") self.assertEqual(sched.reason, "hourly") def test_constructor_change_filter(self): sched = self.makeScheduler(name='test', builderNames=['test'], branch=None, change_filter=filter.ChangeFilter(category_re="fo+o")) assert sched.change_filter def test_constructor_month(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default', month='1') self.assertEqual(sched.month, "1") @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_start_build(self): sched = self.makeScheduler( name='test', builderNames=['test'], branch='default') yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.startBuild() self.assertEqual(r, None) # end-to-end tests: let's see the scheduler in action @defer.inlineCallbacks def test_iterations_simple(self): # note that Nightly works in local time, but the TestReactor always # starts at midnight UTC, so be careful not to use times that are # timezone dependent -- stick to minutes-past-the-half-hour, as some # timezones are multiples of 30 minutes off from UTC sched = self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[10, 20, 21, 40, 50, 51]) # add a change classification self.db.schedulers.fakeClassifications(self.SCHEDULERID, {19: True}) yield sched.activate() # check that the classification has been flushed, since this # invocation has not requested onlyIfChanged self.db.schedulers.assertClassifications(self.SCHEDULERID, {}) self.reactor.advance(0) while self.reactor.seconds() < self.localtime_offset + 30 * 60: self.reactor.advance(60) self.assertEqual(self.addBuildsetCallTimes, [600, 1200, 1260]) self.assertEqual(self.addBuildsetCalls, [ 
('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False}), ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False}), ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=1260 + self.localtime_offset) yield sched.deactivate() def test_iterations_simple_with_branch(self): # see timezone warning above sched = self.makeScheduler(name='test', builderNames=['test'], branch='master', minute=[5, 35]) sched.activate() self.reactor.advance(0) while self.reactor.seconds() < self.localtime_offset + 10 * 60: self.reactor.advance(60) self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'sourcestamps': [{'codebase': ''}], 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=300 + self.localtime_offset) d = sched.deactivate() return d def do_test_iterations_onlyIfChanged(self, *changes_at, **kwargs): fII = mock.Mock(name='fII') self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[5, 25, 45], onlyIfChanged=True, fileIsImportant=fII, **kwargs) return self.do_test_iterations_onlyIfChanged_test(fII, *changes_at) @defer.inlineCallbacks def do_test_iterations_onlyIfChanged_test(self, fII, *changes_at): yield self.sched.activate() # check that the scheduler has started to consume changes self.assertConsumingChanges(fileIsImportant=fII, change_filter=None, onlyImportant=False) # manually run the clock forward through a half-hour, allowing any # excitement to take place changes_at = list(changes_at) self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < self.localtime_offset + 30 * 60: # inject any new changes.. 
while (changes_at and self.reactor.seconds() >= self.localtime_offset + changes_at[0][0]): when, newchange, important = changes_at.pop(0) self.db.changes.fakeAddChangeInstance(newchange) yield self.sched.gotChange(newchange, important).addErrback(log.err) # and advance the clock by a minute self.reactor.advance(60) @defer.inlineCallbacks def test_iterations_onlyIfChanged_no_changes(self): yield self.do_test_iterations_onlyIfChanged() self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_unimp_changes(self): yield self.do_test_iterations_onlyIfChanged( (60, mock.Mock(), False), (600, mock.Mock(), False)) self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_off_branch_changes(self): yield self.do_test_iterations_onlyIfChanged( (60, self.makeFakeChange(number=1, branch='testing'), True), (1700, self.makeFakeChange(number=2, branch='staging'), True)) self.assertEqual(self.addBuildsetCalls, []) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_mixed_changes(self): yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange(number=3, branch=None), False), (130, self.makeFakeChange(number=4, branch='offbranch'), True), (1200, self.makeFakeChange(number=5, branch=None), True), (1201, self.makeFakeChange(number=6, branch=None), False), (1202, self.makeFakeChange(number=7, branch='offbranch'), True)) # note that the changeid list includes the unimportant changes, but not the # off-branch changes, and note that no build took place at 300s, as no important # changes had yet arrived self.assertEqual(self.addBuildsetCallTimes, [1500]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3, 5, 6], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_oneChanged(self): # Test createAbsoluteSourceStamps=True when only one codebase has # changed yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange( number=3, codebase='a', revision='2345:bcd'), True), codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, createAbsoluteSourceStamps=True) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3)}) yield self.sched.deactivate() @defer.inlineCallbacks def 
test_iterations_onlyIfChanged_createAbsoluteSourceStamps_oneChanged_loadOther(self): # Test createAbsoluteSourceStamps=True when only one codebase has changed, # but the other was previously changed fII = mock.Mock(name='fII') self.makeScheduler(name='test', builderNames=['test'], branch=None, minute=[5, 25, 45], onlyIfChanged=True, fileIsImportant=fII, codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, createAbsoluteSourceStamps=True) self.db.insertTestData([ fakedb.Object(id=self.OBJECTID, name='test', class_name='Nightly'), fakedb.ObjectState(objectid=self.OBJECTID, name='lastCodebases', value_json='{"b": {"branch": "master", "repository": "B", "revision": "1234:abc", "lastChange": 2}}')]) yield self.do_test_iterations_onlyIfChanged_test(fII, (120, self.makeFakeChange(number=3, codebase='a', revision='2345:bcd'), True)) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3), 'b': dict(revision='1234:abc', branch="master", repository='B', lastChange=2)}) yield self.sched.deactivate() @defer.inlineCallbacks def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_bothChanged(self): # Test createAbsoluteSourceStamps=True when both codebases have changed yield self.do_test_iterations_onlyIfChanged( (120, self.makeFakeChange( number=3, codebase='a', revision='2345:bcd'), True), (122, self.makeFakeChange( number=4, codebase='b', revision='1234:abc'), True), codebases={'a': {'repository': "", 'branch': 'master'}, 'b': {'repository': "", 'branch': 'master'}}, createAbsoluteSourceStamps=True) self.db.state.assertStateByClass('test', 'Nightly', last_build=1500 + self.localtime_offset) # addBuildsetForChanges calls getCodebase, so this isn't too # interesting self.assertEqual(self.addBuildsetCallTimes, [300]) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForChanges', { 'builderNames': None, 'changeids': [3, 4], 'external_idstring': None, 'properties': None, 'reason': "The Nightly scheduler named 'test' triggered this build", 'waited_for': False})]) self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={ 'a': dict(revision='2345:bcd', branch=None, repository='', lastChange=3), 'b': dict(revision='1234:abc', branch=None, repository='', lastChange=4)}) yield self.sched.deactivate() buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_timed_NightlyBase.py000066400000000000000000000364051361162603000276130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import time from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin try: from multiprocessing import Process assert Process except ImportError: Process = None class NightlyBase(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): """detailed getNextBuildTime tests""" OBJECTID = 133 SCHEDULERID = 33 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def makeScheduler(self, firstBuildDuration=0, **kwargs): return self.attachScheduler(timed.NightlyBase(**kwargs), self.OBJECTID, self.SCHEDULERID) @defer.inlineCallbacks def do_getNextBuildTime_test(self, sched, *expectations): for lastActuated, expected in expectations: # convert from tuples to epoch time (in local timezone) lastActuated_ep, expected_ep = [ time.mktime(t + (0,) * (8 - len(t)) + (-1,)) for t in (lastActuated, expected)] got_ep = yield sched.getNextBuildTime(lastActuated_ep) self.assertEqual(got_ep, expected_ep, "%s -> %s != %s" % (lastActuated, time.localtime(got_ep), expected)) def test_getNextBuildTime_hourly(self): sched = self.makeScheduler(name='test', builderNames=['test']) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 15, 1), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 59, 1), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 3, 59, 59), (2011, 1, 1, 4, 0, 0)), ((2011, 1, 1, 23, 22, 22), (2011, 1, 2, 0, 0, 0)), ((2011, 1, 1, 23, 59, 0), (2011, 1, 2, 0, 0, 0)), ) def test_getNextBuildTime_minutes_single(self): # basically the same as .._hourly sched = self.makeScheduler(name='test', builderNames=['test'], minute=4) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 3, 4, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 4, 4, 0)), ) def test_getNextBuildTime_minutes_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[4, 34]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0, 0), (2011, 1, 1, 3, 4, 0)), ((2011, 1, 1, 3, 15, 0), (2011, 1, 1, 3, 34, 0)), ((2011, 1, 1, 3, 34, 0), (2011, 1, 1, 4, 4, 0)), ((2011, 1, 1, 3, 59, 1), (2011, 1, 1, 4, 4, 0)), ) def test_getNextBuildTime_minutes_star(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute='*') return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 11, 30), (2011, 1, 1, 3, 12, 0)), ((2011, 1, 1, 3, 12, 0), (2011, 1, 1, 3, 13, 0)), ((2011, 1, 1, 3, 59, 0), (2011, 1, 1, 4, 0, 0)), ) def test_getNextBuildTime_hours_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], hour=4) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0), (2011, 1, 1, 4, 0)), ((2011, 1, 1, 13, 0), (2011, 1, 2, 4, 0)), ) def test_getNextBuildTime_hours_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], hour=[7, 19]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 0), (2011, 1, 1, 7, 0)), ((2011, 1, 1, 7, 1), (2011, 1, 1, 19, 0)), ((2011, 1, 1, 18, 59), (2011, 1, 1, 19, 0)), ((2011, 1, 1, 19, 59), (2011, 1, 2, 7, 0)), ) def test_getNextBuildTime_hours_minutes(self): sched = self.makeScheduler(name='test', builderNames=['test'], 
hour=13, minute=19) return self.do_getNextBuildTime_test(sched, ((2011, 1, 1, 3, 11), (2011, 1, 1, 13, 19)), ((2011, 1, 1, 13, 19), (2011, 1, 2, 13, 19)), ((2011, 1, 1, 23, 59), (2011, 1, 2, 13, 19)), ) def test_getNextBuildTime_month_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=3) return self.do_getNextBuildTime_test(sched, ((2011, 2, 27, 3, 11), (2011, 3, 1, 0, 0)), # still hourly! ((2011, 3, 1, 1, 11), (2011, 3, 1, 2, 0)), ) def test_getNextBuildTime_month_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=[4, 6]) return self.do_getNextBuildTime_test(sched, ((2011, 3, 30, 3, 11), (2011, 4, 1, 0, 0)), # still hourly! ((2011, 4, 1, 1, 11), (2011, 4, 1, 2, 0)), ((2011, 5, 29, 3, 11), (2011, 6, 1, 0, 0)), ) def test_getNextBuildTime_month_dayOfMonth(self): sched = self.makeScheduler(name='test', builderNames=['test'], month=[3, 6], dayOfMonth=[15]) return self.do_getNextBuildTime_test(sched, ((2011, 2, 12, 3, 11), (2011, 3, 15, 0, 0)), ((2011, 3, 12, 3, 11), (2011, 3, 15, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=10) return self.do_getNextBuildTime_test(sched, ((2011, 1, 9, 3, 0), (2011, 1, 10, 0, 0)), # still hourly! ((2011, 1, 10, 3, 0), (2011, 1, 10, 4, 0)), ((2011, 1, 30, 3, 0), (2011, 2, 10, 0, 0)), ((2011, 12, 30, 11, 0), (2012, 1, 10, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_multiple(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=[10, 20, 30]) return self.do_getNextBuildTime_test(sched, ((2011, 1, 9, 22, 0), (2011, 1, 10, 0, 0)), ((2011, 1, 19, 22, 0), (2011, 1, 20, 0, 0)), ((2011, 1, 29, 22, 0), (2011, 1, 30, 0, 0)), # no Feb 30! ((2011, 2, 29, 22, 0), (2011, 3, 10, 0, 0)), ) def test_getNextBuildTime_dayOfMonth_hours_minutes(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfMonth=15, hour=20, minute=30) return self.do_getNextBuildTime_test(sched, ((2011, 1, 13, 22, 19), (2011, 1, 15, 20, 30)), ((2011, 1, 15, 19, 19), (2011, 1, 15, 20, 30)), ((2011, 1, 15, 20, 29), (2011, 1, 15, 20, 30)), ) def test_getNextBuildTime_dayOfWeek_single(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=1) # Tuesday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_single_as_string(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek="1") # Tuesday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_multiple_as_string(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek="tue,3") # Tuesday, Thursday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 0, 0)), # still hourly! ((2011, 1, 4, 19, 19), (2011, 1, 4, 20, 0)), ((2011, 1, 5, 22, 19), (2011, 1, 6, 0, 0)), # still hourly! 
((2011, 1, 6, 19, 19), (2011, 1, 6, 20, 0)), ) def test_getNextBuildTime_dayOfWeek_multiple_hours(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=[1, 3], hour=1) # Tuesday, Thursday (2011-1-1 was a Saturday) return self.do_getNextBuildTime_test(sched, ((2011, 1, 3, 22, 19), (2011, 1, 4, 1, 0)), ((2011, 1, 4, 22, 19), (2011, 1, 6, 1, 0)), ) def test_getNextBuildTime_dayOfWeek_dayOfMonth(self): sched = self.makeScheduler(name='test', builderNames=['test'], dayOfWeek=[1, 4], dayOfMonth=5, hour=1) return self.do_getNextBuildTime_test(sched, # Tues ((2011, 1, 3, 22, 19), (2011, 1, 4, 1, 0)), # 5th ((2011, 1, 4, 22, 19), (2011, 1, 5, 1, 0)), # Thurs ((2011, 1, 5, 22, 19), (2011, 1, 7, 1, 0)), ) buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_timed_NightlyTriggerable.py000066400000000000000000000303131361162603000311600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime from twisted.internet import task from twisted.trial import unittest from buildbot.process import properties from buildbot.schedulers import timed from buildbot.test.fake import fakedb from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class NightlyTriggerable(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): try: datetime.datetime.fromtimestamp(1) except OSError: skip = ("Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") SCHEDULERID = 327 OBJECTID = 1327 def makeScheduler(self, firstBuildDuration=0, **kwargs): sched = self.attachScheduler(timed.NightlyTriggerable(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) # add a Clock to help checking timing issues self.clock = sched._reactor = task.Clock() return sched def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() # utilities def assertBuildsetAdded(self, sourcestamps=None, properties=None): if sourcestamps is None: sourcestamps = {} if properties is None: properties = {} properties['scheduler'] = ('test', 'Scheduler') self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', dict( builderNames=None, # uses the default properties=properties, reason="The NightlyTriggerable scheduler named 'test' " "triggered this build", sourcestamps=sourcestamps, waited_for=False)), ]) self.addBuildsetCalls = [] def assertNoBuildsetAdded(self): self.assertEqual(self.addBuildsetCalls, []) # tests def test_constructor_no_reason(self): sched = self.makeScheduler(name='test', builderNames=['test']) self.assertEqual( sched.reason, "The NightlyTriggerable scheduler named 'test' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], reason="hourlytriggerable") self.assertEqual(sched.reason, "hourlytriggerable") def 
test_constructor_month(self): sched = self.makeScheduler( name='test', builderNames=['test'], month='1') self.assertEqual(sched.month, "1") def test_timer_noBuilds(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5]) sched.activate() self.clock.advance(60 * 60) # Run for 1h self.assertEqual(self.addBuildsetCalls, []) def test_timer_oneTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(revision='myrev', branch='br', project='p', repository='r', codebase='cb'), ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_timer_twoTriggers(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev1', branch='br', project='p', repository='r') ], set_props=None) sched.trigger(False, [ dict(codebase='cb', revision='myrev2', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', # builds the second trigger's revision revision='myrev2'), ]) def test_timer_oneTrigger_then_noBuild(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) self.clock.advance(60 * 60) # Run for 1h # no trigger, so the second did not build self.assertNoBuildsetAdded() def test_timer_oneTriggers_then_oneTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) sched.activate() sched.trigger(False, [ dict(codebase='cb', revision='myrev1', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev1'), ]) sched.trigger(False, [ dict(codebase='cb', revision='myrev2', branch='br', project='p', repository='r') ], set_props=None) self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev2'), ]) def test_savedTrigger(self): sched = self.makeScheduler(name='test', builderNames=['test'], minute=[5], codebases={'cb': {'repository': 'annoying'}}) value_json = \ '[ [ {"codebase": "cb", "project": "p", "repository": "r", ' \ '"branch": "br", "revision": "myrev"} ], {}, null, null ]' self.db.insertTestData([ fakedb.Object( id=self.SCHEDULERID, name='test', class_name='NightlyTriggerable'), fakedb.ObjectState(objectid=self.SCHEDULERID, name='lastTrigger', value_json=value_json), ]) sched.activate() self.clock.advance(60 * 60) # Run for 1h self.assertBuildsetAdded(sourcestamps=[ dict(codebase='cb', branch='br', project='p', repository='r', revision='myrev'), ]) def test_savedTrigger_dict(self): sched = self.makeScheduler(name='test', builderNames=['test'], 
                                   minute=[5], codebases={'cb': {'repository': 'annoying'}})
        value_json = \
            '[ { "cb": {"codebase": "cb", "project": "p", "repository": "r", ' \
            '"branch": "br", "revision": "myrev"} }, {}, null, null ]'
        self.db.insertTestData([
            fakedb.Object(
                id=self.SCHEDULERID, name='test',
                class_name='NightlyTriggerable'),
            fakedb.ObjectState(objectid=self.SCHEDULERID,
                               name='lastTrigger', value_json=value_json),
        ])

        sched.activate()

        self.clock.advance(60 * 60)  # Run for 1h

        self.assertBuildsetAdded(sourcestamps=[
            dict(codebase='cb', branch='br', project='p', repository='r',
                 revision='myrev'),
        ])

    def test_saveTrigger(self):
        sched = self.makeScheduler(name='test', builderNames=['test'],
                                   minute=[5],
                                   codebases={'cb': {'repository': 'annoying'}})
        self.db.insertTestData([
            fakedb.Object(
                id=self.SCHEDULERID, name='test',
                class_name='NightlyTriggerable'),
        ])

        sched.activate()

        (idsDeferred, d) = sched.trigger(False, [
            dict(codebase='cb', revision='myrev',
                 branch='br', project='p', repository='r'),
        ], set_props=None)

        @d.addCallback
        def cb(_):
            self.db.state.assertState(self.SCHEDULERID, lastTrigger=[[
                dict(codebase='cb', revision='myrev',
                     branch='br', project='p', repository='r'),
            ], {}, None, None])

        return d

    def test_saveTrigger_noTrigger(self):
        sched = self.makeScheduler(name='test', builderNames=['test'],
                                   minute=[5],
                                   codebases={'cb': {'repository': 'annoying'}})
        self.db.insertTestData([
            fakedb.Object(
                id=self.SCHEDULERID, name='test',
                class_name='NightlyTriggerable'),
        ])

        sched.activate()

        (idsDeferred, d) = sched.trigger(False, [
            dict(codebase='cb', revision='myrev',
                 branch='br', project='p', repository='r'),
        ], set_props=None)

        self.clock.advance(60 * 60)  # Run for 1h

        @d.addCallback
        def cb(_):
            self.db.state.assertState(self.SCHEDULERID, lastTrigger=None)

        return d

    def test_triggerProperties(self):
        sched = self.makeScheduler(name='test', builderNames=['test'],
                                   minute=[5],
                                   codebases={'cb': {'repository': 'annoying'}})
        self.db.insertTestData([
            fakedb.Object(
                id=self.SCHEDULERID, name='test',
                class_name='NightlyTriggerable'),
        ])

        sched.activate()

        sched.trigger(False, [
            dict(codebase='cb', revision='myrev',
                 branch='br', project='p', repository='r'),
        ], properties.Properties(testprop='test'))

        self.db.state.assertState(self.SCHEDULERID, lastTrigger=[[
            dict(codebase='cb', revision='myrev',
                 branch='br', project='p', repository='r'),
        ], {'testprop': ['test', 'TEST']}, None, None])

        self.clock.advance(60 * 60)  # Run for 1h

        self.assertBuildsetAdded(
            properties=dict(testprop=('test', 'TEST')),
            sourcestamps=[
                dict(codebase='cb', branch='br', project='p', repository='r',
                     revision='myrev'),
            ])

    def test_savedProperties(self):
        sched = self.makeScheduler(name='test', builderNames=['test'],
                                   minute=[5],
                                   codebases={'cb': {'repository': 'annoying'}})
        value_json = \
            '[ [ {"codebase": "cb", "project": "p", "repository": "r", ' \
            '"branch": "br", "revision": "myrev"} ], ' \
            '{"testprop": ["test", "TEST"]}, null, null ]'
        self.db.insertTestData([
            fakedb.Object(
                id=self.SCHEDULERID, name='test',
                class_name='NightlyTriggerable'),
            fakedb.ObjectState(objectid=self.SCHEDULERID,
                               name='lastTrigger', value_json=value_json),
        ])

        sched.activate()

        self.clock.advance(60 * 60)  # Run for 1h

        self.assertBuildsetAdded(
            properties={'testprop': ('test', 'TEST')},
            sourcestamps=[
                dict(codebase='cb', branch='br', project='p', repository='r',
                     revision='myrev'),
            ])
buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_timed_Periodic.py
# This file is part of Buildbot.
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TestException(Exception): pass class Periodic(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 23 SCHEDULERID = 3 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def makeScheduler(self, firstBuildDuration=0, firstBuildError=False, exp_branch=None, **kwargs): self.sched = sched = timed.Periodic(**kwargs) sched._reactor = self.reactor self.attachScheduler(self.sched, self.OBJECTID, self.SCHEDULERID) # keep track of builds in self.events self.events = [] def addBuildsetForSourceStampsWithDefaults(reason, sourcestamps, waited_for=False, properties=None, builderNames=None, **kw): self.assertIn('Periodic scheduler named', reason) # TODO: check branch isFirst = (self.events == []) if self.reactor.seconds() == 0 and firstBuildError: raise TestException() self.events.append('B@%d' % self.reactor.seconds()) if isFirst and firstBuildDuration: d = defer.Deferred() self.reactor.callLater(firstBuildDuration, d.callback, None) return d return defer.succeed(None) sched.addBuildsetForSourceStampsWithDefaults = addBuildsetForSourceStampsWithDefaults # handle state locally self.state = {} def getState(k, default): return defer.succeed(self.state.get(k, default)) sched.getState = getState def setState(k, v): self.state[k] = v return defer.succeed(None) sched.setState = setState return sched # tests def test_constructor_invalid(self): with self.assertRaises(config.ConfigErrors): timed.Periodic(name='test', builderNames=['test'], periodicBuildTimer=-2) def test_constructor_no_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], periodicBuildTimer=10) self.assertEqual( sched.reason, "The Periodic scheduler named 'test' triggered this build") def test_constructor_reason(self): sched = self.makeScheduler( name='test', builderNames=['test'], periodicBuildTimer=10, reason="periodic") self.assertEqual(sched.reason, "periodic") def test_iterations_simple(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@13', 'B@26']) self.assertEqual(self.state.get('last_build'), 26) d = sched.deactivate() return d def test_iterations_simple_branch(self): sched = self.makeScheduler(exp_branch='newfeature', name='test', builderNames=['test'], periodicBuildTimer=13, branch='newfeature') sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@13', 'B@26']) 
self.assertEqual(self.state.get('last_build'), 26) d = sched.deactivate() return d def test_iterations_long(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=10, firstBuildDuration=15) # takes a while to start a build sched.activate() self.reactor.advance(0) # let it trigger the first (longer) build while self.reactor.seconds() < 40: self.reactor.advance(1) self.assertEqual(self.events, ['B@0', 'B@15', 'B@25', 'B@35']) self.assertEqual(self.state.get('last_build'), 35) d = sched.deactivate() return d @defer.inlineCallbacks def test_start_build_error(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=10, firstBuildError=True) # error during first build start yield sched.activate() self.reactor.advance(0) # let it trigger the first (error) build while self.reactor.seconds() < 40: self.reactor.advance(1) self.assertEqual(self.events, ['B@10', 'B@20', 'B@30', 'B@40']) self.assertEqual(self.state.get('last_build'), 40) self.assertEqual(1, len(self.flushLoggedErrors(TestException))) yield sched.deactivate() def test_iterations_stop_while_starting_build(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13, firstBuildDuration=6) # takes a while to start a build sched.activate() self.reactor.advance(0) # let it trigger the first (longer) build self.reactor.advance(3) # get partway into that build d = sched.deactivate() # begin stopping the service d.addCallback( lambda _: self.events.append('STOP@%d' % self.reactor.seconds())) # run the clock out while self.reactor.seconds() < 40: self.reactor.advance(1) # note that the deactivate completes after the first build completes, and no # subsequent builds occur self.assertEqual(self.events, ['B@0', 'STOP@6']) self.assertEqual(self.state.get('last_build'), 0) return d def test_iterations_with_initial_state(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # so next build should start in 6s self.state['last_build'] = self.reactor.seconds() - 7 sched.activate() self.reactor.advance(0) # let it trigger the first build while self.reactor.seconds() < 30: self.reactor.advance(1) self.assertEqual(self.events, ['B@6', 'B@19']) self.assertEqual(self.state.get('last_build'), 19) d = sched.deactivate() return d @defer.inlineCallbacks def test_getNextBuildTime_None(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # given None, build right away t = yield sched.getNextBuildTime(None) self.assertEqual(t, 0) @defer.inlineCallbacks def test_getNextBuildTime_given(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) # given a time, add the periodicBuildTimer to it t = yield sched.getNextBuildTime(20) self.assertEqual(t, 33) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() 
self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_start_build(self): sched = self.makeScheduler(name='test', builderNames=['test'], periodicBuildTimer=13) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.startBuild() self.assertEqual(r, None) buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_timed_Timed.py000066400000000000000000000035121361162603000264350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot.schedulers import timed from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class Timed(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 928754 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() class Subclass(timed.Timed): def getNextBuildTime(self, lastActuation): self.got_lastActuation = lastActuation return defer.succeed((lastActuation or 1000) + 60) def startBuild(self): self.started_build = True return defer.succeed(None) def makeScheduler(self, firstBuildDuration=0, **kwargs): sched = self.attachScheduler(self.Subclass(**kwargs), self.OBJECTID) self.clock = sched._reactor = task.Clock() return sched # tests # note that most of the heavy-lifting for testing this class is handled by # the subclasses' tests, as that's the more natural place for it buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_triggerable.py000066400000000000000000000324161361162603000265050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.process import properties from buildbot.schedulers import triggerable from buildbot.test.fake import fakedb from buildbot.test.util import interfaces from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TriggerableInterfaceTest(unittest.TestCase, interfaces.InterfaceTests): def test_interface(self): self.assertInterfacesImplemented(triggerable.Triggerable) class Triggerable(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 33 SCHEDULERID = 13 def setUp(self): self.setUpTestReactor() # Necessary to get an assertable submitted_at time. self.reactor.advance(946684799) self.setUpScheduler() self.subscription = None def tearDown(self): self.tearDownScheduler() def makeScheduler(self, overrideBuildsetMethods=False, **kwargs): self.master.db.insertTestData([fakedb.Builder(id=77, name='b')]) sched = self.attachScheduler( triggerable.Triggerable(name='n', builderNames=['b'], **kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=overrideBuildsetMethods) return sched @defer.inlineCallbacks def assertTriggeredBuildset(self, idsDeferred, waited_for, properties=None, sourcestamps=None): if properties is None: properties = {} bsid, brids = yield idsDeferred properties.update({'scheduler': ('n', 'Scheduler')}) self.assertEqual( self.master.db.buildsets.buildsets[bsid]['properties'], properties, ) buildset = yield self.master.db.buildsets.getBuildset(bsid) from datetime import datetime from buildbot.util import UTC ssids = buildset.pop('sourcestamps') self.assertEqual( buildset, { 'bsid': bsid, 'complete': False, 'complete_at': None, 'external_idstring': None, 'reason': "The Triggerable scheduler named 'n' triggered this build", 'results': -1, 'submitted_at': datetime(1999, 12, 31, 23, 59, 59, tzinfo=UTC), 'parent_buildid': None, 'parent_relationship': None, } ) actual_sourcestamps = yield defer.gatherResults([ self.master.db.sourcestamps.getSourceStamp(ssid) for ssid in ssids ]) self.assertEqual(len(sourcestamps), len(actual_sourcestamps)) for expected_ss, actual_ss in zip(sourcestamps, actual_sourcestamps): actual_ss = actual_ss.copy() # We don't care if the actual sourcestamp has *more* attributes # than expected. for key in list(actual_ss.keys()): if key not in expected_ss: del actual_ss[key] self.assertEqual(expected_ss, actual_ss) for brid in brids.values(): buildrequest = yield self.master.db.buildrequests.getBuildRequest(brid) self.assertEqual( buildrequest, { 'buildrequestid': brid, 'buildername': 'b', 'builderid': 77, 'buildsetid': bsid, 'claimed': False, 'claimed_at': None, 'complete': False, 'complete_at': None, 'claimed_by_masterid': None, 'priority': 0, 'results': -1, 'submitted_at': datetime(1999, 12, 31, 23, 59, 59, tzinfo=UTC), 'waited_for': waited_for } ) def sendCompletionMessage(self, bsid, results=3): self.master.mq.callConsumer(('buildsets', str(bsid), 'complete'), dict( bsid=bsid, submitted_at=100, complete=True, complete_at=200, external_idstring=None, reason='triggering', results=results, sourcestamps=[], parent_buildid=None, parent_relationship=None, )) # tests # NOTE: these tests take advantage of the fact that all of the fake # scheduler operations are synchronous, and thus do not return a Deferred. # The Deferred from trigger() is completely processed before this test # method returns. 
    def test_constructor_no_reason(self):
        sched = self.makeScheduler()
        self.assertEqual(
            sched.reason, None)  # default reason is dynamic

    def test_constructor_explicit_reason(self):
        sched = self.makeScheduler(reason="Because I said so")
        self.assertEqual(sched.reason, "Because I said so")

    def test_trigger(self):
        sched = self.makeScheduler(codebases={'cb': {'repository': 'r'}})

        # no subscription should be in place yet
        self.assertEqual(sched.master.mq.qrefs, [])

        # trigger the scheduler, exercising properties while we're at it
        waited_for = True
        set_props = properties.Properties()
        set_props.setProperty('pr', 'op', 'test')
        ss = {'revision': 'myrev', 'branch': 'br', 'project': 'p',
              'repository': 'r', 'codebase': 'cb'}
        idsDeferred, d = sched.trigger(
            waited_for, sourcestamps=[ss], set_props=set_props)
        self.reactor.advance(0)  # let the debounced function fire

        self.assertTriggeredBuildset(
            idsDeferred,
            waited_for,
            properties={'pr': ('op', 'test')},
            sourcestamps=[
                dict(branch='br', project='p', repository='r',
                     codebase='cb', revision='myrev'),
            ])

        # set up a boolean so that we can know when the deferred fires
        self.fired = False

        @d.addCallback
        def fired(xxx_todo_changeme):
            (result, brids) = xxx_todo_changeme
            self.assertEqual(result, 3)  # from sendCompletionMessage
            self.assertEqual(brids, {77: 1000})
            self.fired = True
        d.addErrback(log.err)

        # check that the scheduler has subscribed to buildset changes, but
        # not fired yet
        self.assertEqual(
            [q.filter for q in sched.master.mq.qrefs],
            [('buildsets', None, 'complete',)])
        self.assertFalse(self.fired)

        # pretend a non-matching buildset is complete
        self.sendCompletionMessage(27)

        # scheduler should not have reacted
        self.assertEqual(
            [q.filter for q in sched.master.mq.qrefs],
            [('buildsets', None, 'complete',)])
        self.assertFalse(self.fired)

        # pretend the matching buildset is complete
        self.sendCompletionMessage(200)
        self.reactor.advance(0)  # let the debounced function fire

        # scheduler should have reacted
        self.assertEqual(
            [q.filter for q in sched.master.mq.qrefs],
            [])
        self.assertTrue(self.fired)
        return d

    def test_trigger_overlapping(self):
        sched = self.makeScheduler(codebases={'cb': {'repository': 'r'}})

        # no subscription should be in place yet
        self.assertEqual(sched.master.mq.qrefs, [])

        waited_for = False

        def makeSS(rev):
            return {'revision': rev, 'branch': 'br', 'project': 'p',
                    'repository': 'r', 'codebase': 'cb'}

        # trigger the scheduler the first time
        idsDeferred, d = sched.trigger(
            waited_for, [makeSS('myrev1')])  # triggers bsid 200
        self.assertTriggeredBuildset(
            idsDeferred,
            waited_for,
            sourcestamps=[
                dict(branch='br', project='p', repository='r',
                     codebase='cb', revision='myrev1'),
            ])

        # check both the result and the brids explicitly; assertEqual returns
        # None, so the two checks cannot simply be chained with 'and'
        def check_first_trigger(res_brids):
            self.assertEqual(res_brids[0], 11)
            self.assertEqual(res_brids[1], {77: 1000})
        d.addCallback(check_first_trigger)

        waited_for = True

        # and the second time
        idsDeferred, d = sched.trigger(
            waited_for, [makeSS('myrev2')])  # triggers bsid 201
        self.reactor.advance(0)  # let the debounced function fire
        self.assertTriggeredBuildset(
            idsDeferred,
            waited_for,
            sourcestamps=[
                dict(branch='br', project='p', repository='r',
                     codebase='cb', revision='myrev2'),
            ])

        def check_second_trigger(res_brids):
            self.assertEqual(res_brids[0], 22)
            self.assertEqual(res_brids[1], {77: 1001})
        d.addCallback(check_second_trigger)

        # check that the scheduler has subscribed to buildset changes
        self.assertEqual(
            [q.filter for q in sched.master.mq.qrefs],
            [('buildsets', None, 'complete',)])

        # let a few buildsets complete
        self.sendCompletionMessage(29, results=3)
        self.sendCompletionMessage(201, results=22)
        self.sendCompletionMessage(9, results=3)
self.sendCompletionMessage(200, results=11) self.reactor.advance(0) # let the debounced function fire # both should have triggered with appropriate results, and the # subscription should be cancelled self.assertEqual(sched.master.mq.qrefs, []) @defer.inlineCallbacks def test_trigger_with_sourcestamp(self): # Test triggering a scheduler with a sourcestamp, and see that # sourcestamp handed to addBuildsetForSourceStampsWithDefaults. sched = self.makeScheduler(overrideBuildsetMethods=True) waited_for = False ss = {'repository': 'r3', 'codebase': 'cb3', 'revision': 'fixrev3', 'branch': 'default', 'project': 'p'} idsDeferred = sched.trigger(waited_for, sourcestamps=[ss])[0] yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler')}, 'reason': "The Triggerable scheduler named 'n' triggered " "this build", 'sourcestamps': [{ 'branch': 'default', 'codebase': 'cb3', 'project': 'p', 'repository': 'r3', 'revision': 'fixrev3'}, ], 'waited_for': False}), ]) @defer.inlineCallbacks def test_trigger_without_sourcestamps(self): # Test triggering *without* sourcestamps, and see that nothing is passed # to addBuildsetForSourceStampsWithDefaults waited_for = True sched = self.makeScheduler(overrideBuildsetMethods=True) idsDeferred = sched.trigger(waited_for, sourcestamps=[])[0] yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler')}, 'reason': "The Triggerable scheduler named 'n' triggered " "this build", 'sourcestamps': [], 'waited_for': True}), ]) @defer.inlineCallbacks def test_trigger_with_reason(self): # Test triggering with a reason, and make sure the buildset's reason is updated accordingly # (and not the default) waited_for = True sched = self.makeScheduler(overrideBuildsetMethods=True) set_props = properties.Properties() set_props.setProperty('reason', 'test1', 'test') idsDeferred, d = sched.trigger( waited_for, sourcestamps=[], set_props=set_props) yield idsDeferred self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStampsWithDefaults', { 'builderNames': None, 'properties': {'scheduler': ('n', 'Scheduler'), 'reason': ('test1', 'test')}, 'reason': "test1", 'sourcestamps': [], 'waited_for': True}), ]) @defer.inlineCallbacks def test_startService_stopService(self): sched = self.makeScheduler() yield sched.startService() yield sched.stopService() buildbot-2.6.0/master/buildbot/test/unit/test_schedulers_trysched.py000066400000000000000000001001451361162603000260360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import shutil import sys import mock import twisted from twisted.internet import defer from twisted.protocols import basic from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot.schedulers import trysched from buildbot.test.util import dirs from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin class TryBase(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 26 SCHEDULERID = 6 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): sched = self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID) # Try will return a remote version of master.status, so give it # something to return sched.master.status = mock.Mock() return sched def test_filterBuilderList_ok(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b', 'c'], properties={}) self.assertEqual(sched.filterBuilderList(['b', 'c']), ['b', 'c']) def test_filterBuilderList_bad(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b'], properties={}) self.assertEqual(sched.filterBuilderList(['b', 'c']), []) def test_filterBuilderList_empty(self): sched = trysched.TryBase( name='tsched', builderNames=['a', 'b'], properties={}) self.assertEqual(sched.filterBuilderList([]), ['a', 'b']) @defer.inlineCallbacks def test_enabled_callback(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) expectedValue = not sched.enabled yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, expectedValue) @defer.inlineCallbacks def test_disabled_activate(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.activate() self.assertEqual(r, None) @defer.inlineCallbacks def test_disabled_deactivate(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) yield sched._enabledCallback(None, {'enabled': not sched.enabled}) self.assertEqual(sched.enabled, False) r = yield sched.deactivate() self.assertEqual(r, None) class JobdirService(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.jobdir = 'jobdir' self.newdir = os.path.join(self.jobdir, 'new') self.curdir = os.path.join(self.jobdir, 'cur') self.tmpdir = os.path.join(self.jobdir, 'tmp') self.setUpDirs(self.jobdir, self.newdir, self.curdir, self.tmpdir) def tearDown(self): self.tearDownDirs() def test_messageReceived(self): # stub out svc.scheduler.handleJobFile and .jobdir scheduler = mock.Mock() def handleJobFile(filename, f): self.assertEqual(filename, 'jobdata') self.assertEqual(f.read(), 'JOBDATA') scheduler.handleJobFile = handleJobFile scheduler.jobdir = self.jobdir svc = trysched.JobdirService(scheduler=scheduler, basedir=self.jobdir) # create some new data to process jobdata = os.path.join(self.newdir, 'jobdata') with open(jobdata, "w") as f: f.write('JOBDATA') # run it svc.messageReceived('jobdata') class Try_Jobdir(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 23 SCHEDULERID = 3 
def setUp(self): self.setUpTestReactor() self.setUpScheduler() self.jobdir = None def tearDown(self): self.tearDownScheduler() if self.jobdir: shutil.rmtree(self.jobdir) # tests def setup_test_startService(self, jobdir, exp_jobdir): # set up jobdir self.jobdir = os.path.abspath('jobdir') if os.path.exists(self.jobdir): shutil.rmtree(self.jobdir) os.mkdir(self.jobdir) # build scheduler kwargs = dict(name="tsched", builderNames=['a'], jobdir=self.jobdir) sched = self.attachScheduler( trysched.Try_Jobdir(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True) # watch interaction with the watcher service sched.watcher.startService = mock.Mock() sched.watcher.stopService = mock.Mock() @defer.inlineCallbacks def do_test_startService(self): # start it yield self.sched.startService() # check that it has set the basedir correctly self.assertEqual(self.sched.watcher.basedir, self.jobdir) self.assertEqual(1, self.sched.watcher.startService.call_count) self.assertEqual(0, self.sched.watcher.stopService.call_count) yield self.sched.stopService() self.assertEqual(1, self.sched.watcher.startService.call_count) self.assertEqual(1, self.sched.watcher.stopService.call_count) def test_startService_reldir(self): self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir')) return self.do_test_startService() def test_startService_reldir_subdir(self): self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir/cur')) return self.do_test_startService() def test_startService_absdir(self): self.setup_test_startService( os.path.abspath('jobdir'), os.path.abspath('jobdir')) return self.do_test_startService() @defer.inlineCallbacks def do_test_startService_but_not_active(self, jobdir, exp_jobdir): """Same as do_test_startService, but the master wont activate this service""" self.setup_test_startService( 'jobdir', os.path.abspath('basedir/jobdir')) self.setSchedulerToMaster(self.OTHER_MASTER_ID) # start it self.sched.startService() # check that it has set the basedir correctly, even if it doesn't start self.assertEqual(self.sched.watcher.basedir, self.jobdir) yield self.sched.stopService() self.assertEqual(0, self.sched.watcher.startService.call_count) self.assertEqual(0, self.sched.watcher.stopService.call_count) # parseJob def test_parseJob_empty(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(NativeStringIO('')) def test_parseJob_longer_than_netstring_MAXLENGTH(self): self.patch(basic.NetstringReceiver, 'MAX_LENGTH', 100) sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' ) jobstr += 'x' * 200 test_temp_file = NativeStringIO(jobstr) with self.assertRaises(trysched.BadJobfile): sched.parseJob(test_temp_file) def test_parseJob_invalid(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(NativeStringIO('this is not a netstring')) def test_parseJob_invalid_version(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['a'], jobdir='foo') with self.assertRaises(trysched.BadJobfile): sched.parseJob(NativeStringIO('1:9,')) def makeNetstring(self, *strings): return ''.join(['%d:%s,' % (len(s), s) for s in strings]) def test_parseJob_v1(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') 
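# --- illustrative sketch (not part of the buildbot source) -----------------
# The parseJob tests below build their job files out of netstrings, i.e. each
# field framed as "<length>:<payload>,".  A v1 job is a sequence of such
# netstrings (version, jobid, branch, baserev, patch level, diff, then builder
# names); the v5 tests later in this file wrap a single JSON document the same
# way.  This helper simply mirrors the makeNetstring() method above and is
# here purely for illustration.
import json

def netstring(s):
    # frame one field: "<len>:<payload>,"
    return '%d:%s,' % (len(s), s)

v1_job = ''.join(netstring(s) for s in (
    '1', 'extid', 'trunk', '1234', '1',
    'this is my diff, -- ++, etc.',
    'buildera', 'builderc',
))

v5_job = netstring('5') + netstring(json.dumps({
    'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234',
    'patch_level': 1, 'patch_body': 'this is my diff, -- ++, etc.',
    'repository': 'repo', 'project': 'proj', 'who': 'who',
    'comment': 'comment', 'builderNames': ['buildera', 'builderc'],
    'properties': {},
}))

if __name__ == '__main__':
    print(v1_job)
    print(v5_job)
# ---------------------------------------------------------------------------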
jobstr = self.makeNetstring( '1', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': 'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': '', 'who': '', 'comment': '', 'repository': '', 'properties': {}, }) def test_parseJob_v1_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v1_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v1_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '1', 'extid', '', '', '1', 'this is my diff, -- ++, etc.' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v2(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': 'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': '', 'comment': '', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v2_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '2', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v2_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v2_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '2', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v3(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'buildera', 'builderc' ) parsedjob 
= sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': 'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': '', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v3_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '3', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v3_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v3_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '3', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v4(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': 'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': 'comment', 'repository': 'repo', 'properties': {}, }) def test_parseJob_v4_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '4', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v4_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v4_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '4', 'extid', 'trunk', '1234', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v5(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 
'patch_level': 1, 'patch_body': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': ['buildera', 'builderc'], 'properties': {'foo': 'bar'}, })) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob, { 'baserev': '1234', 'branch': 'trunk', 'builderNames': ['buildera', 'builderc'], 'jobid': 'extid', 'patch_body': 'this is my diff, -- ++, etc.', 'patch_level': 1, 'project': 'proj', 'who': 'who', 'comment': 'comment', 'repository': 'repo', 'properties': {'foo': 'bar'}, }) def test_parseJob_v5_empty_branch_rev(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( # blank branch, rev are turned to None '4', 'extid', '', '', '1', 'this is my diff, -- ++, etc.', 'repo', 'proj', 'who', 'comment', 'buildera', 'builderc' ) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['branch'], None) self.assertEqual(parsedjob['baserev'], None) def test_parseJob_v5_no_builders(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 'patch_level': '1', 'diff': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': [], 'properties': {'foo': 'bar'}, })) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['builderNames'], []) def test_parseJob_v5_no_properties(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring( '5', json.dumps({ 'jobid': 'extid', 'branch': 'trunk', 'baserev': '1234', 'patch_level': '1', 'diff': 'this is my diff, -- ++, etc.', 'repository': 'repo', 'project': 'proj', 'who': 'who', 'comment': 'comment', 'builderNames': ['buildera', 'builderb'], 'properties': {}, })) parsedjob = sched.parseJob(NativeStringIO(jobstr)) self.assertEqual(parsedjob['properties'], {}) def test_parseJob_v5_invalid_json(self): sched = trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo') jobstr = self.makeNetstring('5', '{"comment": "com}') with self.assertRaises(trysched.BadJobfile): sched.parseJob(NativeStringIO(jobstr)) # handleJobFile def call_handleJobFile(self, parseJob): sched = self.attachScheduler( trysched.Try_Jobdir( name='tsched', builderNames=['buildera', 'builderb'], jobdir='foo'), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) fakefile = mock.Mock() def parseJob_(f): assert f is fakefile return parseJob(f) sched.parseJob = parseJob_ return defer.maybeDeferred(sched.handleJobFile, 'fakefile', fakefile) def makeSampleParsedJob(self, **overrides): pj = dict(baserev='1234', branch='trunk', builderNames=['buildera', 'builderb'], jobid='extid', patch_body='this is my diff, -- ++, etc.', patch_level=1, project='proj', repository='repo', who='who', comment='comment', properties={}) pj.update(overrides) return pj @defer.inlineCallbacks def test_handleJobFile(self): yield self.call_handleJobFile(lambda f: self.makeSampleParsedJob()) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera', 'builderb'], external_idstring='extid', properties={}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body='this is my diff, -- ++, 
etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) @defer.inlineCallbacks def test_handleJobFile_exception(self): def parseJob(f): raise trysched.BadJobfile yield self.call_handleJobFile(parseJob) self.assertEqual(self.addBuildsetCalls, []) self.assertEqual( 1, len(self.flushLoggedErrors(trysched.BadJobfile))) if twisted.version.major <= 9 and sys.version_info[:2] >= (2, 7): test_handleJobFile_exception.skip = ( "flushLoggedErrors does not work correctly on 9.0.0 " "and earlier with Python-2.7") @defer.inlineCallbacks def test_handleJobFile_bad_builders(self): yield self.call_handleJobFile( lambda f: self.makeSampleParsedJob(builderNames=['xxx'])) self.assertEqual(self.addBuildsetCalls, []) @defer.inlineCallbacks def test_handleJobFile_subset_builders(self): yield self.call_handleJobFile( lambda f: self.makeSampleParsedJob(builderNames=['buildera'])) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera'], external_idstring='extid', properties={}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body='this is my diff, -- ++, etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) @defer.inlineCallbacks def test_handleJobFile_with_try_properties(self): yield self.call_handleJobFile( lambda f: self.makeSampleParsedJob(properties={'foo': 'bar'})) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['buildera', 'builderb'], external_idstring='extid', properties={'foo': ('bar', 'try build')}, reason="'try' job by user who", sourcestamps=[ dict( branch='trunk', codebase='', patch_author='who', patch_body='this is my diff, -- ++, etc.', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='1234'), ])), ]) def test_handleJobFile_with_invalid_try_properties(self): d = self.call_handleJobFile( lambda f: self.makeSampleParsedJob(properties=['foo', 'bar'])) return self.assertFailure(d, AttributeError) class Try_Userpass_Perspective(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 26 SCHEDULERID = 6 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): sched = self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID, overrideBuildsetMethods=True, createBuilderDB=True) # Try will return a remote version of master.status, so give it # something to return sched.master.status = mock.Mock() return sched @defer.inlineCallbacks def call_perspective_try(self, *args, **kwargs): sched = self.makeScheduler(name='tsched', builderNames=['a', 'b'], port='xxx', userpass=[('a', 'b')], properties=dict(frm='schd')) persp = trysched.Try_Userpass_Perspective(sched, 'a') # patch out all of the handling after addBuildsetForSourceStamp def getBuildset(bsid): return dict(bsid=bsid) self.db.buildsets.getBuildset = getBuildset rbss = yield persp.perspective_try(*args, **kwargs) if rbss is None: return self.assertIsInstance(rbss, trysched.RemoteBuildSetStatus) @defer.inlineCallbacks def test_perspective_try(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['a'], properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['a'], external_idstring=None, 
properties={'pr': ('op', 'try build')}, reason="'try' job", sourcestamps=[ dict( branch='default', codebase='', patch_author='', patch_body='-- ++', patch_comment='', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='abcdef'), ])), ]) @defer.inlineCallbacks def test_perspective_try_who(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['a'], who='who', comment='comment', properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, [ ('addBuildsetForSourceStamps', dict( builderNames=['a'], external_idstring=None, properties={'pr': ('op', 'try build')}, reason="'try' job by user who (comment)", sourcestamps=[ dict( branch='default', codebase='', patch_author='who', patch_body='-- ++', patch_comment='comment', patch_level=1, patch_subdir='', project='proj', repository='repo', revision='abcdef'), ])), ]) @defer.inlineCallbacks def test_perspective_try_bad_builders(self): yield self.call_perspective_try( 'default', 'abcdef', (1, '-- ++'), 'repo', 'proj', ['xxx'], properties={'pr': 'op'}) self.assertEqual(self.addBuildsetCalls, []) @defer.inlineCallbacks def test_getAvailableBuilderNames(self): sched = self.makeScheduler(name='tsched', builderNames=['a', 'b'], port='xxx', userpass=[('a', 'b')]) persp = trysched.Try_Userpass_Perspective(sched, 'a') buildernames = yield defer.maybeDeferred( persp.perspective_getAvailableBuilderNames) self.assertEqual(buildernames, ['a', 'b']) class Try_Userpass(scheduler.SchedulerMixin, TestReactorMixin, unittest.TestCase): OBJECTID = 25 SCHEDULERID = 5 def setUp(self): self.setUpTestReactor() self.setUpScheduler() def tearDown(self): self.tearDownScheduler() def makeScheduler(self, **kwargs): sched = self.attachScheduler(trysched.Try_Userpass(**kwargs), self.OBJECTID, self.SCHEDULERID) return sched @defer.inlineCallbacks def test_service(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) # patch out the pbmanager's 'register' command both to be sure # the registration is correct and to get a copy of the factory registration = mock.Mock() registration.unregister = lambda: defer.succeed(None) sched.master.pbmanager = mock.Mock() def register(portstr, user, passwd, factory): self.assertEqual([portstr, user, passwd], ['tcp:9999', 'fred', 'derf']) self.got_factory = factory return defer.succeed(registration) sched.master.pbmanager.register = register # start it yield sched.startService() # make a fake connection by invoking the factory, and check that we # get the correct perspective persp = self.got_factory(mock.Mock(), 'fred') self.assertTrue(isinstance(persp, trysched.Try_Userpass_Perspective)) yield sched.stopService() @defer.inlineCallbacks def test_service_but_not_active(self): sched = self.makeScheduler(name='tsched', builderNames=['a'], port='tcp:9999', userpass=[('fred', 'derf')]) self.setSchedulerToMaster(self.OTHER_MASTER_ID) sched.master.pbmanager = mock.Mock() sched.startService() yield sched.stopService() self.assertFalse(sched.master.pbmanager.register.called) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_base.py000066400000000000000000000355121361162603000244560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import errno import os import string import textwrap from twisted.python import runtime from twisted.python import usage from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot import config as config_module from buildbot.scripts import base from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import skipUnlessPlatformIs class TestIBD(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test') self.stdout = NativeStringIO() self.setUpStdoutAssertions() def test_isBuildmasterDir_no_dir(self): self.assertFalse(base.isBuildmasterDir(os.path.abspath('test/nosuch'))) self.assertInStdout('error reading') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_no_file(self): self.assertFalse(base.isBuildmasterDir(os.path.abspath('test'))) self.assertInStdout('error reading') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_no_Application(self): # Loading of pre-0.9.0 buildbot.tac file should fail. with open(os.path.join('test', 'buildbot.tac'), 'w') as f: f.write("foo\nx = Application('buildslave')\nbar") self.assertFalse(base.isBuildmasterDir(os.path.abspath('test'))) self.assertInStdout('unexpected content') self.assertInStdout('invalid buildmaster directory') def test_isBuildmasterDir_matches(self): with open(os.path.join('test', 'buildbot.tac'), 'w') as f: f.write("foo\nx = Application('buildmaster')\nbar") self.assertTrue(base.isBuildmasterDir(os.path.abspath('test'))) self.assertWasQuiet() class TestTacFallback(dirs.DirsMixin, unittest.TestCase): """ Tests for L{base.getConfigFileFromTac}. """ def setUp(self): """ Create a base directory. """ self.basedir = os.path.abspath('basedir') return self.setUpDirs('basedir') def _createBuildbotTac(self, contents=None): """ Create a C{buildbot.tac} that points to a given C{configfile} and create that file. @param configfile: Config file to point at and create. @type configfile: L{str} """ if contents is None: contents = '#dummy' tacfile = os.path.join(self.basedir, "buildbot.tac") with open(tacfile, "wt") as f: f.write(contents) return tacfile def test_getConfigFileFromTac(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac}, it reads the location of the config file from there. 
""" self._createBuildbotTac("configfile='other.cfg'") foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, "other.cfg") def test_getConfigFileFromTac_fallback(self): """ When L{getConfigFileFromTac} is passed a C{basedir} which doesn't contain a C{buildbot.tac}, it returns C{master.cfg} """ foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, 'master.cfg') def test_getConfigFileFromTac_tacWithoutConfigFile(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac}, but C{buildbot.tac} doesn't define C{configfile}, L{getConfigFileFromTac} returns C{master.cfg} """ self._createBuildbotTac() foundConfigFile = base.getConfigFileFromTac( basedir=self.basedir) self.assertEqual(foundConfigFile, 'master.cfg') def test_getConfigFileFromTac_usingFile(self): """ When L{getConfigFileFromTac} is passed a C{basedir} containing a C{buildbot.tac} which references C{__file__}, that reference points to C{buildbot.tac}. """ self._createBuildbotTac(textwrap.dedent(""" from twisted.python.util import sibpath configfile = sibpath(__file__, "relative.cfg") """)) foundConfigFile = base.getConfigFileFromTac(basedir=self.basedir) self.assertEqual( foundConfigFile, os.path.join(self.basedir, "relative.cfg")) class TestSubcommandOptions(unittest.TestCase): def fakeOptionsFile(self, **kwargs): self.patch(base.SubcommandOptions, 'loadOptionsFile', lambda self: kwargs.copy()) def parse(self, cls, *args): self.opts = cls() self.opts.parseOptions(args) return self.opts class Bare(base.SubcommandOptions): optFlags = [['foo', 'f', 'Foo!']] def test_bare_subclass(self): self.fakeOptionsFile() opts = self.parse(self.Bare, '-f') self.assertTrue(opts['foo']) class ParamsAndOptions(base.SubcommandOptions): optParameters = [['volume', 'v', '5', 'How Loud?']] buildbotOptions = [['volcfg', 'volume']] def test_buildbotOptions(self): self.fakeOptionsFile() opts = self.parse(self.ParamsAndOptions) self.assertEqual(opts['volume'], '5') def test_buildbotOptions_options(self): self.fakeOptionsFile(volcfg='3') opts = self.parse(self.ParamsAndOptions) self.assertEqual(opts['volume'], '3') def test_buildbotOptions_override(self): self.fakeOptionsFile(volcfg='3') opts = self.parse(self.ParamsAndOptions, '--volume', '7') self.assertEqual(opts['volume'], '7') class RequiredOptions(base.SubcommandOptions): optParameters = [['volume', 'v', None, 'How Loud?']] requiredOptions = ['volume'] def test_requiredOptions(self): self.fakeOptionsFile() with self.assertRaises(usage.UsageError): self.parse(self.RequiredOptions) class TestLoadOptionsFile(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test', 'home') self.opts = base.SubcommandOptions() self.dir = os.path.abspath('test') self.home = os.path.abspath('home') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def do_loadOptionsFile(self, _here, exp): # only patch these os.path functions briefly, to # avoid breaking other parts of the test system patches = [] if runtime.platformType == 'win32': from win32com.shell import shell patches.append(self.patch(shell, 'SHGetFolderPath', lambda *args: self.home)) else: def expanduser(p): return p.replace('~/', self.home + '/') patches.append(self.patch(os.path, 'expanduser', expanduser)) old_dirname = os.path.dirname def dirname(p): # bottom out at self.dir, rather than / if p == self.dir: return p return old_dirname(p) patches.append(self.patch(os.path, 'dirname', 
dirname)) try: self.assertEqual(self.opts.loadOptionsFile(_here=_here), exp) finally: for p in patches: p.restore() def writeOptionsFile(self, dir, content, bbdir='.buildbot'): os.makedirs(os.path.join(dir, bbdir)) with open(os.path.join(dir, bbdir, 'options'), 'w') as f: f.write(content) def test_loadOptionsFile_subdirs_not_found(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.do_loadOptionsFile(_here=subdir, exp={}) def test_loadOptionsFile_subdirs_at_root(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.writeOptionsFile(self.dir, 'abc="def"') self.writeOptionsFile(self.home, 'abc=123') # not seen self.do_loadOptionsFile(_here=subdir, exp={'abc': 'def'}) def test_loadOptionsFile_subdirs_at_tip(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) self.writeOptionsFile(os.path.join(self.dir, 'a', 'b'), 'abc="def"') self.writeOptionsFile(self.dir, 'abc=123') # not seen self.do_loadOptionsFile(_here=subdir, exp={'abc': 'def'}) def test_loadOptionsFile_subdirs_at_homedir(self): subdir = os.path.join(self.dir, 'a', 'b') os.makedirs(subdir) # on windows, the subdir of the home (well, appdata) dir # is 'buildbot', not '.buildbot' self.writeOptionsFile(self.home, 'abc=123', 'buildbot' if runtime.platformType == 'win32' else '.buildbot') self.do_loadOptionsFile(_here=subdir, exp={'abc': 123}) def test_loadOptionsFile_syntax_error(self): self.writeOptionsFile(self.dir, 'abc=abc') with self.assertRaises(NameError): self.do_loadOptionsFile(_here=self.dir, exp={}) self.assertInStdout('error while reading') def test_loadOptionsFile_toomany(self): subdir = os.path.join(self.dir, *tuple(string.ascii_lowercase)) os.makedirs(subdir) self.do_loadOptionsFile(_here=subdir, exp={}) self.assertInStdout('infinite glories') # NOTE: testing the ownership check requires patching os.stat, which causes # other problems since it is so heavily used. def mkconfig(**kwargs): config = dict(quiet=False, replace=False, basedir='test') config.update(kwargs) return config class TestLoadConfig(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('test') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def activeBasedir(self, extra_lines=()): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write("from twisted.application import service\n") f.write("service.Application('buildmaster')\n") f.write("\n".join(extra_lines)) def test_checkBasedir(self): self.activeBasedir() rv = base.checkBasedir(mkconfig()) self.assertTrue(rv) self.assertInStdout('checking basedir') def test_checkBasedir_quiet(self): self.activeBasedir() rv = base.checkBasedir(mkconfig(quiet=True)) self.assertTrue(rv) self.assertWasQuiet() def test_checkBasedir_no_dir(self): rv = base.checkBasedir(mkconfig(basedir='doesntexist')) self.assertFalse(rv) self.assertInStdout('invalid buildmaster directory') @skipUnlessPlatformIs('posix') def test_checkBasedir_active_pidfile(self): """ active PID file is giving error. """ self.activeBasedir() # write our own pid in the file with open(os.path.join('test', 'twistd.pid'), 'w') as f: f.write(str(os.getpid())) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('still running') @skipUnlessPlatformIs('posix') def test_checkBasedir_bad_pidfile(self): """ corrupted PID file is giving error. 
""" self.activeBasedir() with open(os.path.join('test', 'twistd.pid'), 'w') as f: f.write("xxx") rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('twistd.pid contains non-numeric value') @skipUnlessPlatformIs('posix') def test_checkBasedir_stale_pidfile(self): """ Stale PID file is removed without causing a system exit. """ self.activeBasedir() pidfile = os.path.join('test', 'twistd.pid') with open(pidfile, 'w') as f: f.write(str(os.getpid() + 1)) def kill(pid, sig): raise OSError(errno.ESRCH, "fake") self.patch(os, "kill", kill) rv = base.checkBasedir(mkconfig()) self.assertTrue(rv) self.assertInStdout('Removing stale pidfile test') self.assertFalse(os.path.exists(pidfile)) @skipUnlessPlatformIs('posix') def test_checkBasedir_pidfile_kill_error(self): """ if ping-killing the PID file does not work, we should error out. """ self.activeBasedir() # write our own pid in the file pidfile = os.path.join('test', 'twistd.pid') with open(pidfile, 'w') as f: f.write(str(os.getpid() + 1)) def kill(pid, sig): raise OSError(errno.EPERM, "fake") self.patch(os, "kill", kill) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('Can\'t check status of PID') self.assertTrue(os.path.exists(pidfile)) def test_checkBasedir_invalid_rotateLength(self): self.activeBasedir(extra_lines=['rotateLength="32"']) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('ERROR') self.assertInStdout('rotateLength') def test_checkBasedir_invalid_maxRotatedFiles(self): self.activeBasedir(extra_lines=['maxRotatedFiles="64"']) rv = base.checkBasedir(mkconfig()) self.assertFalse(rv) self.assertInStdout('ERROR') self.assertInStdout('maxRotatedFiles') def test_loadConfig(self): @classmethod def loadConfig(cls): return config_module.MasterConfig() self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIsInstance(cfg, config_module.MasterConfig) self.assertInStdout('checking') def test_loadConfig_ConfigErrors(self): @classmethod def loadConfig(cls): raise config_module.ConfigErrors(['oh noes']) self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIdentical(cfg, None) self.assertInStdout('oh noes') def test_loadConfig_exception(self): @classmethod def loadConfig(cls): raise RuntimeError() self.patch(config_module.FileLoader, 'loadConfig', loadConfig) cfg = base.loadConfig(mkconfig()) self.assertIdentical(cfg, None) self.assertInStdout('RuntimeError') buildbot-2.6.0/master/buildbot/test/unit/test_scripts_checkconfig.py000066400000000000000000000167151361162603000260130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import re import sys import textwrap import mock from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot.scripts import base from buildbot.scripts import checkconfig from buildbot.test.util import dirs class TestConfigLoader(dirs.DirsMixin, unittest.TestCase): def setUp(self): # config dir must be unique so that the python runtime does not optimize its list of module self.configdir = self.mktemp() return self.setUpDirs(self.configdir) def tearDown(self): return self.tearDownDirs() # tests def do_test_load(self, config='', other_files=None, stdout_re=None, stderr_re=None): if other_files is None: other_files = {} configFile = os.path.join(self.configdir, 'master.cfg') with open(configFile, "w") as f: f.write(config) for filename, contents in other_files.items(): if isinstance(filename, type(())): fn = os.path.join(self.configdir, *filename) dn = os.path.dirname(fn) if not os.path.isdir(dn): os.makedirs(dn) else: fn = os.path.join(self.configdir, filename) with open(fn, "w") as f: f.write(contents) old_stdout, old_stderr = sys.stdout, sys.stderr stdout = sys.stdout = NativeStringIO() stderr = sys.stderr = NativeStringIO() try: checkconfig._loadConfig( basedir=self.configdir, configFile="master.cfg", quiet=False) finally: sys.stdout, sys.stderr = old_stdout, old_stderr if stdout_re: stdout = stdout.getvalue() self.assertTrue(stdout_re.search(stdout), stdout) if stderr_re: stderr = stderr.getvalue() self.assertTrue(stderr_re.search(stderr), stderr) def test_success(self): len_sys_path = len(sys.path) config = textwrap.dedent("""\ c = BuildmasterConfig = {} c['multiMaster'] = True c['schedulers'] = [] from buildbot.config import BuilderConfig from buildbot.process.factory import BuildFactory c['builders'] = [ BuilderConfig('testbuilder', factory=BuildFactory(), workername='worker'), ] from buildbot.worker import Worker c['workers'] = [ Worker('worker', 'pass'), ] c['protocols'] = {'pb': {'port': 9989}} """) self.do_test_load(config=config, stdout_re=re.compile('Config file is good!')) # (regression) check that sys.path hasn't changed self.assertEqual(len(sys.path), len_sys_path) def test_failure_ImportError(self): config = textwrap.dedent("""\ import test_scripts_checkconfig_does_not_exist """) # Python 3 displays this error: # No module named 'test_scripts_checkconfig_does_not_exist' # # Python 2 displays this error: # No module named test_scripts_checkconfig_does_not_exist # # We need a regexp that matches both. 
self.do_test_load(config=config, stderr_re=re.compile( "No module named '?test_scripts_checkconfig_does_not_exist'?")) self.flushLoggedErrors() def test_failure_no_workers(self): config = textwrap.dedent("""\ BuildmasterConfig={} """) self.do_test_load(config=config, stderr_re=re.compile('no workers')) self.flushLoggedErrors() def test_success_imports(self): config = textwrap.dedent("""\ from othermodule import port c = BuildmasterConfig = {} c['schedulers'] = [] c['builders'] = [] c['workers'] = [] c['protocols'] = {'pb': {'port': port}} """) other_files = {'othermodule.py': 'port = 9989'} self.do_test_load(config=config, other_files=other_files) def test_success_import_package(self): config = textwrap.dedent("""\ from otherpackage.othermodule import port c = BuildmasterConfig = {} c['schedulers'] = [] c['builders'] = [] c['workers'] = [] c['protocols'] = {'pb': {'port': 9989}} """) other_files = { ('otherpackage', '__init__.py'): '', ('otherpackage', 'othermodule.py'): 'port = 9989', } self.do_test_load(config=config, other_files=other_files) class TestCheckconfig(unittest.TestCase): def setUp(self): self.loadConfig = mock.Mock( spec=checkconfig._loadConfig, return_value=3) # checkconfig is decorated with @in_reactor, so strip that decoration # since the reactor is already running self.patch(checkconfig, 'checkconfig', checkconfig.checkconfig._orig) self.patch(checkconfig, '_loadConfig', self.loadConfig) def test_checkconfig_default(self): self.assertEqual(checkconfig.checkconfig(dict()), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=None) def test_checkconfig_given_dir(self): self.assertEqual(checkconfig.checkconfig(dict(configFile='.')), 3) self.loadConfig.assert_called_with(basedir='.', configFile='master.cfg', quiet=None) def test_checkconfig_given_file(self): config = dict(configFile='master.cfg') self.assertEqual(checkconfig.checkconfig(config), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=None) def test_checkconfig_quiet(self): config = dict(configFile='master.cfg', quiet=True) self.assertEqual(checkconfig.checkconfig(config), 3) self.loadConfig.assert_called_with(basedir=os.getcwd(), configFile='master.cfg', quiet=True) def test_checkconfig_syntaxError_quiet(self): """ When C{base.getConfigFileFromTac} raises L{SyntaxError}, C{checkconfig.checkconfig} return an error. """ mockGetConfig = mock.Mock(spec=base.getConfigFileFromTac, side_effect=SyntaxError) self.patch(checkconfig, 'getConfigFileFromTac', mockGetConfig) config = dict(configFile='.', quiet=True) self.assertEqual(checkconfig.checkconfig(config), 1) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_cleanupdb.py000066400000000000000000000151061361162603000254760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import textwrap import sqlalchemy as sa from twisted.internet import defer from twisted.trial import unittest from buildbot.scripts import cleanupdb from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.misc import TestReactorMixin from . import test_db_logs try: import lz4 [lz4] hasLz4 = True except ImportError: hasLz4 = False def mkconfig(**kwargs): config = dict(quiet=False, basedir=os.path.abspath('basedir'), force=True) config.update(kwargs) return config def patch_environ(case, key, value): """ Add an environment variable for the duration of a test. """ old_environ = os.environ.copy() def cleanup(): os.environ.clear() os.environ.update(old_environ) os.environ[key] = value case.addCleanup(cleanup) class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.origcwd = os.getcwd() self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write(textwrap.dedent(""" from twisted.application import service application = service.Application('buildmaster') """)) self.setUpStdoutAssertions() self.ensureNoSqliteMemory() def tearDown(self): os.chdir(self.origcwd) self.tearDownDirs() def ensureNoSqliteMemory(self): # test may use mysql or pg if configured in env envkey = "BUILDBOT_TEST_DB_URL" if envkey not in os.environ or os.environ[envkey] == 'sqlite://': patch_environ(self, envkey, "sqlite:///" + os.path.join( self.origcwd, "basedir", "state.sqlite")) def createMasterCfg(self, extraconfig=""): os.chdir(self.origcwd) with open(os.path.join('basedir', 'master.cfg'), 'wt') as f: f.write(textwrap.dedent(""" from buildbot.plugins import * c = BuildmasterConfig = dict() c['db_url'] = {dburl} c['buildbotNetUsageData'] = None c['multiMaster'] = True # don't complain for no builders {extraconfig} """.format(dburl=repr(os.environ["BUILDBOT_TEST_DB_URL"]), extraconfig=extraconfig))) @defer.inlineCallbacks def test_cleanup_not_basedir(self): res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist')) self.assertEqual(res, 1) self.assertInStdout('invalid buildmaster directory') @defer.inlineCallbacks def test_cleanup_bad_config(self): res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 1) self.assertInStdout("master.cfg' does not exist") @defer.inlineCallbacks def test_cleanup_bad_config2(self): self.createMasterCfg(extraconfig="++++ # syntaxerror") res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 1) self.assertInStdout( "encountered a SyntaxError while parsing config file:") # config logs an error via log.err, we must eat it or trial will # complain self.flushLoggedErrors() def assertDictAlmostEqual(self, d1, d2): # The test shows each methods return different size # but we still make a fuzzy comparison to resist if underlying libraries # improve efficiency self.assertEqual(len(d1), len(d2)) for k in d2.keys(): self.assertApproximates(d1[k], d2[k], 10) class TestCleanupDbRealDb(db.RealDatabaseWithConnectorMixin, TestCleanupDb): @defer.inlineCallbacks def setUp(self): super().setUp() table_names = [ 'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters', 'buildrequests', 'buildsets', 'workers' ] self.master = fakemaster.make_master(self, wantRealReactor=True) yield self.setUpRealDatabaseWithConnector(self.master, 
table_names=table_names) @defer.inlineCallbacks def tearDown(self): yield self.tearDownRealDatabaseWithConnector() @defer.inlineCallbacks def test_cleanup(self): # we reuse the fake db background data from db.logs unit tests yield self.insertTestData(test_db_logs.Tests.backgroundData) # insert a log with lots of redundancy LOGDATA = "xx\n" * 2000 logid = yield self.master.db.logs.addLog(102, "x", "x", "s") yield self.master.db.logs.appendLog(logid, LOGDATA) # test all methods lengths = {} for mode in self.master.db.logs.COMPRESSION_MODE: if mode == "lz4" and not hasLz4: # ok.. lz4 is not installed, don't fail lengths["lz4"] = 40 continue # create a master.cfg with different compression method self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode,)) res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir')) self.assertEqual(res, 0) # make sure the compression don't change the data we can retrieve # via api res = yield self.master.db.logs.getLogLines(logid, 0, 2000) self.assertEqual(res, LOGDATA) # retrieve the actual data size in db using raw sqlalchemy def thd(conn): tbl = self.master.db.model.logchunks q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))]) q = q.where(tbl.c.logid == logid) return conn.execute(q).fetchone()[0] lengths[mode] = yield self.master.db.pool.do(thd) self.assertDictAlmostEqual( lengths, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31}) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_create_master.py000066400000000000000000000220621361162603000263560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
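# --- illustrative sketch (not part of the buildbot source) -----------------
# The test_cleanup() test above measures how much space each log compression
# method leaves behind by summing the byte length of every logchunks row for
# one logid.  The same SQLAlchemy-core pattern, reduced to a throwaway
# in-memory table (1.x style select([...]), matching the code above; table and
# column names here are only stand-ins):
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()
logchunks = sa.Table(
    'logchunks', metadata,
    sa.Column('logid', sa.Integer),
    sa.Column('content', sa.LargeBinary),
)
metadata.create_all(engine)

with engine.connect() as conn:
    conn.execute(logchunks.insert(), [
        {'logid': 1, 'content': b'xx\n' * 10},
        {'logid': 1, 'content': b'yy\n' * 5},
        {'logid': 2, 'content': b'ignored'},
    ])
    q = sa.select([sa.func.sum(sa.func.length(logchunks.c.content))])
    q = q.where(logchunks.c.logid == 1)
    print(conn.execute(q).fetchone()[0])   # 45 bytes stored for logid 1
# ---------------------------------------------------------------------------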
# # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.db import connector from buildbot.db import model from buildbot.scripts import create_master from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin def mkconfig(**kwargs): config = dict(force=False, relocatable=False, config='master.cfg', db='sqlite:///state.sqlite', basedir=os.path.abspath('basedir'), quiet=False, **{'no-logrotate': False, 'log-size': 10000000, 'log-count': 10}) config.update(kwargs) return config class TestCreateMaster(misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): # createMaster is decorated with @in_reactor, so strip that decoration # since the master is already running self.patch(create_master, 'createMaster', create_master.createMaster._orig) self.setUpStdoutAssertions() # tests @defer.inlineCallbacks def do_test_createMaster(self, config): # mock out everything that createMaster calls, then check that # they are called, in order functions = ['makeBasedir', 'makeTAC', 'makeSampleConfig', 'createDB'] repls = {} calls = [] for fn in functions: repl = repls[fn] = mock.Mock(name=fn) repl.side_effect = lambda config, fn=fn: calls.append(fn) self.patch(create_master, fn, repl) repls['createDB'].side_effect = (lambda config: calls.append(fn) or defer.succeed(None)) rc = yield create_master.createMaster(config) self.assertEqual(rc, 0) self.assertEqual(calls, functions) for repl in repls.values(): repl.assert_called_with(config) @defer.inlineCallbacks def test_createMaster_quiet(self): yield self.do_test_createMaster(mkconfig(quiet=True)) self.assertWasQuiet() @defer.inlineCallbacks def test_createMaster_loud(self): yield self.do_test_createMaster(mkconfig(quiet=False)) self.assertInStdout('buildmaster configured in') class TestCreateMasterFunctions(www.WwwTestMixin, dirs.DirsMixin, misc.StdoutAssertionsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpDirs('test') self.basedir = os.path.abspath(os.path.join('test', 'basedir')) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def assertInTacFile(self, str): with open(os.path.join('test', 'buildbot.tac'), 'rt') as f: content = f.read() self.assertIn(str, content) def assertNotInTacFile(self, str): with open(os.path.join('test', 'buildbot.tac'), 'rt') as f: content = f.read() self.assertNotIn(str, content) def assertDBSetup(self, basedir=None, db_url='sqlite:///state.sqlite', verbose=True): # mock out the database setup self.db = mock.Mock() self.db.setup.side_effect = lambda *a, **k: defer.succeed(None) self.DBConnector = mock.Mock() self.DBConnector.return_value = self.db self.patch(connector, 'DBConnector', self.DBConnector) basedir = basedir or self.basedir # pylint: disable=unsubscriptable-object self.assertEqual( dict(basedir=self.DBConnector.call_args[0][1], db_url=self.DBConnector.call_args[0][0].mkconfig.db['db_url'], verbose=self.db.setup.call_args[1]['verbose'], check_version=self.db.setup.call_args[1]['check_version'], ), dict(basedir=self.basedir, db_url=db_url, verbose=True, check_version=False)) # tests def test_makeBasedir(self): self.assertFalse(os.path.exists(self.basedir)) create_master.makeBasedir(mkconfig(basedir=self.basedir)) self.assertTrue(os.path.exists(self.basedir)) self.assertInStdout('mkdir %s' % (self.basedir,)) def test_makeBasedir_quiet(self): 
self.assertFalse(os.path.exists(self.basedir)) create_master.makeBasedir(mkconfig(basedir=self.basedir, quiet=True)) self.assertTrue(os.path.exists(self.basedir)) self.assertWasQuiet() def test_makeBasedir_existing(self): os.mkdir(self.basedir) create_master.makeBasedir(mkconfig(basedir=self.basedir)) self.assertInStdout('updating existing installation') def test_makeTAC(self): create_master.makeTAC(mkconfig(basedir='test')) self.assertInTacFile("Application('buildmaster')") self.assertWasQuiet() def test_makeTAC_relocatable(self): create_master.makeTAC(mkconfig(basedir='test', relocatable=True)) self.assertInTacFile("basedir = '.'") # repr() prefers '' self.assertWasQuiet() def test_makeTAC_no_logrotate(self): create_master.makeTAC( mkconfig(basedir='test', **{'no-logrotate': True})) self.assertNotInTacFile("import Log") self.assertWasQuiet() def test_makeTAC_int_log_count(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': 30})) self.assertInTacFile("\nmaxRotatedFiles = 30\n") self.assertWasQuiet() def test_makeTAC_str_log_count(self): with self.assertRaises(TypeError): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': '30'})) def test_makeTAC_none_log_count(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-count': None})) self.assertInTacFile("\nmaxRotatedFiles = None\n") self.assertWasQuiet() def test_makeTAC_int_log_size(self): create_master.makeTAC(mkconfig(basedir='test', **{'log-size': 3000})) self.assertInTacFile("\nrotateLength = 3000\n") self.assertWasQuiet() def test_makeTAC_str_log_size(self): with self.assertRaises(TypeError): create_master.makeTAC(mkconfig(basedir='test', **{'log-size': '3000'})) def test_makeTAC_existing_incorrect(self): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write('WRONG') create_master.makeTAC(mkconfig(basedir='test')) self.assertInTacFile("WRONG") self.assertTrue(os.path.exists( os.path.join('test', 'buildbot.tac.new'))) self.assertInStdout('not touching existing buildbot.tac') def test_makeTAC_existing_incorrect_quiet(self): with open(os.path.join('test', 'buildbot.tac'), 'wt') as f: f.write('WRONG') create_master.makeTAC(mkconfig(basedir='test', quiet=True)) self.assertInTacFile("WRONG") self.assertWasQuiet() def test_makeTAC_existing_correct(self): create_master.makeTAC(mkconfig(basedir='test', quiet=True)) create_master.makeTAC(mkconfig(basedir='test')) self.assertFalse(os.path.exists( os.path.join('test', 'buildbot.tac.new'))) self.assertInStdout('and is correct') def test_makeSampleConfig(self): create_master.makeSampleConfig(mkconfig(basedir='test')) self.assertTrue(os.path.exists( os.path.join('test', 'master.cfg.sample'))) self.assertInStdout('creating ') def test_makeSampleConfig_db(self): create_master.makeSampleConfig(mkconfig(basedir='test', db='XXYYZZ', quiet=True)) with open(os.path.join('test', 'master.cfg.sample'), 'rt') as f: self.assertIn("XXYYZZ", f.read()) self.assertWasQuiet() @defer.inlineCallbacks def test_createDB(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) upgrade = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(model.Model, 'upgrade', upgrade) yield create_master.createDB( mkconfig(basedir='test', quiet=True), _noMonkey=True) setup.asset_called_with(check_version=False, verbose=False) upgrade.assert_called_with() self.assertWasQuiet() 
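# --- illustrative sketch (not part of the buildbot source) -----------------
# The makeTAC tests above assert a specific "don't clobber" behaviour: an
# existing, correct buildbot.tac is left untouched, while an existing but
# different one is preserved and the fresh content is parked next to it as
# buildbot.tac.new.  The helper below is a stand-alone mirror of that
# behaviour, for illustration only (it is not buildbot's implementation):
import os

def write_or_stash(path, contents):
    if os.path.exists(path):
        with open(path) as f:
            if f.read() == contents:
                return path            # already correct, nothing to do
        path = path + '.new'           # existing but different: don't touch it
    with open(path, 'w') as f:
        f.write(contents)
    return path
# ---------------------------------------------------------------------------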
buildbot-2.6.0/master/buildbot/test/unit/test_scripts_logwatcher.py000066400000000000000000000074121361162603000257010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.scripts.logwatcher import BuildmasterStartupError from buildbot.scripts.logwatcher import BuildmasterTimeoutError from buildbot.scripts.logwatcher import LogWatcher from buildbot.scripts.logwatcher import ReconfigError from buildbot.test.util import dirs from buildbot.test.util.misc import TestReactorMixin class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin): def setUp(self): self.setUpDirs('workdir') self.addCleanup(self.tearDownDirs) self.setUpTestReactor() self.spawned_process = mock.Mock() self.reactor.spawnProcess = mock.Mock(return_value=self.spawned_process) def test_start(self): lw = LogWatcher('workdir/test.log', _reactor=self.reactor) lw._start = mock.Mock() lw.start() self.reactor.spawnProcess.assert_called() self.assertTrue(os.path.exists('workdir/test.log')) self.assertTrue(lw.running) @defer.inlineCallbacks def test_success_before_timeout(self): lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(4.9) lw.lineReceived(b'BuildMaster is running') res = yield d self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_failure_after_timeout(self): lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(5.1) lw.lineReceived(b'BuildMaster is running') with self.assertRaises(BuildmasterTimeoutError): yield d @defer.inlineCallbacks def test_progress_restarts_timeout(self): lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() self.reactor.advance(4.9) lw.lineReceived(b'added builder') self.reactor.advance(4.9) lw.lineReceived(b'BuildMaster is running') res = yield d self.assertEqual(res, 'buildmaster') @defer.inlineCallbacks def test_matches_lines(self): lines_and_expected = [ (b'reconfig aborted without making any changes', ReconfigError()), (b'WARNING: reconfig partially applied; master may malfunction', ReconfigError()), (b'Server Shut Down', ReconfigError()), (b'BuildMaster startup failed', BuildmasterStartupError()), (b'message from master: attached', 'worker'), (b'configuration update complete', 'buildmaster'), (b'BuildMaster is running', 'buildmaster'), ] for line, expected in lines_and_expected: lw = LogWatcher('workdir/test.log', timeout=5, _reactor=self.reactor) d = lw.start() lw.lineReceived(line) if isinstance(expected, Exception): with self.assertRaises(type(expected)): yield d else: res = yield d self.assertEqual(res, expected) 
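# --- illustrative sketch (not part of the buildbot source) -----------------
# The LogWatcher tests above never sleep: they drive a fake reactor with
# advance() so that crossing (or not crossing) the 5 second timeout is
# deterministic.  The same pattern using nothing but
# twisted.internet.task.Clock, for illustration only:
from twisted.internet import task

fired = []
clock = task.Clock()
clock.callLater(5, fired.append, 'timed out')   # pretend this is the watchdog

clock.advance(4.9)      # just under the timeout: nothing happens yet
assert fired == []

clock.advance(0.2)      # now past 5 seconds: the delayed call runs
assert fired == ['timed out']
print(fired)
# ---------------------------------------------------------------------------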
buildbot-2.6.0/master/buildbot/test/unit/test_scripts_restart.py000066400000000000000000000052651361162603000252320ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.trial import unittest from buildbot.scripts import restart from buildbot.scripts import start from buildbot.scripts import stop from buildbot.test.util import dirs from buildbot.test.util import misc def mkconfig(**kwargs): config = dict(quiet=False, basedir=os.path.abspath('basedir')) config.update(kwargs) return config class TestStop(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write("Application('buildmaster')") self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def test_restart_not_basedir(self): self.assertEqual(restart.restart(mkconfig(basedir='doesntexist')), 1) self.assertInStdout('invalid buildmaster directory') def test_restart_stop_fails(self): self.patch(stop, 'stop', lambda config, wait: 1) self.assertEqual(restart.restart(mkconfig()), 1) def test_restart_stop_succeeds_start_fails(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 1) self.assertEqual(restart.restart(mkconfig()), 1) def test_restart_succeeds(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig()), 0) self.assertInStdout('now restarting') def test_restart_succeeds_quiet(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig(quiet=True)), 0) self.assertWasQuiet() def test_restart_clean(self): self.patch(stop, 'stop', lambda config, wait: 0) self.patch(start, 'start', lambda config: 0) self.assertEqual(restart.restart(mkconfig(quiet=True, clean=True)), 0) self.assertWasQuiet() buildbot-2.6.0/master/buildbot/test/unit/test_scripts_runner.py000066400000000000000000000726621361162603000250640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import getpass import os import sys import mock from twisted.python import log from twisted.python import runtime from twisted.python import usage from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot.scripts import base from buildbot.scripts import runner from buildbot.test.util import misc class OptionsMixin: def setUpOptions(self): self.options_file = {} self.patch(base.SubcommandOptions, 'loadOptionsFile', lambda other_self: self.options_file) def assertOptions(self, opts, exp): got = {k: opts[k] for k in exp} if got != exp: msg = [] for k in exp: if opts[k] != exp[k]: msg.append(" %s: expected %r, got %r" % (k, exp[k], opts[k])) self.fail("did not get expected options\n" + ("\n".join(msg))) class TestUpgradeMasterOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.UpgradeMasterOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.UpgradeMasterOptions() self.assertIn('buildbot upgrade-master', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False, replace=False) self.assertOptions(opts, exp) def test_short(self): opts = self.parse('-q', '-r') exp = dict(quiet=True, replace=True) self.assertOptions(opts, exp) def test_long(self): opts = self.parse('--quiet', '--replace') exp = dict(quiet=True, replace=True) self.assertOptions(opts, exp) class TestCreateMasterOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.CreateMasterOptions() self.opts.parseOptions(args) return self.opts def defaults_and(self, **kwargs): defaults = dict(force=False, relocatable=False, config='master.cfg', db='sqlite:///state.sqlite', basedir=os.getcwd(), quiet=False, **{'no-logrotate': False, 'log-size': 10000000, 'log-count': 10}) unk_keys = set(kwargs.keys()) - set(defaults.keys()) assert not unk_keys, "invalid keys %s" % (unk_keys,) opts = defaults.copy() opts.update(kwargs) return opts def test_synopsis(self): opts = runner.CreateMasterOptions() self.assertIn('buildbot create-master', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = self.defaults_and() self.assertOptions(opts, exp) def test_db_quiet(self): opts = self.parse('-q') exp = self.defaults_and(quiet=True) self.assertOptions(opts, exp) def test_db_quiet_long(self): opts = self.parse('--quiet') exp = self.defaults_and(quiet=True) self.assertOptions(opts, exp) def test_force(self): opts = self.parse('-f') exp = self.defaults_and(force=True) self.assertOptions(opts, exp) def test_force_long(self): opts = self.parse('--force') exp = self.defaults_and(force=True) self.assertOptions(opts, exp) def test_relocatable(self): opts = self.parse('-r') exp = self.defaults_and(relocatable=True) self.assertOptions(opts, exp) def test_relocatable_long(self): opts = self.parse('--relocatable') exp = self.defaults_and(relocatable=True) self.assertOptions(opts, exp) def test_no_logrotate(self): opts = self.parse('-n') exp = self.defaults_and(**{'no-logrotate': True}) self.assertOptions(opts, exp) def test_no_logrotate_long(self): opts = self.parse('--no-logrotate') exp = self.defaults_and(**{'no-logrotate': True}) self.assertOptions(opts, exp) def test_config(self): opts = self.parse('-cxyz') exp = self.defaults_and(config='xyz') self.assertOptions(opts, exp) def test_config_long(self): opts = self.parse('--config=xyz') exp = self.defaults_and(config='xyz') 
self.assertOptions(opts, exp) def test_log_size(self): opts = self.parse('-s124') exp = self.defaults_and(**{'log-size': 124}) self.assertOptions(opts, exp) def test_log_size_long(self): opts = self.parse('--log-size=124') exp = self.defaults_and(**{'log-size': 124}) self.assertOptions(opts, exp) def test_log_size_noninteger(self): with self.assertRaises(usage.UsageError): self.parse('--log-size=1M') def test_log_count(self): opts = self.parse('-l124') exp = self.defaults_and(**{'log-count': 124}) self.assertOptions(opts, exp) def test_log_count_long(self): opts = self.parse('--log-count=124') exp = self.defaults_and(**{'log-count': 124}) self.assertOptions(opts, exp) def test_log_count_none(self): opts = self.parse('--log-count=None') exp = self.defaults_and(**{'log-count': None}) self.assertOptions(opts, exp) def test_log_count_noninteger(self): with self.assertRaises(usage.UsageError): self.parse('--log-count=M') def test_db_long(self): opts = self.parse('--db=foo://bar') exp = self.defaults_and(db='foo://bar') self.assertOptions(opts, exp) def test_db_invalid(self): with self.assertRaisesRegex(usage.UsageError, "could not parse database URL 'inv_db_url'"): self.parse("--db=inv_db_url") def test_db_basedir(self): path = r'c:\foo\bar' if runtime.platformType == "win32" else '/foo/bar' opts = self.parse('-f', path) exp = self.defaults_and(force=True, basedir=path) self.assertOptions(opts, exp) class BaseTestSimpleOptions(OptionsMixin): # tests for options with just --quiet and a usage message commandName = None optionsClass = None def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = self.optionsClass() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = self.optionsClass() self.assertIn('buildbot %s' % self.commandName, opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False) self.assertOptions(opts, exp) def test_quiet(self): opts = self.parse('--quiet') exp = dict(quiet=True) self.assertOptions(opts, exp) class TestStopOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'stop' optionsClass = runner.StopOptions class TestResetartOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'restart' optionsClass = runner.RestartOptions def test_nodaemon(self): opts = self.parse('--nodaemon') exp = dict(nodaemon=True) self.assertOptions(opts, exp) class TestStartOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'start' optionsClass = runner.StartOptions def test_nodaemon(self): opts = self.parse('--nodaemon') exp = dict(nodaemon=True) self.assertOptions(opts, exp) class TestReconfigOptions(BaseTestSimpleOptions, unittest.TestCase): commandName = 'reconfig' optionsClass = runner.ReconfigOptions class TestTryOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.TryOptions() self.opts.parseOptions(args) return self.opts def defaults_and(self, **kwargs): defaults = dict(connect=None, host=None, jobdir=None, username=None, master=None, passwd=None, who=None, comment=None, diff=None, patchlevel=0, baserev=None, vc=None, branch=None, repository=None, topfile=None, topdir=None, wait=False, dryrun=False, quiet=False, builders=[], properties={}, buildbotbin='buildbot') # dashes make python syntax hard.. 
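# The in-line comment above ("dashes make python syntax hard..") refers to a
# pattern used throughout these option tests: keys containing dashes, such as
# 'get-builder-names' or 'no-logrotate', are not valid Python identifiers, so
# they cannot be written as keyword arguments.  They are spliced in with
# **{...} or assigned after the dict is built, as the next statements do.
# A hypothetical, self-contained illustration of the same trick:
def _build_defaults_example(**kwargs):
    defaults = dict(quiet=False, **{'no-logrotate': False})
    if 'no_logrotate' in kwargs:                      # translate _ back to -
        kwargs['no-logrotate'] = kwargs.pop('no_logrotate')
    defaults.update(kwargs)
    return defaults

assert _build_defaults_example(no_logrotate=True)['no-logrotate'] is True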
defaults['get-builder-names'] = False if 'get_builder_names' in kwargs: kwargs['get-builder-names'] = kwargs['get_builder_names'] del kwargs['get_builder_names'] assert set(kwargs.keys()) <= set(defaults.keys()), "invalid keys" opts = defaults.copy() opts.update(kwargs) return opts def test_synopsis(self): opts = runner.TryOptions() self.assertIn('buildbot try', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = self.defaults_and() self.assertOptions(opts, exp) def test_properties(self): opts = self.parse('--properties=a=b') exp = self.defaults_and(properties=dict(a='b')) self.assertOptions(opts, exp) def test_properties_multiple_opts(self): opts = self.parse('--properties=X=1', '--properties=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_properties_equals(self): opts = self.parse('--properties=X=2+2=4') exp = self.defaults_and(properties=dict(X='2+2=4')) self.assertOptions(opts, exp) def test_properties_commas(self): opts = self.parse('--properties=a=b,c=d') exp = self.defaults_and(properties=dict(a='b', c='d')) self.assertOptions(opts, exp) def test_property(self): opts = self.parse('--property=a=b') exp = self.defaults_and(properties=dict(a='b')) self.assertOptions(opts, exp) def test_property_multiple_opts(self): opts = self.parse('--property=X=1', '--property=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_property_equals(self): opts = self.parse('--property=X=2+2=4') exp = self.defaults_and(properties=dict(X='2+2=4')) self.assertOptions(opts, exp) def test_property_commas(self): opts = self.parse('--property=a=b,c=d') exp = self.defaults_and(properties=dict(a='b,c=d')) self.assertOptions(opts, exp) def test_property_and_properties(self): opts = self.parse('--property=X=1', '--properties=Y=2') exp = self.defaults_and(properties=dict(X='1', Y='2')) self.assertOptions(opts, exp) def test_properties_builders_multiple(self): opts = self.parse('--builder=aa', '--builder=bb') exp = self.defaults_and(builders=['aa', 'bb']) self.assertOptions(opts, exp) def test_options_short(self): opts = self.parse( *'-n -q -c pb -u me -m mr:7 -w you -C comm -p 2 -b bb'.split()) exp = self.defaults_and(dryrun=True, quiet=True, connect='pb', username='me', master='mr:7', who='you', comment='comm', patchlevel=2, builders=['bb']) self.assertOptions(opts, exp) def test_options_long(self): opts = self.parse( *"""--wait --dryrun --get-builder-names --quiet --connect=pb --host=h --jobdir=j --username=u --master=m:1234 --passwd=p --who=w --comment=comm --diff=d --patchlevel=7 --baserev=br --vc=cvs --branch=br --repository=rep --builder=bl --properties=a=b --topfile=Makefile --topdir=. 
--buildbotbin=.virtualenvs/buildbot/bin/buildbot""".split()) exp = self.defaults_and(wait=True, dryrun=True, get_builder_names=True, quiet=True, connect='pb', host='h', jobdir='j', username='u', master='m:1234', passwd='p', who='w', comment='comm', diff='d', patchlevel=7, baserev='br', vc='cvs', branch='br', repository='rep', builders=['bl'], properties=dict(a='b'), topfile='Makefile', topdir='.', buildbotbin='.virtualenvs/buildbot/bin/buildbot') self.assertOptions(opts, exp) def test_patchlevel_inval(self): with self.assertRaises(ValueError): self.parse('-p', 'a') def test_config_builders(self): self.options_file['try_builders'] = ['a', 'b'] opts = self.parse() self.assertOptions(opts, dict(builders=['a', 'b'])) def test_config_builders_override(self): self.options_file['try_builders'] = ['a', 'b'] opts = self.parse('-b', 'd') # overrides a, b self.assertOptions(opts, dict(builders=['d'])) def test_config_old_names(self): self.options_file['try_masterstatus'] = 'ms' self.options_file['try_dir'] = 'td' self.options_file['try_password'] = 'pw' opts = self.parse() self.assertOptions(opts, dict(master='ms', jobdir='td', passwd='pw')) def test_config_masterstatus(self): self.options_file['masterstatus'] = 'ms' opts = self.parse() self.assertOptions(opts, dict(master='ms')) def test_config_masterstatus_override(self): self.options_file['masterstatus'] = 'ms' opts = self.parse('-m', 'mm') self.assertOptions(opts, dict(master='mm')) def test_config_options(self): self.options_file.update(dict(try_connect='pb', try_vc='cvs', try_branch='br', try_repository='rep', try_topdir='.', try_topfile='Makefile', try_host='h', try_username='u', try_jobdir='j', try_password='p', try_master='m:8', try_who='w', try_comment='comm', try_quiet='y', try_wait='y', try_buildbotbin='.virtualenvs/buildbot/bin/buildbot')) opts = self.parse() exp = self.defaults_and(wait=True, quiet=True, connect='pb', host='h', jobdir='j', username='u', master='m:8', passwd='p', who='w', comment='comm', vc='cvs', branch='br', repository='rep', topfile='Makefile', topdir='.', buildbotbin='.virtualenvs/buildbot/bin/buildbot') self.assertOptions(opts, exp) def test_pb_withNoMaster(self): """ When 'builbot try' is asked to connect via pb, but no master is specified, a usage error is raised. """ with self.assertRaises(usage.UsageError): self.parse('--connect=pb') def test_pb_withInvalidMaster(self): """ When 'buildbot try' is asked to connect via pb, but an invalid master is specified, a usage error is raised. 
""" with self.assertRaises(usage.UsageError): self.parse('--connect=pb', '--master=foo') class TestSendChangeOptions(OptionsMixin, unittest.TestCase): master_and_who = ['-m', 'm:1', '-W', 'w'] def setUp(self): self.setUpOptions() self.getpass_response = 'typed-password' self.patch(getpass, 'getpass', lambda prompt: self.getpass_response) def parse(self, *args): self.opts = runner.SendChangeOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.SendChangeOptions() self.assertIn('buildbot sendchange', opts.getSynopsis()) def test_defaults(self): opts = self.parse('-m', 'm:1', '-W', 'me') exp = dict(master='m:1', auth=('change', 'changepw'), who='me', vc=None, repository='', project='', branch=None, category=None, revision=None, revision_file=None, property=None, comments='', logfile=None, when=None, revlink='', encoding='utf8', files=()) self.assertOptions(opts, exp) def test_files(self): opts = self.parse(*self.master_and_who + ['a', 'b', 'c']) self.assertEqual(opts['files'], ('a', 'b', 'c')) def test_properties(self): opts = self.parse('--property', 'x:y', '--property', 'a:b', *self.master_and_who) self.assertEqual(opts['properties'], dict(x="y", a="b")) def test_properties_with_colon(self): opts = self.parse('--property', 'x:http://foo', *self.master_and_who) self.assertEqual(opts['properties'], dict(x='http://foo')) def test_config_file(self): self.options_file['master'] = 'MMM:123' self.options_file['who'] = 'WWW' self.options_file['branch'] = 'BBB' self.options_file['category'] = 'CCC' self.options_file['vc'] = 'svn' opts = self.parse() exp = dict(master='MMM:123', who='WWW', branch='BBB', category='CCC', vc='svn') self.assertOptions(opts, exp) def test_short_args(self): opts = self.parse(*('-m m:1 -a a:b -W W -R r -P p -b b -s git ' + '-C c -r r -p pn:pv -c c -F f -w 123 -l l -e e').split()) exp = dict(master='m:1', auth=('a', 'b'), who='W', repository='r', project='p', branch='b', category='c', revision='r', vc='git', properties=dict(pn='pv'), comments='c', logfile='f', when=123.0, revlink='l', encoding='e') self.assertOptions(opts, exp) def test_long_args(self): opts = self.parse(*('--master m:1 --auth a:b --who w --repository r ' + '--project p --branch b --category c --revision r --vc git ' + '--property pn:pv --comments c --logfile f ' + '--when 123 --revlink l --encoding e').split()) exp = dict(master='m:1', auth=('a', 'b'), who='w', repository='r', project='p', branch='b', category='c', revision='r', vc='git', properties=dict(pn='pv'), comments='c', logfile='f', when=123.0, revlink='l', encoding='e') self.assertOptions(opts, exp) def test_revision_file(self): with open('revfile', 'wt') as f: f.write('my-rev') self.addCleanup(lambda: os.unlink('revfile')) opts = self.parse('--revision_file', 'revfile', *self.master_and_who) self.assertOptions(opts, dict(revision='my-rev')) def test_invalid_when(self): with self.assertRaises(usage.UsageError): self.parse('--when=foo', *self.master_and_who) def test_comments_overrides_logfile(self): opts = self.parse('--logfile', 'logs', '--comments', 'foo', *self.master_and_who) self.assertOptions(opts, dict(comments='foo')) def test_logfile(self): with open('comments', 'wt') as f: f.write('hi') self.addCleanup(lambda: os.unlink('comments')) opts = self.parse('--logfile', 'comments', *self.master_and_who) self.assertOptions(opts, dict(comments='hi')) def test_logfile_stdin(self): stdin = mock.Mock() stdin.read = lambda: 'hi' self.patch(sys, 'stdin', stdin) opts = self.parse('--logfile', '-', 
*self.master_and_who) self.assertOptions(opts, dict(comments='hi')) def test_auth_getpass(self): opts = self.parse('--auth=dustin', *self.master_and_who) self.assertOptions(opts, dict(auth=('dustin', 'typed-password'))) def test_invalid_vcs(self): with self.assertRaises(usage.UsageError): self.parse('--vc=foo', *self.master_and_who) def test_invalid_master(self): with self.assertRaises(usage.UsageError): self.parse("--who=test", "-m foo") class TestTryServerOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.TryServerOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.TryServerOptions() self.assertIn('buildbot tryserver', opts.getSynopsis()) def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def test_with_jobdir(self): opts = self.parse('--jobdir', 'xyz') exp = dict(jobdir='xyz') self.assertOptions(opts, exp) class TestCheckConfigOptions(OptionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.CheckConfigOptions() self.opts.parseOptions(args) return self.opts def test_synopsis(self): opts = runner.CheckConfigOptions() self.assertIn('buildbot checkconfig', opts.getSynopsis()) def test_defaults(self): opts = self.parse() exp = dict(quiet=False) self.assertOptions(opts, exp) def test_configfile(self): opts = self.parse('foo.cfg') exp = dict(quiet=False, configFile='foo.cfg') self.assertOptions(opts, exp) def test_quiet(self): opts = self.parse('-q') exp = dict(quiet=True) self.assertOptions(opts, exp) class TestUserOptions(OptionsMixin, unittest.TestCase): # mandatory arguments extra_args = ['--master', 'a:1', '--username', 'u', '--passwd', 'p'] def setUp(self): self.setUpOptions() def parse(self, *args): self.opts = runner.UserOptions() self.opts.parseOptions(args) return self.opts def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def test_synopsis(self): opts = runner.UserOptions() self.assertIn('buildbot user', opts.getSynopsis()) def test_master(self): opts = self.parse("--master", "abcd:1234", '--op=get', '--ids=x', '--username=u', '--passwd=p') self.assertOptions(opts, dict(master="abcd:1234")) def test_ids(self): opts = self.parse("--ids", "id1,id2,id3", '--op', 'get', *self.extra_args) self.assertEqual(opts['ids'], ['id1', 'id2', 'id3']) def test_info(self): opts = self.parse("--info", "git=Tyler Durden ", '--op', 'add', *self.extra_args) self.assertEqual(opts['info'], [dict(git='Tyler Durden ')]) def test_info_only_id(self): opts = self.parse("--info", "tdurden", '--op', 'update', *self.extra_args) self.assertEqual(opts['info'], [dict(identifier='tdurden')]) def test_info_with_id(self): opts = self.parse("--info", "tdurden:svn=marla", '--op', 'update', *self.extra_args) self.assertEqual( opts['info'], [dict(identifier='tdurden', svn='marla')]) def test_info_multiple(self): opts = self.parse("--info", "git=Tyler Durden ", "--info", "git=Narrator ", '--op', 'add', *self.extra_args) self.assertEqual(opts['info'], [dict(git='Tyler Durden '), dict(git='Narrator ')]) def test_config_user_params(self): self.options_file['user_master'] = 'mm:99' self.options_file['user_username'] = 'un' self.options_file['user_passwd'] = 'pw' opts = self.parse('--op', 'get', '--ids', 'x') self.assertOptions( opts, dict(master='mm:99', username='un', passwd='pw')) def test_config_master(self): self.options_file['master'] = 'mm:99' opts = self.parse('--op', 'get', '--ids', 'x', 
'--username=u', '--passwd=p') self.assertOptions(opts, dict(master='mm:99')) def test_config_master_override(self): self.options_file['master'] = 'not seen' self.options_file['user_master'] = 'mm:99' opts = self.parse('--op', 'get', '--ids', 'x', '--username=u', '--passwd=p') self.assertOptions(opts, dict(master='mm:99')) def test_invalid_info(self): with self.assertRaises(usage.UsageError): self.parse("--info", "foo=bar", '--op', 'add', *self.extra_args) def test_no_master(self): with self.assertRaises(usage.UsageError): self.parse('-op=foo') def test_invalid_master(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'foo') def test_no_operation(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'a:1') def test_bad_operation(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'a:1', '--op=mayhem') def test_no_username(self): with self.assertRaises(usage.UsageError): self.parse('-m', 'a:1', '--op=add') def test_no_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '-m', 'a:1', '-u', 'tdurden') def test_invalid_bb_username(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--bb_username=tdurden', *self.extra_args) def test_invalid_bb_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--bb_password=marla', *self.extra_args) def test_update_no_bb_username(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--bb_password=marla', *self.extra_args) def test_update_no_bb_password(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--bb_username=tdurden', *self.extra_args) def test_no_ids_info(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', *self.extra_args) def test_ids_with_add(self): with self.assertRaises(usage.UsageError): self.parse('--op=add', '--ids=id1', *self.extra_args) def test_ids_with_update(self): with self.assertRaises(usage.UsageError): self.parse('--op=update', '--ids=id1', *self.extra_args) def test_no_ids_found_update(self): with self.assertRaises(usage.UsageError): self.parse("--op=update", "--info=svn=x", *self.extra_args) def test_id_with_add(self): with self.assertRaises(usage.UsageError): self.parse("--op=add", "--info=id:x", *self.extra_args) def test_info_with_remove(self): with self.assertRaises(usage.UsageError): self.parse('--op=remove', '--info=x=v', *self.extra_args) def test_info_with_get(self): with self.assertRaises(usage.UsageError): self.parse('--op=get', '--info=x=v', *self.extra_args) class TestOptions(OptionsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): self.setUpOptions() self.setUpStdoutAssertions() def parse(self, *args): self.opts = runner.Options() self.opts.parseOptions(args) return self.opts def test_defaults(self): with self.assertRaises(usage.UsageError): self.parse() def test_version(self): try: self.parse('--version') except SystemExit as e: self.assertEqual(e.args[0], 0) self.assertInStdout('Buildbot version:') def test_verbose(self): self.patch(log, 'startLogging', mock.Mock()) with self.assertRaises(usage.UsageError): self.parse("--verbose") log.startLogging.assert_called_once_with(sys.stderr) class TestRun(unittest.TestCase): class MySubCommand(usage.Options): subcommandFunction = 'buildbot.test.unit.test_scripts_runner.subcommandFunction' optFlags = [ ['loud', 'l', 'be noisy'] ] def postOptions(self): if self['loud']: raise usage.UsageError('THIS IS ME BEING LOUD') def setUp(self): # patch our subcommand in 
self.patch(runner.Options, 'subCommands', [['my', None, self.MySubCommand, 'my, my']]) # and patch in the callback for it global subcommandFunction subcommandFunction = mock.Mock(name='subcommandFunction', return_value=3) def test_run_good(self): self.patch(sys, 'argv', ['buildbot', 'my']) try: runner.run() except SystemExit as e: self.assertEqual(e.args[0], 3) else: self.fail("didn't exit") def test_run_bad(self): self.patch(sys, 'argv', ['buildbot', 'my', '-l']) stdout = NativeStringIO() self.patch(sys, 'stdout', stdout) try: runner.run() except SystemExit as e: self.assertEqual(e.args[0], 1) else: self.fail("didn't exit") self.assertIn('THIS IS ME', stdout.getvalue()) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_sendchange.py000066400000000000000000000122671361162603000256450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.clients import sendchange as sendchange_client from buildbot.scripts import sendchange from buildbot.test.util import misc class TestSendChange(misc.StdoutAssertionsMixin, unittest.TestCase): class FakeSender: def __init__(self, testcase, master, auth, encoding=None): self.master = master self.auth = auth self.encoding = encoding self.testcase = testcase def send(self, branch, revision, comments, files, **kwargs): kwargs['branch'] = branch kwargs['revision'] = revision kwargs['comments'] = comments kwargs['files'] = files self.send_kwargs = kwargs d = defer.Deferred() if self.testcase.fail: reactor.callLater(0, d.errback, RuntimeError("oh noes")) else: reactor.callLater(0, d.callback, None) return d def setUp(self): self.fail = False # set to true to get Sender.send to fail def Sender_constr(*args, **kwargs): self.sender = self.FakeSender(self, *args, **kwargs) return self.sender self.patch(sendchange_client, 'Sender', Sender_constr) # undo the effects of @in_reactor self.patch(sendchange, 'sendchange', sendchange.sendchange._orig) self.setUpStdoutAssertions() @defer.inlineCallbacks def test_sendchange_config(self): rc = yield sendchange.sendchange(dict(encoding='utf16', who='me', auth=['a', 'b'], master='m', branch='br', category='cat', revision='rr', properties={'a': 'b'}, repository='rep', project='prj', vc='git', revlink='rl', when=1234.0, comments='comm', files=('a', 'b'), codebase='cb')) self.assertEqual((self.sender.master, self.sender.auth, self.sender.encoding, self.sender.send_kwargs, self.getStdout(), rc), ('m', ['a', 'b'], 'utf16', { 'branch': 'br', 'category': 'cat', 'codebase': 'cb', 'comments': 'comm', 'files': ('a', 'b'), 'project': 'prj', 'properties': {'a': 'b'}, 'repository': 'rep', 'revision': 'rr', 'revlink': 'rl', 'when': 1234.0, 'who': 'me', 'vc': 'git'}, 'change sent successfully', 0)) @defer.inlineCallbacks def 
test_sendchange_config_no_codebase(self): rc = yield sendchange.sendchange(dict(encoding='utf16', who='me', auth=['a', 'b'], master='m', branch='br', category='cat', revision='rr', properties={'a': 'b'}, repository='rep', project='prj', vc='git', revlink='rl', when=1234.0, comments='comm', files=('a', 'b'))) self.assertEqual((self.sender.master, self.sender.auth, self.sender.encoding, self.sender.send_kwargs, self.getStdout(), rc), ('m', ['a', 'b'], 'utf16', { 'branch': 'br', 'category': 'cat', 'codebase': None, 'comments': 'comm', 'files': ('a', 'b'), 'project': 'prj', 'properties': {'a': 'b'}, 'repository': 'rep', 'revision': 'rr', 'revlink': 'rl', 'when': 1234.0, 'who': 'me', 'vc': 'git'}, 'change sent successfully', 0)) @defer.inlineCallbacks def test_sendchange_fail(self): self.fail = True rc = yield sendchange.sendchange({}) self.assertEqual((self.getStdout().split('\n')[0], rc), ('change not sent:', 1)) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_start.py000066400000000000000000000107721361162603000247020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import time import mock import twisted from twisted.internet import defer from twisted.internet.utils import getProcessOutputAndValue from twisted.python import versions from twisted.trial import unittest from buildbot.scripts import start from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import flaky from buildbot.test.util.decorators import skipUnlessPlatformIs def mkconfig(**kwargs): config = { 'quiet': False, 'basedir': os.path.abspath('basedir'), 'nodaemon': False, } config.update(kwargs) return config fake_master_tac = """\ from twisted.application import service from twisted.internet import reactor from twisted.python import log application = service.Application('highscore') class App(service.Service): def startService(self): super().startService() log.msg("BuildMaster is running") # heh heh heh reactor.callLater(0, reactor.stop) app = App() app.setServiceParent(application) # isBuildmasterDir wants to see this -> Application('buildmaster') """ class TestStart(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write(fake_master_tac) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def test_start_not_basedir(self): self.assertEqual(start.start(mkconfig(basedir='doesntexist')), 1) self.assertInStdout('invalid buildmaster directory') def runStart(self, **config): args = [ '-c', 'from buildbot.scripts.start import start; import sys; ' 'sys.exit(start(%r))' % ( mkconfig(**config),), ] env = os.environ.copy() env['PYTHONPATH'] = os.pathsep.join(sys.path) return getProcessOutputAndValue(sys.executable, args=args, env=env) 
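# runStart() above shells out to a real child interpreter via
# getProcessOutputAndValue, which returns a Deferred firing with an
# (out, err, exitcode) tuple of bytes, bytes, int -- that is why the
# assertions below compare against b'' and integer return codes, and why
# PYTHONPATH is propagated so the child process can import buildbot.
# A minimal, self-contained sketch of the same pattern; the test case name
# is hypothetical and not part of Buildbot's suite:
import os
import sys

from twisted.internet import defer
from twisted.internet.utils import getProcessOutputAndValue
from twisted.trial import unittest


class IllustrativeSubprocessCase(unittest.TestCase):

    @defer.inlineCallbacks
    def test_child_exit_code(self):
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        out, err, code = yield getProcessOutputAndValue(
            sys.executable,
            args=['-c', 'import sys; sys.stdout.write("ok"); sys.exit(3)'],
            env=env)
        # non-zero exit codes arrive via the callback, not an errback
        self.assertEqual((out, err, code), (b'ok', b'', 3))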
@defer.inlineCallbacks def test_start_no_daemon(self): (_, err, rc) = yield self.runStart(nodaemon=True) self.assertEqual((err, rc), (b'', 0)) @defer.inlineCallbacks def test_start_quiet(self): res = yield self.runStart(quiet=True) self.assertEqual(res, (b'', b'', 0)) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start_timeout_nonnumber(self): (out, err, rc) = yield self.runStart(start_timeout='a') self.assertEqual((rc, err), (1, b'')) self.assertSubstring(b'Start timeout must be a number\n', out) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start_timeout_number_string(self): # integer values from command-line options come in as strings res = yield self.runStart(start_timeout='10') self.assertEqual(res, (mock.ANY, b'', 0)) @flaky(bugNumber=2760) @skipUnlessPlatformIs('posix') @defer.inlineCallbacks def test_start(self): try: (out, err, rc) = yield self.runStart() self.assertEqual((rc, err), (0, b'')) self.assertSubstring( 'buildmaster appears to have (re)started correctly', out) finally: # wait for the pidfile to go away after the reactor.stop # in buildbot.tac takes effect pidfile = os.path.join('basedir', 'twistd.pid') while os.path.exists(pidfile): time.sleep(0.01) if twisted.version <= versions.Version('twisted', 9, 0, 0): test_start.skip = test_start_quiet.skip = "Skipping due to suprious PotentialZombieWarning." # the remainder of this script does obscene things: # - forks # - shells out to tail # - starts and stops the reactor # so testing it will be *far* more pain than is worthwhile buildbot-2.6.0/master/buildbot/test/unit/test_scripts_stop.py000066400000000000000000000123071361162603000245260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import signal import time from twisted.trial import unittest from buildbot.scripts import stop from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util.decorators import skipUnlessPlatformIs def mkconfig(**kwargs): config = dict(quiet=False, clean=False, basedir=os.path.abspath('basedir')) config['no-wait'] = kwargs.pop('no_wait', False) config.update(kwargs) return config class TestStop(misc.StdoutAssertionsMixin, dirs.DirsMixin, unittest.TestCase): def setUp(self): self.setUpDirs('basedir') self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() # tests def do_test_stop(self, config, kill_sequence, is_running=True, **kwargs): with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f: f.write("Application('buildmaster')") if is_running: with open("basedir/twistd.pid", 'wt') as f: f.write('1234') def sleep(t): self.assertTrue(kill_sequence, "unexpected sleep: %d" % t) what, exp_t = kill_sequence.pop(0) self.assertEqual((what, exp_t), ('sleep', t)) self.patch(time, 'sleep', sleep) def kill(pid, signal): self.assertTrue(kill_sequence, "unexpected signal: %d" % signal) exp_sig, result = kill_sequence.pop(0) self.assertEqual((pid, signal), (1234, exp_sig)) if isinstance(result, Exception): raise result return result self.patch(os, 'kill', kill) rv = stop.stop(config, **kwargs) self.assertEqual(kill_sequence, []) return rv @skipUnlessPlatformIs('posix') def test_stop_not_running(self): rv = self.do_test_stop(mkconfig(no_wait=True), [], is_running=False) self.assertInStdout('not running') self.assertEqual(rv, 0) @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains(self): rv = self.do_test_stop(mkconfig(no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process'))]) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) self.assertInStdout('not running') @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains_quiet(self): rv = self.do_test_stop(mkconfig(quiet=True, no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process'))],) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) self.assertWasQuiet() @skipUnlessPlatformIs('posix') def test_stop_dead_but_pidfile_remains_wait(self): rv = self.do_test_stop(mkconfig(no_wait=True), [(signal.SIGTERM, OSError(3, 'No such process')) ], wait=True) self.assertEqual(rv, 0) self.assertFalse(os.path.exists(os.path.join('basedir', 'twistd.pid'))) @skipUnlessPlatformIs('posix') def test_stop_slow_death_wait(self): rv = self.do_test_stop(mkconfig(no_wait=True), [ (signal.SIGTERM, None), ('sleep', 0.1), (0, None), # polling.. 
('sleep', 1), (0, None), ('sleep', 1), (0, None), ('sleep', 1), (0, OSError(3, 'No such process')), ], wait=True) self.assertInStdout('is dead') self.assertEqual(rv, 0) @skipUnlessPlatformIs('posix') def test_stop_slow_death_wait_timeout(self): rv = self.do_test_stop(mkconfig(no_wait=True), [ (signal.SIGTERM, None), ('sleep', 0.1), ] + [(0, None), ('sleep', 1), ] * 10, wait=True) self.assertInStdout('never saw process') self.assertEqual(rv, 1) @skipUnlessPlatformIs('posix') def test_stop_slow_death_config_wait_timeout(self): rv = self.do_test_stop(mkconfig(), [ (signal.SIGTERM, None), ('sleep', 0.1), ] + [(0, None), ('sleep', 1), ] * 10, ) self.assertInStdout('never saw process') self.assertEqual(rv, 1) @skipUnlessPlatformIs('posix') def test_stop_clean(self): rv = self.do_test_stop(mkconfig(clean=True, no_wait=True), [ (signal.SIGUSR1, None), ], wait=False) self.assertInStdout('sent SIGUSR1 to process') self.assertEqual(rv, 0) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_trycmd.py000066400000000000000000000022451361162603000250430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.clients import tryclient from buildbot.scripts import trycmd class TestStatusLog(unittest.TestCase): def test_trycmd(self): Try = mock.Mock() self.patch(tryclient, 'Try', Try) inst = Try.return_value = mock.Mock(name='Try-instance') rc = trycmd.trycmd(dict(cfg=1)) Try.assert_called_with(dict(cfg=1)) inst.run.assert_called_with() self.assertEqual(rc, 0) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_tryserver.py000066400000000000000000000032221361162603000256020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import sys from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot.scripts import tryserver from buildbot.test.util import dirs class TestStatusLog(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.newdir = os.path.join('jobdir', 'new') self.tmpdir = os.path.join('jobdir', 'tmp') self.setUpDirs("jobdir", self.newdir, self.tmpdir) def test_trycmd(self): config = dict(jobdir='jobdir') inputfile = NativeStringIO('this is my try job') self.patch(sys, 'stdin', inputfile) rc = tryserver.tryserver(config) self.assertEqual(rc, 0) newfiles = os.listdir(self.newdir) tmpfiles = os.listdir(self.tmpdir) self.assertEqual((len(newfiles), len(tmpfiles)), (1, 0)) with open(os.path.join(self.newdir, newfiles[0]), 'rt') as f: self.assertEqual(f.read(), 'this is my try job') buildbot-2.6.0/master/buildbot/test/unit/test_scripts_upgrade_master.py000066400000000000000000000202601361162603000265400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys import mock from twisted.internet import defer from twisted.python.compat import NativeStringIO from twisted.trial import unittest from buildbot import config as config_module from buildbot.db import connector from buildbot.db import masters from buildbot.db import model from buildbot.scripts import base from buildbot.scripts import upgrade_master from buildbot.test.util import dirs from buildbot.test.util import misc from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin def mkconfig(**kwargs): config = dict(quiet=False, replace=False, basedir='test') config.update(kwargs) return config class TestUpgradeMaster(dirs.DirsMixin, misc.StdoutAssertionsMixin, unittest.TestCase): def setUp(self): # createMaster is decorated with @in_reactor, so strip that decoration # since the master is already running self.patch(upgrade_master, 'upgradeMaster', upgrade_master.upgradeMaster._orig) self.setUpDirs('test') self.setUpStdoutAssertions() def patchFunctions(self, basedirOk=True, configOk=True): self.calls = [] def checkBasedir(config): self.calls.append('checkBasedir') return basedirOk self.patch(base, 'checkBasedir', checkBasedir) def loadConfig(config, configFileName='master.cfg'): self.calls.append('loadConfig') return config_module.MasterConfig() if configOk else False self.patch(base, 'loadConfig', loadConfig) def upgradeFiles(config): self.calls.append('upgradeFiles') self.patch(upgrade_master, 'upgradeFiles', upgradeFiles) def upgradeDatabase(config, master_cfg): self.assertIsInstance(master_cfg, config_module.MasterConfig) self.calls.append('upgradeDatabase') self.patch(upgrade_master, 'upgradeDatabase', upgradeDatabase) # tests @defer.inlineCallbacks def test_upgradeMaster_success(self): self.patchFunctions() rv = yield 
upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 0) self.assertInStdout('upgrade complete') @defer.inlineCallbacks def test_upgradeMaster_quiet(self): self.patchFunctions() rv = yield upgrade_master.upgradeMaster(mkconfig(quiet=True), _noMonkey=True) self.assertEqual(rv, 0) self.assertWasQuiet() @defer.inlineCallbacks def test_upgradeMaster_bad_basedir(self): self.patchFunctions(basedirOk=False) rv = yield upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 1) @defer.inlineCallbacks def test_upgradeMaster_bad_config(self): self.patchFunctions(configOk=False) rv = yield upgrade_master.upgradeMaster(mkconfig(), _noMonkey=True) self.assertEqual(rv, 1) class TestUpgradeMasterFunctions(www.WwwTestMixin, dirs.DirsMixin, misc.StdoutAssertionsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpDirs('test') self.basedir = os.path.abspath(os.path.join('test', 'basedir')) self.setUpStdoutAssertions() def tearDown(self): self.tearDownDirs() def writeFile(self, path, contents): with open(path, 'wt') as f: f.write(contents) def readFile(self, path): with open(path, 'rt') as f: return f.read() # tests def test_installFile(self): self.writeFile('test/srcfile', 'source data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'source data') self.assertInStdout('creating test/destfile') def test_installFile_existing_differing(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'dest data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'dest data') self.assertEqual(self.readFile('test/destfile.new'), 'source data') self.assertInStdout('writing new contents to') def test_installFile_existing_differing_overwrite(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'dest data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile', overwrite=True) self.assertEqual(self.readFile('test/destfile'), 'source data') self.assertFalse(os.path.exists('test/destfile.new')) self.assertInStdout('overwriting') def test_installFile_existing_same(self): self.writeFile('test/srcfile', 'source data') self.writeFile('test/destfile', 'source data') upgrade_master.installFile(mkconfig(), 'test/destfile', 'test/srcfile') self.assertEqual(self.readFile('test/destfile'), 'source data') self.assertFalse(os.path.exists('test/destfile.new')) self.assertWasQuiet() def test_installFile_quiet(self): self.writeFile('test/srcfile', 'source data') upgrade_master.installFile(mkconfig(quiet=True), 'test/destfile', 'test/srcfile') self.assertWasQuiet() def test_upgradeFiles(self): upgrade_master.upgradeFiles(mkconfig()) for f in [ 'test/master.cfg.sample', ]: self.assertTrue(os.path.exists(f), "%s not found" % f) self.assertInStdout('upgrading basedir') def test_upgradeFiles_notice_about_unused_public_html(self): os.mkdir('test/public_html') self.writeFile('test/public_html/index.html', 'INDEX') upgrade_master.upgradeFiles(mkconfig()) self.assertInStdout('public_html is not used') @defer.inlineCallbacks def test_upgradeDatabase(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) upgrade = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(model.Model, 'upgrade', upgrade) setAllMastersActiveLongTimeAgo = mock.Mock( side_effect=lambda 
**kwargs: defer.succeed(None)) self.patch(masters.MastersConnectorComponent, 'setAllMastersActiveLongTimeAgo', setAllMastersActiveLongTimeAgo) yield upgrade_master.upgradeDatabase( mkconfig(basedir='test', quiet=True), config_module.MasterConfig()) setup.asset_called_with(check_version=False, verbose=False) upgrade.assert_called_with() self.assertWasQuiet() @defer.inlineCallbacks def test_upgradeDatabaseFail(self): setup = mock.Mock(side_effect=lambda **kwargs: defer.succeed(None)) self.patch(connector.DBConnector, 'setup', setup) self.patch(sys, 'stderr', NativeStringIO()) upgrade = mock.Mock( side_effect=lambda **kwargs: defer.fail(Exception("o noz"))) self.patch(model.Model, 'upgrade', upgrade) ret = yield upgrade_master._upgradeMaster( mkconfig(basedir='test', quiet=True), config_module.MasterConfig()) self.assertEqual(ret, 1) self.assertIn("problem while upgrading!:\nTraceback (most recent call last):\n", sys.stderr.getvalue()) self.assertIn("o noz", sys.stderr.getvalue()) buildbot-2.6.0/master/buildbot/test/unit/test_scripts_user.py000066400000000000000000000077241361162603000245260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest from buildbot.clients import usersclient from buildbot.process.users import users from buildbot.scripts import user class TestUsersClient(unittest.TestCase): class FakeUsersClient: def __init__(self, master, username="user", passwd="userpw", port=0): self.master = master self.port = port self.username = username self.passwd = passwd self.fail = False def send(self, op, bb_username, bb_password, ids, info): self.op = op self.bb_username = bb_username self.bb_password = bb_password self.ids = ids self.info = info d = defer.Deferred() if self.fail: reactor.callLater(0, d.errback, RuntimeError("oh noes")) else: reactor.callLater(0, d.callback, None) return d def setUp(self): def fake_UsersClient(*args): self.usersclient = self.FakeUsersClient(*args) return self.usersclient self.patch(usersclient, 'UsersClient', fake_UsersClient) # un-do the effects of @in_reactor self.patch(user, 'user', user.user._orig) @defer.inlineCallbacks def test_usersclient_send_ids(self): yield user.user(dict(master='a:9990', username="x", passwd="y", op='get', bb_username=None, bb_password=None, ids=['me', 'you'], info=None)) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.ids, c.info), ('a', 9990, "x", "y", 'get', ['me', 'you'], None)) @defer.inlineCallbacks def test_usersclient_send_update_info(self): def _fake_encrypt(passwd): assert passwd == 'day' return 'ENCRY' self.patch(users, 'encrypt', _fake_encrypt) yield user.user(dict(master='a:9990', username="x", passwd="y", op='update', bb_username='bud', bb_password='day', ids=None, info=[{'identifier': 'x', 'svn': 'x'}])) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.bb_username, c.bb_password, c.ids, c.info), ('a', 9990, "x", "y", 'update', 'bud', 'ENCRY', None, [{'identifier': 'x', 'svn': 'x'}])) @defer.inlineCallbacks def test_usersclient_send_add_info(self): yield user.user(dict(master='a:9990', username="x", passwd="y", op='add', bb_username=None, bb_password=None, ids=None, info=[{'git': 'x ', 'irc': 'aaa'}])) c = self.usersclient self.assertEqual((c.master, c.port, c.username, c.passwd, c.op, c.bb_username, c.bb_password, c.ids, c.info), ('a', 9990, "x", "y", 'add', None, None, None, [{'identifier': 'aaa', 'git': 'x ', 'irc': 'aaa'}])) buildbot-2.6.0/master/buildbot/test/unit/test_secret_in_file.py000066400000000000000000000104731361162603000247460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import os import stat from twisted.internet import defer from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.secrets.providers.file import SecretInAFile from buildbot.test.util.config import ConfigErrorsMixin from buildbot.util.misc import writeLocalFile class TestSecretInFile(ConfigErrorsMixin, unittest.TestCase): def createTempDir(self, dirname): tempdir = FilePath(self.mktemp()) tempdir.createDirectory() return tempdir.path def createFileTemp(self, tempdir, filename, text="", chmodRights=0o700): file_path = os.path.join(tempdir, filename) writeLocalFile(file_path, text, chmodRights) return file_path @defer.inlineCallbacks def setUp(self): self.tmp_dir = self.createTempDir("temp") self.filepath = self.createFileTemp(self.tmp_dir, "tempfile.txt", text="key value\n") self.srvfile = SecretInAFile(self.tmp_dir) yield self.srvfile.startService() @defer.inlineCallbacks def tearDown(self): yield self.srvfile.stopService() def testCheckConfigSecretInAFileService(self): self.assertEqual(self.srvfile.name, "SecretInAFile") self.assertEqual(self.srvfile._dirname, self.tmp_dir) def testCheckConfigErrorSecretInAFileService(self): if os.name != "posix": self.skipTest("Permission checks only works on posix systems") filepath = self.createFileTemp(self.tmp_dir, "tempfile2.txt", chmodRights=stat.S_IRGRP) expctd_msg_error = " on file tempfile2.txt are too " \ "open. It is required that your secret files are" \ " NOT accessible by others!" with self.assertRaisesConfigError(expctd_msg_error): self.srvfile.checkConfig(self.tmp_dir) os.remove(filepath) @defer.inlineCallbacks def testCheckConfigfileExtension(self): filepath = self.createFileTemp(self.tmp_dir, "tempfile2.ini", text="test suffix", chmodRights=stat.S_IRWXU) filepath2 = self.createFileTemp(self.tmp_dir, "tempfile2.txt", text="some text", chmodRights=stat.S_IRWXU) yield self.srvfile.reconfigService(self.tmp_dir, suffixes=[".ini"]) self.assertEqual(self.srvfile.get("tempfile2"), "test suffix") self.assertEqual(self.srvfile.get("tempfile3"), None) os.remove(filepath) os.remove(filepath2) @defer.inlineCallbacks def testReconfigSecretInAFileService(self): otherdir = self.createTempDir("temp2") yield self.srvfile.reconfigService(otherdir) self.assertEqual(self.srvfile.name, "SecretInAFile") self.assertEqual(self.srvfile._dirname, otherdir) def testGetSecretInFile(self): value = self.srvfile.get("tempfile.txt") self.assertEqual(value, "key value") @defer.inlineCallbacks def testGetSecretInFileSuffixes(self): yield self.srvfile.reconfigService(self.tmp_dir, suffixes=[".txt"]) value = self.srvfile.get("tempfile") self.assertEqual(value, "key value") def testGetSecretInFileNotFound(self): value = self.srvfile.get("tempfile2.txt") self.assertEqual(value, None) @defer.inlineCallbacks def testGetSecretInFileNoStrip(self): yield self.srvfile.reconfigService(self.tmp_dir, strip=False) value = self.srvfile.get("tempfile.txt") self.assertEqual(value, "key value\n") buildbot-2.6.0/master/buildbot/test/unit/test_secret_in_passwordstore.py000066400000000000000000000104511361162603000267420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from pathlib import Path from unittest.mock import patch from twisted.internet import defer from twisted.internet import utils from twisted.python.filepath import FilePath from twisted.trial import unittest from buildbot.secrets.providers.passwordstore import SecretInPass from buildbot.test.util.config import ConfigErrorsMixin class TestSecretInPass(ConfigErrorsMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): with patch.object(Path, "is_file", return_value=True): self.tmp_dir = self.create_temp_dir("temp") self.srvpass = SecretInPass("password", self.tmp_dir) yield self.srvpass.startService() @defer.inlineCallbacks def tearDown(self): yield self.srvpass.stopService() def create_temp_dir(self, dirname): tempdir = FilePath(self.mktemp()) tempdir.createDirectory() return tempdir.path def test_check_config_secret_in_pass_service(self): self.assertEqual(self.srvpass.name, "SecretInPass") env = self.srvpass._env self.assertEquals(env["PASSWORD_STORE_GPG_OPTS"], "--passphrase password") self.assertEquals(env["PASSWORD_STORE_DIR"], self.tmp_dir) def test_check_config_binary_error_secret_in_pass_service(self): expected_error_msg = "pass does not exist in PATH" with patch.object(Path, "is_file", return_value=False): with self.assertRaisesConfigError(expected_error_msg): self.srvpass.checkConfig("password", "temp") def test_check_config_directory_error_secret_in_pass_service(self): expected_error_msg = "directory temp2 does not exist" with patch.object(Path, "is_file", return_value=True): with self.assertRaisesConfigError(expected_error_msg): self.srvpass.checkConfig("password", "temp2") @defer.inlineCallbacks def test_reconfig_secret_in_a_file_service(self): with patch.object(Path, "is_file", return_value=True): otherdir = self.create_temp_dir("temp2") yield self.srvpass.reconfigService("password2", otherdir) self.assertEqual(self.srvpass.name, "SecretInPass") env = self.srvpass._env self.assertEquals(env["PASSWORD_STORE_GPG_OPTS"], "--passphrase password2") self.assertEquals(env["PASSWORD_STORE_DIR"], otherdir) @defer.inlineCallbacks def test_get_secret_in_pass(self): with patch.object(utils, "getProcessOutput", return_value=b"value"): value = yield self.srvpass.get("secret") self.assertEqual(value, "value") @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_unix(self): with patch.object(utils, "getProcessOutput", return_value=b"value1\nvalue2\nvalue3"): value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_darwin(self): with patch.object(utils, "getProcessOutput", return_value=b"value1\rvalue2\rvalue3"): value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") @defer.inlineCallbacks def test_get_secret_in_pass_multiple_lines_windows(self): with patch.object(utils, "getProcessOutput", return_value=b"value1\r\nvalue2\r\nvalue3"): value = yield self.srvpass.get("secret") self.assertEqual(value, "value1") @defer.inlineCallbacks def test_get_secret_in_pass_not_found(self): with 
patch.object(utils, "getProcessOutput", side_effect=IOError()): value = yield self.srvpass.get("secret") self.assertEqual(value, None) buildbot-2.6.0/master/buildbot/test/unit/test_secret_in_vault.py000066400000000000000000000136151361162603000251630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.secrets.providers.vault import HashiCorpVaultSecretProvider from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin class TestSecretInVaultHttpFakeBase(ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self, version): self.setUpTestReactor() self.srvcVault = HashiCorpVaultSecretProvider(vaultServer="http://vaultServer", vaultToken="someToken", apiVersion=version) self.master = fakemaster.make_master(self, wantData=True) self._http = self.successResultOf( fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'http://vaultServer', headers={'X-Vault-Token': "someToken"})) yield self.srvcVault.setServiceParent(self.master) self.successResultOf(self.master.startService()) @defer.inlineCallbacks def tearDown(self): yield self.srvcVault.stopService() class TestSecretInVaultV1(TestSecretInVaultHttpFakeBase): def setUp(self): super().setUp(version=1) @defer.inlineCallbacks def testGetValue(self): self._http.expect(method='get', ep='/v1/secret/value', params=None, data=None, json=None, code=200, content_json={"data": {"value": "value1"}}) value = yield self.srvcVault.get("value") self.assertEqual(value, "value1") @defer.inlineCallbacks def testGetValueNotFound(self): self._http.expect(method='get', ep='/v1/secret/value', params=None, data=None, json=None, code=200, content_json={"data": {"valueNotFound": "value1"}}) value = yield self.srvcVault.get("value") self.assertEqual(value, None) @defer.inlineCallbacks def testGetError(self): self._http.expect(method='get', ep='/v1/secret/valueNotFound', params=None, data=None, json=None, code=404, content_json={"data": {"valueNotFound": "value1"}}) yield self.assertFailure(self.srvcVault.get("valueNotFound"), KeyError) def testCheckConfigSecretInVaultService(self): self.assertEqual(self.srvcVault.name, "SecretInVault") self.assertEqual(self.srvcVault.vaultServer, "http://vaultServer") self.assertEqual(self.srvcVault.vaultToken, "someToken") def testCheckConfigErrorSecretInVaultService(self): with self.assertRaisesConfigError( "vaultServer must be a string while it is"): self.srvcVault.checkConfig() def testCheckConfigErrorSecretInVaultServiceWrongServerAddress(self): with self.assertRaisesConfigError( "vaultToken must be a string while it is"): 
self.srvcVault.checkConfig(vaultServer="serveraddr") def test_check_config_error_apiVersion_unsupported(self): with self.assertRaisesConfigError( "apiVersion 0 is not supported"): self.srvcVault.checkConfig(vaultServer="serveraddr", vaultToken="vaultToken", apiVersion=0) @defer.inlineCallbacks def testReconfigSecretInVaultService(self): self._http = self.successResultOf( fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'serveraddr', headers={'X-Vault-Token': "someToken"})) yield self.srvcVault.reconfigService(vaultServer="serveraddr", vaultToken="someToken") self.assertEqual(self.srvcVault.vaultServer, "serveraddr") self.assertEqual(self.srvcVault.vaultToken, "someToken") class TestSecretInVaultV2(TestSecretInVaultHttpFakeBase): def setUp(self): super().setUp(version=2) @defer.inlineCallbacks def testGetValue(self): self._http.expect(method='get', ep='/v1/secret/data/value', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"value": "value1"}}}) value = yield self.srvcVault.get("value") self.assertEqual(value, "value1") @defer.inlineCallbacks def testGetValueNotFound(self): self._http.expect(method='get', ep='/v1/secret/data/value', params=None, data=None, json=None, code=200, content_json={"data": {"data": {"valueNotFound": "value1"}}}) value = yield self.srvcVault.get("value") self.assertEqual(value, None) @defer.inlineCallbacks def testGetError(self): self._http.expect(method='get', ep='/v1/secret/data/valueNotFound', params=None, data=None, json=None, code=404, content_json={"data": {"data": {"valueNotFound": "value1"}}}) yield self.assertFailure(self.srvcVault.get("valueNotFound"), KeyError) buildbot-2.6.0/master/buildbot/test/unit/test_secret_rendered_service.py000066400000000000000000000041571361162603000266530ustar00rootroot00000000000000from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Secret from buildbot.secrets.manager import SecretManager from buildbot.test.fake import fakemaster from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util.misc import TestReactorMixin from buildbot.util.service import BuildbotService class FakeServiceUsingSecrets(BuildbotService): name = "FakeServiceUsingSecrets" secrets = ["foo", "bar", "secret"] def reconfigService(self, foo=None, bar=None, secret=None, other=None): self.foo = foo self.bar = bar self.secret = secret def returnRenderedSecrets(self, secretKey): try: return getattr(self, secretKey) except Exception: raise Exception class TestRenderSecrets(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) fakeStorageService = FakeSecretStorage(secretdict={"foo": "bar", "other": "value"}) self.secretsrv = SecretManager() self.secretsrv.services = [fakeStorageService] yield self.secretsrv.setServiceParent(self.master) self.srvtest = FakeServiceUsingSecrets() yield self.srvtest.setServiceParent(self.master) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_secret_rendered(self): yield self.srvtest.configureService() new = FakeServiceUsingSecrets(foo=Secret("foo"), other=Secret("other")) yield self.srvtest.reconfigServiceWithSibling(new) self.assertEqual("bar", self.srvtest.returnRenderedSecrets("foo")) @defer.inlineCallbacks def test_secret_rendered_not_found(self): new = FakeServiceUsingSecrets(foo=Secret("foo")) yield 
self.srvtest.reconfigServiceWithSibling(new) with self.assertRaises(Exception): self.srvtest.returnRenderedSecrets("more") buildbot-2.6.0/master/buildbot/test/unit/test_stats_service.py000066400000000000000000000524231361162603000246530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import threads from twisted.trial import unittest from buildbot import config from buildbot.errors import CaptureCallbackError from buildbot.statistics import capture from buildbot.statistics import storage_backends from buildbot.statistics.storage_backends.base import StatsStorageBase from buildbot.statistics.storage_backends.influxdb_client import InfluxStorageService from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import fakestats from buildbot.test.util import logging from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestStatsServicesBase(TestReactorMixin, unittest.TestCase): BUILDER_NAMES = ['builder1', 'builder2'] BUILDER_IDS = [1, 2] def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantData=True, wantDb=True) for builderid, name in zip(self.BUILDER_IDS, self.BUILDER_NAMES): self.master.db.builders.addTestBuilder( builderid=builderid, name=name) self.stats_service = fakestats.FakeStatsService(master=self.master, storage_backends=[ fakestats.FakeStatsStorageService() ], name="FakeStatsService") self.stats_service.startService() def tearDown(self): self.stats_service.stopService() class DummyStatsStorageBase(StatsStorageBase): """ A dummy class to test initialization of StatsStorageBase. """ def thd_postStatsValue(self, *args, **kwargs): return defer.succeed(None) class TestStatsServicesConfiguration(TestStatsServicesBase): def test_reconfig_with_no_storage_backends(self): new_storage_backends = [] self.stats_service.reconfigService(new_storage_backends) self.checkEqual(new_storage_backends) def test_reconfig_with_fake_storage_backend(self): new_storage_backends = [ fakestats.FakeStatsStorageService(name='One'), fakestats.FakeStatsStorageService(name='Two') ] self.stats_service.reconfigService(new_storage_backends) self.checkEqual(new_storage_backends) def test_reconfig_with_consumers(self): backend = fakestats.FakeStatsStorageService(name='One') backend.captures = [capture.CaptureProperty('test_builder', 'test')] new_storage_backends = [backend] self.stats_service.reconfigService(new_storage_backends) self.stats_service.reconfigService(new_storage_backends) self.assertEqual(len(self.master.mq.qrefs), 1) def test_bad_configuration(self): # Reconfigure with a bad configuration. 
new_storage_backends = [mock.Mock()] with self.assertRaises(TypeError): self.stats_service.reconfigService(new_storage_backends) def checkEqual(self, new_storage_backends): # Check whether the new_storage_backends was set in reconfigService registeredStorageServices = \ [s for s in self.stats_service.registeredStorageServices if isinstance(s, StatsStorageBase)] for s in new_storage_backends: if s not in registeredStorageServices: raise AssertionError("reconfigService failed." "Not all storage services registered.") class TestInfluxDB(TestStatsServicesBase, logging.LoggingMixin): # Smooth test of influx db service. We don't want to force people to install influxdb, so we # just disable this unit test if the influxdb module is not installed, # using SkipTest def test_influxdb_not_installed(self): captures = [capture.CaptureProperty('test_builder', 'test')] try: # Try to import import influxdb # pylint: disable=import-outside-toplevel # consume it somehow to please pylint [influxdb] except ImportError: with self.assertRaises(config.ConfigErrors): InfluxStorageService("fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures) # if instead influxdb is installed, then initialize it - no errors # should be realized else: new_storage_backends = [ InfluxStorageService("fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures) ] self.stats_service.reconfigService(new_storage_backends) def test_influx_storage_service_fake_install(self): # use a fake InfluxDBClient to test InfluxStorageService in systems which # don't have influxdb installed. Primarily useful for test coverage. self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) captures = [capture.CaptureProperty('test_builder', 'test')] new_storage_backends = [InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", captures )] self.stats_service.reconfigService(new_storage_backends) def test_influx_storage_service_post_value(self): # test the thd_postStatsValue method of InfluxStorageService self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) svc = InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", "fake_stats") post_data = { 'name': 'test', 'value': 'test' } context = {'x': 'y'} svc.thd_postStatsValue(post_data, "test_series_name", context) data = { 'measurement': "test_series_name", 'fields': { "name": "test", "value": "test" }, 'tags': {'x': 'y'} } points = [data] self.assertEqual(svc.client.points, points) def test_influx_service_not_inited(self): self.setUpLogging() self.patch(storage_backends.influxdb_client, 'InfluxDBClient', fakestats.FakeInfluxDBClient) svc = InfluxStorageService( "fake_url", "fake_port", "fake_user", "fake_password", "fake_db", "fake_stats") svc._inited = False svc.thd_postStatsValue("test", "test", "test") self.assertLogged("Service.*not initialized") def test_storage_backend_base_failure_on_init(self): svc = DummyStatsStorageBase() r = svc.thd_postStatsValue("test", "test", "test") assert isinstance(r, defer.Deferred) assert r.result is None class TestStatsServicesConsumers(steps.BuildStepMixin, TestStatsServicesBase): """ Test the stats service from a fake step """ def setUp(self): super().setUp() self.routingKey = ( "builders", self.BUILDER_IDS[0], "builds", 1, "finished") self.master.mq.verifyMessages = False self.patch(threads, 'deferToThread', self.identity) def setupBuild(self): self.master.db.insertTestData([ fakedb.Build(id=1, 
masterid=1, workerid=1, builderid=self.BUILDER_IDS[0], buildrequestid=1, number=1), ]) def setupFakeStorage(self, captures): self.fake_storage_service = fakestats.FakeStatsStorageService() self.fake_storage_service.captures = captures self.stats_service.reconfigService([self.fake_storage_service]) def get_dict(self, build): return dict( buildid=1, number=build['number'], builderid=build['builderid'], buildrequestid=build['buildrequestid'], workerid=build['workerid'], masterid=build['masterid'], started_at=build['started_at'], complete=True, complete_at=build['complete_at'], state_string='', results=0, ) @defer.inlineCallbacks def end_build_call_consumers(self): self.master.db.builds.finishBuild(buildid=1, results=0) build = yield self.master.db.builds.getBuild(buildid=1) self.master.mq.callConsumer(self.routingKey, self.get_dict(build)) @staticmethod def identity(f, *args, **kwargs): return f(*args, **kwargs) @defer.inlineCallbacks def test_property_capturing(self): self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_name')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_all_builders(self): self.setupFakeStorage( [capture.CapturePropertyAllBuilders('test_name')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_regex(self): self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_n.*', regex=True)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_property_capturing_error(self): self.setupFakeStorage([capture.CaptureProperty('builder1', 'test')]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') self.master.db.builds.finishBuild(buildid=1, results=0) build = yield self.master.db.builds.getBuild(buildid=1) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) @defer.inlineCallbacks def test_property_capturing_alt_callback(self): def cb(*args, **kwargs): return 'test_value' self.setupFakeStorage( [capture.CaptureProperty('builder1', 'test_name', cb)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') yield self.end_build_call_consumers() self.assertEqual([( {'name': 'test_name', 'value': 'test_value'}, 'builder1-test_name', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_build_start_time_capturing(self): self.setupFakeStorage([capture.CaptureBuildStartTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', 
list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_start_time_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildStartTimeAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_start_time_capturing_alt_callback(self): def cb(*args, **kwargs): return '2015-07-08T01:45:17.391018' self.setupFakeStorage([capture.CaptureBuildStartTime('builder1', cb)]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'start-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_end_time_capturing(self): self.setupFakeStorage([capture.CaptureBuildEndTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_end_time_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildEndTimeAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_end_time_capturing_alt_callback(self): def cb(*args, **kwargs): return '2015-07-08T01:45:17.391018' self.setupFakeStorage([capture.CaptureBuildEndTime('builder1', cb)]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'end-time', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def build_time_capture_helper(self, time_type, cb=None): self.setupFakeStorage([capture.CaptureBuildDuration('builder1', report_in=time_type, callback=cb)]) self.setupBuild() yield self.end_build_call_consumers() @defer.inlineCallbacks def test_build_duration_capturing_seconds(self): yield self.build_time_capture_helper('seconds') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_minutes(self): yield self.build_time_capture_helper('minutes') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_hours(self): yield self.build_time_capture_helper('hours') self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) def test_build_duration_report_in_error(self): with self.assertRaises(config.ConfigErrors): capture.CaptureBuildDuration('builder1', report_in='foobar') @defer.inlineCallbacks def test_build_duration_capturing_alt_callback(self): def cb(*args, **kwargs): return 10 yield self.build_time_capture_helper('seconds', cb) self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_duration_capturing_all_builders(self): self.setupFakeStorage([capture.CaptureBuildDurationAllBuilders()]) self.setupBuild() yield self.end_build_call_consumers() self.assertEqual( 'duration', list(self.fake_storage_service.stored_data[0][0].keys())[0]) @defer.inlineCallbacks def test_build_times_capturing_error(self): def cb(*args, **kwargs): raise TypeError self.setupFakeStorage([capture.CaptureBuildStartTime('builder1', cb)]) self.setupBuild() self.master.db.builds.setBuildProperty( 1, 'test_name', 'test_value', 'test_source') self.master.db.builds.finishBuild(buildid=1, results=0) build = yield 
self.master.db.builds.getBuild(buildid=1) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) self.setupFakeStorage([capture.CaptureBuildEndTime('builder1', cb)]) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) self.setupFakeStorage( [capture.CaptureBuildDuration('builder1', callback=cb)]) cap = self.fake_storage_service.captures[0] yield self.assertFailure(cap.consume(self.routingKey, self.get_dict(build)), CaptureCallbackError) @defer.inlineCallbacks def test_yield_metrics_value(self): self.setupFakeStorage([capture.CaptureBuildStartTime('builder1')]) self.setupBuild() yield self.end_build_call_consumers() yield self.stats_service.yieldMetricsValue('test', {'test': 'test'}, 1) build_data = yield self.stats_service.master.data.get(('builds', 1)) routingKey = ("stats-yieldMetricsValue", "stats-yield-data") msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } exp = [(routingKey, msg)] self.stats_service.master.mq.assertProductions(exp) @defer.inlineCallbacks def test_capture_data(self): self.setupFakeStorage([capture.CaptureData('test', 'builder1')]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_all_builders(self): self.setupFakeStorage([capture.CaptureDataAllBuilders('test')]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_alt_callback(self): def cb(*args, **kwargs): return {'test': 'test'} self.setupFakeStorage([capture.CaptureData('test', 'builder1', cb)]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") self.master.mq.callConsumer(routingKey, msg) self.assertEqual([( {'test': 'test'}, 'builder1-test', {'build_number': '1', 'builder_name': 'builder1'} )], self.fake_storage_service.stored_data) @defer.inlineCallbacks def test_capture_data_error(self): def cb(*args, **kwargs): raise TypeError self.setupFakeStorage([capture.CaptureData('test', 'builder1', cb)]) self.setupBuild() self.master.db.builds.finishBuild(buildid=1, results=0) build_data = yield self.stats_service.master.data.get(('builds', 1)) msg = { 'data_name': 'test', 'post_data': {'test': 'test'}, 'build_data': build_data } routingKey = ("stats-yieldMetricsValue", "stats-yield-data") cap = self.fake_storage_service.captures[0] 
yield self.assertFailure(cap.consume(routingKey, msg), CaptureCallbackError) buildbot-2.6.0/master/buildbot/test/unit/test_steps_cmake.py000066400000000000000000000117711361162603000242740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.config import ConfigErrors from buildbot.process.properties import Property from buildbot.process.results import SUCCESS from buildbot.steps.cmake import CMake from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.steps import BuildStepMixin class TestCMake(BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuildStep() def tearDown(self): self.tearDownBuildStep() def expect_and_run_command(self, *params): command = [CMake.DEFAULT_CMAKE] + list(params) self.expectCommands( ExpectShell(command=command, workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions_type(self): with self.assertRaises(ConfigErrors): CMake(definitions='hello') def test_options_type(self): with self.assertRaises(ConfigErrors): CMake(options='hello') def test_plain(self): self.setupStep(CMake()) self.expectCommands( ExpectShell(command=[CMake.DEFAULT_CMAKE], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_cmake(self): cmake_bin = 'something/else/cmake' self.setupStep(CMake(cmake=cmake_bin)) self.expectCommands( ExpectShell(command=[cmake_bin], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_cmake_interpolation(self): prop = 'CMAKE' value = 'Real_CMAKE' self.setupStep(CMake(cmake=Property(prop))) self.properties.setProperty(prop, value, source='test') self.expectCommands( ExpectShell(command=[value], workdir='wkdir') + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions(self): definition = { 'a': 'b' } self.setupStep(CMake(definitions=definition)) self.expect_and_run_command('-D%s=%s' % list(definition.items())[0]) def test_environment(self): command = [CMake.DEFAULT_CMAKE] environment = {'a': 'b'} self.setupStep(CMake(env=environment)) self.expectCommands( ExpectShell( command=command, workdir='wkdir', env={'a': 'b'}) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_definitions_interpolation(self): b_value = 'real_b' definitions = { 'a': Property('b') } self.setupStep(CMake(definitions=definitions)) self.properties.setProperty('b', b_value, source='test') self.expect_and_run_command('-D%s=%s' % ('a', b_value)) def test_definitions_renderable(self): b_value = 'real_b' definitions = Property('b') self.setupStep(CMake(definitions=definitions)) self.properties.setProperty('b', {'a': b_value}, source='test') self.expect_and_run_command('-D%s=%s' % ('a', b_value)) def 
test_generator(self): generator = 'Ninja' self.setupStep(CMake(generator=generator)) self.expect_and_run_command('-G', generator) def test_generator_interpolation(self): value = 'Our_GENERATOR' self.setupStep(CMake(generator=Property('GENERATOR'))) self.properties.setProperty('GENERATOR', value, source='test') self.expect_and_run_command('-G', value) def test_options(self): options = ('A', 'B') self.setupStep(CMake(options=options)) self.expect_and_run_command(*options) def test_options_interpolation(self): prop = 'option' value = 'value' self.setupStep(CMake(options=(Property(prop),))) self.properties.setProperty(prop, value, source='test') self.expect_and_run_command(value) def test_path(self): path = 'some/path' self.setupStep(CMake(path=path)) self.expect_and_run_command(path) def test_path_interpolation(self): prop = 'path' value = 'some/path' self.setupStep(CMake(path=Property(prop))) self.properties.setProperty(prop, value, source='test') self.expect_and_run_command(value) buildbot-2.6.0/master/buildbot/test/unit/test_steps_cppcheck.py000066400000000000000000000075171361162603000247770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
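# ---- Editor's illustrative sketch (not part of the original module) --------
# The TestCMake cases above show how the step assembles its command line from
# the generator/definitions/options/path keywords.  A hypothetical
# BuildFactory fragment (all names and values invented for illustration):
from buildbot.process.factory import BuildFactory
from buildbot.steps.cmake import CMake

factory = BuildFactory()
factory.addStep(CMake(
    generator='Ninja',                            # -> ['-G', 'Ninja']
    definitions={'CMAKE_BUILD_TYPE': 'Release'},  # -> ['-DCMAKE_BUILD_TYPE=Release']
    options=['--warn-uninitialized'],
    path='../src',
))
# ---- end of editor's sketch -------------------------------------------------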
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.properties import WithProperties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import cppcheck from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class Cppcheck(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(cppcheck.Cppcheck(enable=['all'], inconclusive=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', '.', '--enable=all', '--inconclusive']) + ExpectShell.log('stdio', stdout='Checking file1.c...') + 0) self.expectOutcome(result=SUCCESS, state_string="cppcheck") return self.runStep() def test_warnings(self): self.setupStep( cppcheck.Cppcheck(source=['file1.c'], enable=['warning', 'performance'])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', 'file1.c', '--enable=warning,performance']) + ExpectShell.log( 'stdio', stdout=('Checking file1.c...\n' '[file1.c:3]: (warning) Logical disjunction always evaluates to true: t >= 0 || t < 65.\n' '(information) Cppcheck cannot find all the include files (use --check-config for details)')) + 0) self.expectOutcome(result=WARNINGS, state_string="cppcheck warning=1 information=1 (warnings)") return self.runStep() def test_errors(self): self.setupStep(cppcheck.Cppcheck(extra_args=['--my-param=5'])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'cppcheck', '.', '--my-param=5']) + ExpectShell.log( 'stdio', stdout=('Checking file1.c...\n' '[file1.c:3]: (error) Possible null pointer dereference: filter\n' '[file1.c:4]: (error) Memory leak: columns\n' "[file1.c:7]: (style) The scope of the variable 'pid' can be reduced")) + 0) self.expectOutcome(result=FAILURE, state_string="cppcheck error=2 style=1 (failure)") return self.runStep() def test_renderables(self): P = WithProperties self.setupStep(cppcheck.Cppcheck( binary=P('a'), source=[P('.'), P('f.c')], extra_args=[P('--p'), P('--p')])) self.expectCommands( ExpectShell(workdir='wkdir', command=[ 'a', '.', 'f.c', '--p', '--p']) + ExpectShell.log( 'stdio', stdout='Checking file1.c...') + 0) self.expectOutcome(result=SUCCESS, state_string="cppcheck") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_http.py000066400000000000000000000105061361162603000241660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
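# ---- Editor's illustrative sketch (not part of the original module) --------
# The Cppcheck cases above drive buildbot.steps.cppcheck.Cppcheck and count
# the (warning)/(error)/(style) markers in its output to decide between
# SUCCESS, WARNINGS and FAILURE.  A hypothetical step configuration mirroring
# those tests (source paths and extra flags are invented):
from buildbot.steps import cppcheck

static_analysis = cppcheck.Cppcheck(
    source=['src'],                      # defaults to '.' when omitted
    enable=['warning', 'performance'],   # --enable=warning,performance
    extra_args=['--inline-suppr'],       # passed through verbatim
)
# ---- end of editor's sketch -------------------------------------------------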
# # Copyright Buildbot Team Members from twisted.internet import reactor from twisted.trial import unittest from twisted.web.resource import Resource from twisted.web.server import Site from buildbot.process import properties from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import http from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin try: import txrequests assert txrequests import requests assert requests except ImportError: txrequests = requests = None # We use twisted's internal webserver instead of mocking requests # to be sure we use the correct requests interfaces class TestPage(Resource): isLeaf = True def render_GET(self, request): if request.uri == b"/404": request.setResponseCode(404) return b"404" elif request.uri == b"/header": return b"".join(request.requestHeaders.getRawHeaders(b"X-Test")) return b"OK" class TestHTTPStep(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): timeout = 3 # those tests should not run long def setUp(self): self.setUpTestReactor() if txrequests is None: raise unittest.SkipTest( "Need to install txrequests to test http steps") # ignore 'http_proxy' environment variable when running tests session = http.getSession() session.trust_env = False # port 0 means random unused port self.listener = reactor.listenTCP(0, Site(TestPage())) self.port = self.listener.getHost().port return self.setUpBuildStep() def tearDown(self): http.closeSession() d = self.listener.stopListening() d.addBoth(lambda x: self.tearDownBuildStep()) return d def getURL(self, path=""): return "http://127.0.0.1:%d/%s" % (self.port, path) def test_basic(self): url = self.getURL() self.setupStep(http.GET(url)) self.expectLogfile( 'log', "URL: %s\nStatus: 200\n ------ Content ------\nOK" % (url, )) self.expectLogfile('content', "OK") self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() def test_404(self): url = self.getURL("404") self.setupStep(http.GET(url)) self.expectLogfile( 'log', "URL: %s\n ------ Content ------\n404" % (url, )) self.expectLogfile('content', "404") self.expectOutcome( result=FAILURE, state_string="Status code: 404 (failure)") return self.runStep() def test_POST(self): url = self.getURL("POST") self.setupStep(http.POST(url)) self.expectOutcome( result=FAILURE, state_string="Status code: 405 (failure)") return self.runStep() def test_header(self): url = self.getURL("header") self.setupStep(http.GET(url, headers={"X-Test": "True"})) self.expectLogfile( 'log', "URL: %s\nStatus: 200\n ------ Content ------\nTrue" % (url, )) self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() def test_params_renderable(self): url = self.getURL() self.setupStep(http.GET(url, params=properties.Property("x"))) self.properties.setProperty( 'x', {'param_1': 'param_1', 'param_2': 2}, 'here') self.expectLogfile( 'log', "URL: %s?param_1=param_1¶m_2=2\nStatus: 200\n ------ Content ------\nOK" % (url, )) self.expectLogfile('content', "OK") self.expectOutcome(result=SUCCESS, state_string="Status code: 200") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_master.py000066400000000000000000000302071361162603000245020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
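# ---- Editor's illustrative sketch (not part of the original module) --------
# The tests above run http.GET/http.POST against a local twisted.web site
# instead of mocking the requests layer.  In a master configuration the same
# steps are simply added to a factory; the URLs and header values below are
# invented examples.
from buildbot.process.factory import BuildFactory
from buildbot.steps import http

f = BuildFactory()
f.addStep(http.GET("http://127.0.0.1:8080/health", headers={"X-Test": "True"}))
f.addStep(http.POST("http://127.0.0.1:8080/notify"))
# ---- end of editor's sketch -------------------------------------------------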
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import pprint import sys from twisted.internet import error from twisted.internet import reactor from twisted.python import failure from twisted.python import runtime from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.process.properties import Property from buildbot.process.properties import WithProperties from buildbot.process.properties import renderer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import master from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin _COMSPEC_ENV = 'COMSPEC' class TestMasterShellCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() if runtime.platformType == 'win32': self.comspec = os.environ.get(_COMSPEC_ENV) os.environ[_COMSPEC_ENV] = r'C:\WINDOWS\system32\cmd.exe' return self.setUpBuildStep() def tearDown(self): if runtime.platformType == 'win32': if self.comspec: os.environ[_COMSPEC_ENV] = self.comspec else: del os.environ[_COMSPEC_ENV] return self.tearDownBuildStep() def patchSpawnProcess(self, exp_cmd, exp_argv, exp_path, exp_usePTY, exp_env, outputs): def spawnProcess(pp, cmd, argv, path, usePTY, env): self.assertEqual([cmd, argv, path, usePTY, env], [exp_cmd, exp_argv, exp_path, exp_usePTY, exp_env]) for output in outputs: if output[0] == 'out': pp.outReceived(output[1]) elif output[0] == 'err': pp.errReceived(output[1]) elif output[0] == 'rc': if output[1] != 0: so = error.ProcessTerminated(exitCode=output[1]) else: so = error.ProcessDone(None) pp.processEnded(failure.Failure(so)) self.patch(reactor, 'spawnProcess', spawnProcess) def test_real_cmd(self): cmd = [sys.executable, '-c', 'print("hello")'] self.setupStep( master.MasterShellCommand(command=cmd)) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello\r\n") else: self.expectLogfile('stdio', "hello\n") self.expectOutcome(result=SUCCESS, state_string="Ran") return self.runStep() def test_real_cmd_interrupted(self): cmd = [sys.executable, '-c', 'while True: pass'] self.setupStep( master.MasterShellCommand(command=cmd)) self.expectLogfile('stdio', "") if runtime.platformType == 'win32': # windows doesn't have signals, so we don't get 'killed', # but the "exception" part still works. 
self.expectOutcome(result=EXCEPTION, state_string="failed (1) (exception)") else: self.expectOutcome(result=EXCEPTION, state_string="killed (9) (exception)") d = self.runStep() self.step.interrupt("KILL") return d def test_real_cmd_fails(self): cmd = [sys.executable, '-c', 'import sys; sys.exit(1)'] self.setupStep( master.MasterShellCommand(command=cmd)) self.expectLogfile('stdio', "") self.expectOutcome(result=FAILURE, state_string="failed (1) (failure)") return self.runStep() def test_constr_args(self): self.setupStep( master.MasterShellCommand(description='x', descriptionDone='y', env={'a': 'b'}, workdir='build', usePTY=True, command='true')) if runtime.platformType == 'win32': exp_argv = [r'C:\WINDOWS\system32\cmd.exe', '/c', 'true'] else: exp_argv = ['/bin/sh', '-c', 'true'] self.patchSpawnProcess( exp_cmd=exp_argv[0], exp_argv=exp_argv, exp_path='build', exp_usePTY=True, exp_env={'a': 'b'}, outputs=[ ('out', 'hello!\n'), ('err', 'world\n'), ('rc', 0), ]) self.expectOutcome(result=SUCCESS, state_string='y') return self.runStep() def test_env_subst(self): cmd = [sys.executable, '-c', 'import os; print(os.environ["HELLO"])'] os.environ['WORLD'] = 'hello' self.setupStep( master.MasterShellCommand(command=cmd, env={'HELLO': '${WORLD}'})) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello\r\n") else: self.expectLogfile('stdio', "hello\n") self.expectOutcome(result=SUCCESS) d = self.runStep() @d.addBoth def _restore_env(res): del os.environ['WORLD'] return res return d def test_env_list_subst(self): cmd = [sys.executable, '-c', 'import os; print(os.environ["HELLO"])'] os.environ['WORLD'] = 'hello' os.environ['LIST'] = 'world' self.setupStep( master.MasterShellCommand(command=cmd, env={'HELLO': ['${WORLD}', '${LIST}']})) if runtime.platformType == 'win32': self.expectLogfile('stdio', "hello;world\r\n") else: self.expectLogfile('stdio', "hello:world\n") self.expectOutcome(result=SUCCESS) d = self.runStep() @d.addBoth def _restore_env(res): del os.environ['WORLD'] del os.environ['LIST'] return res return d def test_prop_rendering(self): cmd = [sys.executable, '-c', WithProperties( 'import os; print("%s"); print(os.environ[\"BUILD\"])', 'project')] self.setupStep( master.MasterShellCommand(command=cmd, env={'BUILD': WithProperties('%s', "project")})) self.properties.setProperty("project", "BUILDBOT-TEST", "TEST") if runtime.platformType == 'win32': self.expectLogfile('stdio', "BUILDBOT-TEST\r\nBUILDBOT-TEST\r\n") else: self.expectLogfile('stdio', "BUILDBOT-TEST\nBUILDBOT-TEST\n") self.expectOutcome(result=SUCCESS) return self.runStep() def test_constr_args_descriptionSuffix(self): self.setupStep( master.MasterShellCommand(description='x', descriptionDone='y', descriptionSuffix='z', env={'a': 'b'}, workdir='build', usePTY=True, command='true')) if runtime.platformType == 'win32': exp_argv = [r'C:\WINDOWS\system32\cmd.exe', '/c', 'true'] else: exp_argv = ['/bin/sh', '-c', 'true'] self.patchSpawnProcess( exp_cmd=exp_argv[0], exp_argv=exp_argv, exp_path='build', exp_usePTY=True, exp_env={'a': 'b'}, outputs=[ ('out', 'hello!\n'), ('err', 'world\n'), ('rc', 0), ]) self.expectOutcome(result=SUCCESS, state_string='y z') return self.runStep() class TestSetProperty(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(master.SetProperty(property="testProperty", value=Interpolate( "sch=%(prop:scheduler)s, 
worker=%(prop:workername)s"))) self.properties.setProperty( 'scheduler', 'force', source='SetProperty', runtime=True) self.properties.setProperty( 'workername', 'testWorker', source='SetProperty', runtime=True) self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty( 'testProperty', 'sch=force, worker=testWorker', source='SetProperty') return self.runStep() class TestLogRenderable(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(master.LogRenderable( content=Interpolate('sch=%(prop:scheduler)s, worker=%(prop:workername)s'))) self.properties.setProperty( 'scheduler', 'force', source='TestSetProperty', runtime=True) self.properties.setProperty( 'workername', 'testWorker', source='TestSetProperty', runtime=True) self.expectOutcome(result=SUCCESS, state_string='Logged') self.expectLogfile( 'Output', pprint.pformat('sch=force, worker=testWorker')) return self.runStep() class TestsSetProperties(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def doOneTest(self, **kwargs): # all three tests should create a 'a' property with 'b' value, all with different # more or less dynamic methods self.setupStep( master.SetProperties(name="my-step", **kwargs)) self.expectProperty('a', 'b', 'my-step') self.expectOutcome(result=SUCCESS, state_string='Properties Set') return self.runStep() def test_basic(self): return self.doOneTest(properties={'a': 'b'}) def test_renderable(self): return self.doOneTest(properties={'a': Interpolate("b")}) def test_renderer(self): @renderer def manipulate(props): # the renderer returns renderable! return {'a': Interpolate('b')} return self.doOneTest(properties=manipulate) class TestAssert(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_eq_pass(self): self.setupStep(master.Assert( Property("test_prop") == "foo")) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=SUCCESS) return self.runStep() def test_eq_fail(self): self.setupStep(master.Assert( Property("test_prop") == "bar")) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=FAILURE) return self.runStep() def test_renderable_pass(self): @renderer def test_renderer(props): return props.getProperty("test_prop") == "foo" self.setupStep(master.Assert(test_renderer)) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=SUCCESS) return self.runStep() def test_renderable_fail(self): @renderer def test_renderer(props): return props.getProperty("test_prop") == "bar" self.setupStep(master.Assert(test_renderer)) self.properties.setProperty("test_prop", "foo", "bar") self.expectOutcome(result=FAILURE) return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_maxq.py000066400000000000000000000051261361162603000241570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import maxq from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestShellCommandExecution(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_testdir_required(self): with self.assertRaises(config.ConfigErrors): maxq.MaxQ() def test_success(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command="run_maxq.py x") + ExpectShell.log('stdio', stdout='no failures\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string='success') return self.runStep() def test_nonzero_rc_no_failures(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command="run_maxq.py x") + ExpectShell.log('stdio', stdout='no failures\n') + 2 ) self.expectOutcome(result=FAILURE, state_string='1 maxq failures') return self.runStep() def test_failures(self): self.setupStep( maxq.MaxQ(testdir='x')) self.expectCommands( ExpectShell(workdir='wkdir', command="run_maxq.py x") + ExpectShell.log('stdio', stdout='\nTEST FAILURE: foo\n' * 10) + 2 ) self.expectOutcome(result=FAILURE, state_string='10 maxq failures') return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_mswin.py000066400000000000000000000122401361162603000243410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.process.results import Results from buildbot.steps import mswin from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestRobocopySimple(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): """ Test L{Robocopy} command building. 
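# ---- Editor's illustrative sketch (not part of the original module) --------
# The Robocopy tests below build the command line from source/destination
# plus keywords such as files/mirror/exclude_dirs, and map robocopy's return
# codes (0-1 success, 2-7 warnings, 8-31 failure) onto step results.  The
# paths here are invented examples.
from buildbot.steps import mswin

publish_step = mswin.Robocopy(
    r'D:\build\out', r'\\fileserver\drops\nightly',
    mirror=True,                # adds /MIR
    exclude_dirs=['tmp'],       # adds /XD tmp
)
# ---- end of editor's sketch -------------------------------------------------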
""" def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def _run_simple_test(self, source, destination, expected_args=None, expected_code=0, expected_res=SUCCESS, **kwargs): s = mswin.Robocopy(source, destination, **kwargs) self.setupStep(s) s.rendered = True command = ['robocopy', source, destination] if expected_args: command += expected_args command += ['/TEE', '/NP'] self.expectCommands( ExpectShell( workdir='wkdir', command=command, ) + expected_code ) state_string = "'robocopy %s ...'" % source if expected_res != SUCCESS: state_string += ' (%s)' % (Results[expected_res]) self.expectOutcome(result=expected_res, state_string=state_string) return self.runStep() def test_copy(self): return self._run_simple_test(r'D:\source', r'E:\dest') def test_copy_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['a.txt', 'b.txt', '*.log'], expected_args=['a.txt', 'b.txt', '*.log'] ) def test_copy_recursive(self): return self._run_simple_test( r'D:\source', r'E:\dest', recursive=True, expected_args=['/E'] ) def test_mirror_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], mirror=True, expected_args=['*.foo', '/MIR'] ) def test_move_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], move=True, expected_args=['*.foo', '/MOVE'] ) def test_exclude(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude=['*.foo', '*.bar'], expected_args=['blah*', '/XF', '*.foo', '*.bar'] ) def test_exclude_files(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude_files=['*.foo', '*.bar'], expected_args=['blah*', '/XF', '*.foo', '*.bar'] ) def test_exclude_dirs(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['blah*'], exclude_dirs=['foo', 'bar'], expected_args=['blah*', '/XD', 'foo', 'bar'] ) def test_custom_opts(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], custom_opts=['/R:10', '/W:60'], expected_args=['*.foo', '/R:10', '/W:60'] ) def test_verbose_output(self): return self._run_simple_test( r'D:\source', r'E:\dest', files=['*.foo'], verbose=True, expected_args=['*.foo', '/V', '/TS', '/FP'] ) @defer.inlineCallbacks def test_codes(self): # Codes that mean uneventful copies (including no copy at all). for i in [0, 1]: yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=SUCCESS ) # Codes that mean some mismatched or extra files were found. for i in range(2, 8): yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=WARNINGS ) # Codes that mean errors have been encountered. for i in range(8, 32): yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=i, expected_res=FAILURE ) # bit 32 is meaningless yield self._run_simple_test( r'D:\source', r'E:\dest', expected_code=32, expected_res=EXCEPTION ) buildbot-2.6.0/master/buildbot/test/unit/test_steps_mtrlogobserver.py000066400000000000000000000076371361162603000262760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.enterprise import adbapi from twisted.trial import unittest from buildbot.steps import mtrlogobserver class TestEqConnectionPool(unittest.TestCase): # # Test buildbot.steps.mtrlogobserver.EqConnectionPool class # def setUp(self): # patch adbapi.ConnectionPool constructor to do nothing self.patch(adbapi.ConnectionPool, "__init__", mock.Mock()) def testEqSameInstance(self): # Test using '==' operator on same EqConnectionPool instance pool = mtrlogobserver.EqConnectionPool("MySQLdb", "host", "buildbot", "password", "db") self.assertTrue(pool == pool) def testEqSameArgs(self): # Test using '==' operator on two EqConnectionPool instances with # same arguments pool1 = mtrlogobserver.EqConnectionPool("MySQLdb", "host", "buildbot", "password", "db", extra="dummy") pool2 = mtrlogobserver.EqConnectionPool("MySQLdb", "host", "buildbot", "password", "db", extra="dummy") self.assertTrue(pool1 == pool2) def testEqDiffArgs(self): # Test using '==' operator on two EqConnectionPool instances with # different arguments pool1 = mtrlogobserver.EqConnectionPool("DummyDb1") pool2 = mtrlogobserver.EqConnectionPool("DummyDb2") self.assertFalse(pool1 == pool2) def testEqDiffType(self): # Test using '==' operator on an EqConnectionPool instance and object # of different type pool = mtrlogobserver.EqConnectionPool("DummyDb1") self.assertFalse(pool == object()) def testNeSameInstance(self): # Test using '!=' operator on same EqConnectionPool instance pool = mtrlogobserver.EqConnectionPool("DummyDb1") self.assertFalse(pool != pool) def testNeSameArgs(self): # Test using '!=' operator on two EqConnectionPool instances with same # arguments pool1 = mtrlogobserver.EqConnectionPool("DummyDb1", "x", y="z") pool2 = mtrlogobserver.EqConnectionPool("DummyDb1", "x", y="z") self.assertFalse(pool1 != pool2) def testNeDiffArgs(self): # Test using '!=' operator on two EqConnectionPool instances with # different arguments pool1 = mtrlogobserver.EqConnectionPool("DummyDb1") pool2 = mtrlogobserver.EqConnectionPool("DummyDb2") self.assertTrue(pool1 != pool2) def testNeDiffType(self): # Test using '!=' operator on an EqConnectionPool instance and object # of different type pool = mtrlogobserver.EqConnectionPool("DummyDb1") self.assertTrue(pool != object()) buildbot-2.6.0/master/buildbot/test/unit/test_steps_package_deb_lintian.py000066400000000000000000000043371361162603000271370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot import config
from buildbot.process.results import SUCCESS
from buildbot.steps.package.deb import lintian
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import steps
from buildbot.test.util.misc import TestReactorMixin


class TestDebLintian(steps.BuildStepMixin, TestReactorMixin,
                     unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        return self.setUpBuildStep()

    def tearDown(self):
        return self.tearDownBuildStep()

    def test_no_fileloc(self):
        with self.assertRaises(config.ConfigErrors):
            lintian.DebLintian()

    def test_success(self):
        self.setupStep(lintian.DebLintian('foo_0.23_i386.changes'))
        self.expectCommands(
            ExpectShell(workdir='wkdir',
                        command=['lintian', '-v', 'foo_0.23_i386.changes'])
            + 0)
        self.expectOutcome(result=SUCCESS, state_string="Lintian")
        return self.runStep()

    def test_success_suppressTags(self):
        self.setupStep(lintian.DebLintian('foo_0.23_i386.changes',
                                          suppressTags=['bad-distribution-in-changes-file']))
        self.expectCommands(
            ExpectShell(workdir='wkdir',
                        command=['lintian', '-v', 'foo_0.23_i386.changes',
                                 '--suppress-tags',
                                 'bad-distribution-in-changes-file'])
            + 0)
        self.expectOutcome(result=SUCCESS)
        return self.runStep()
buildbot-2.6.0/master/buildbot/test/unit/test_steps_package_deb_pbuilder.py000066400000000000000000000456031361162603000273070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.
# # Copyright Buildbot Team Members import stat import time from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps.package.deb import pbuilder from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestDebPbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() def test_update(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + Expect.update('stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--update', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', ]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_buildonly_and_property(self): self.setupStep(pbuilder.DebPbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + Expect.update( 'stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + ExpectShell.log( 'stdio', stdout='blah\ndpkg-genchanges >../somefilename.changes\foo\n') + 0) self.expectOutcome(result=SUCCESS) self.expectProperty('deb-changes', 'somefilename.changes', 'DebPbuilder') return self.runStep() def test_architecture(self): self.setupStep(pbuilder.DebPbuilder(architecture='amd64')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-amd64-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-amd64-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--architecture', 'amd64']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--architecture', 'amd64', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-amd64-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_distribution(self): self.setupStep(pbuilder.DebPbuilder(distribution='woody')) self.expectCommands( Expect( 'stat', {'file': 
'/var/cache/pbuilder/woody-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/woody-local-buildbot.tgz', '--distribution', 'woody', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/woody-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_basetgz(self): self.setupStep(pbuilder.DebPbuilder( basetgz='/buildbot/%(distribution)s-%(architecture)s.tgz')) self.expectCommands( Expect('stat', {'file': '/buildbot/stable-local.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/buildbot/stable-local.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/buildbot/stable-local.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mirror(self): self.setupStep(pbuilder.DebPbuilder(mirror='http://apt:9999/debian')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://apt:9999/debian']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_extrapackages(self): self.setupStep(pbuilder.DebPbuilder(extrapackages=['buildbot'])) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--extrapackages', 'buildbot']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--extrapackages', 'buildbot']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_keyring(self): self.setupStep(pbuilder.DebPbuilder(keyring='/builbot/buildbot.gpg')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--debootstrapopts', '--keyring=/builbot/buildbot.gpg']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_components(self): self.setupStep(pbuilder.DebPbuilder(components='main universe')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', 
command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/stable-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() class TestDebCowbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--create', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/', '--distribution', 'stable', '--mirror', 'http://cdn.debian.net/debian/']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_update(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + Expect.update('stat', [stat.S_IFDIR, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--update', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/', ]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_buildonly(self): self.setupStep(pbuilder.DebCowbuilder()) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow/'}) + Expect.update( 'stat', [stat.S_IFDIR, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_update_reg(self): self.setupStep(pbuilder.DebCowbuilder( basetgz='/var/cache/pbuilder/stable-local-buildbot.cow')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow'}) + Expect.update('stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, 0, 0]) + 0, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--update', '--basepath', '/var/cache/pbuilder/stable-local-buildbot.cow']) + 1) self.expectOutcome(result=FAILURE, state_string='built (failure)') return self.runStep() def test_buildonly_reg(self): self.setupStep(pbuilder.DebCowbuilder( basetgz='/var/cache/pbuilder/stable-local-buildbot.cow')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/stable-local-buildbot.cow'}) + Expect.update( 'stat', [stat.S_IFREG, 99, 99, 1, 0, 0, 99, 0, int(time.time()), 0]) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', 
'/var/cache/pbuilder/stable-local-buildbot.cow']) + 1) self.expectOutcome(result=FAILURE, state_string='built (failure)') return self.runStep() class TestUbuPbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_distribution(self): with self.assertRaises(config.ConfigErrors): pbuilder.UbuPbuilder() def test_new(self): self.setupStep(pbuilder.UbuPbuilder(distribution='oneiric')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/oneiric-local-buildbot.tgz'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/pbuilder', '--create', '--basetgz', '/var/cache/pbuilder/oneiric-local-buildbot.tgz', '--distribution', 'oneiric', '--mirror', 'http://archive.ubuntu.com/ubuntu/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/pbuilder', '--', '--buildresult', '.', '--basetgz', '/var/cache/pbuilder/oneiric-local-buildbot.tgz']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() class TestUbuCowbuilder(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_distribution(self): with self.assertRaises(config.ConfigErrors): pbuilder.UbuCowbuilder() def test_new(self): self.setupStep(pbuilder.UbuCowbuilder(distribution='oneiric')) self.expectCommands( Expect( 'stat', {'file': '/var/cache/pbuilder/oneiric-local-buildbot.cow/'}) + 1, ExpectShell(workdir='wkdir', command=['sudo', '/usr/sbin/cowbuilder', '--create', '--basepath', '/var/cache/pbuilder/oneiric-local-buildbot.cow/', '--distribution', 'oneiric', '--mirror', 'http://archive.ubuntu.com/ubuntu/', '--components', 'main universe']) + 0, ExpectShell(workdir='wkdir', command=['pdebuild', '--buildresult', '.', '--pbuilder', '/usr/sbin/cowbuilder', '--', '--buildresult', '.', '--basepath', '/var/cache/pbuilder/oneiric-local-buildbot.cow/']) + 0) self.expectOutcome(result=SUCCESS, state_string='built') return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_package_rpm_mock.py000066400000000000000000000143301361162603000264700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import mock from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestMock(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_root(self): with self.assertRaises(config.ConfigErrors): mock.Mock() def test_class_attrs(self): step = self.setupStep(mock.Mock(root='TESTROOT')) self.assertEqual(step.command, ['mock', '--root', 'TESTROOT']) def test_success(self): self.setupStep(mock.Mock(root='TESTROOT')) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log']}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 'state.log'}) + 0) self.expectOutcome(result=SUCCESS, state_string="'mock --root ...'") return self.runStep() def test_resultdir_success(self): self.setupStep(mock.Mock(root='TESTROOT', resultdir='RESULT')) self.expectCommands( Expect('rmdir', {'dir': ['build/RESULT/build.log', 'build/RESULT/root.log', 'build/RESULT/state.log']}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--resultdir', 'RESULT'], logfiles={'build.log': 'RESULT/build.log', 'root.log': 'RESULT/root.log', 'state.log': 'RESULT/state.log'}) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_resultdir_renderable(self): resultdir_text = "RESULT" self.setupStep(mock.Mock(root='TESTROOT', resultdir=Interpolate( '%(kw:resultdir)s', resultdir=resultdir_text))) self.expectCommands( Expect('rmdir', {'dir': ['build/RESULT/build.log', 'build/RESULT/root.log', 'build/RESULT/state.log']}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--resultdir', 'RESULT'], logfiles={'build.log': 'RESULT/build.log', 'root.log': 'RESULT/root.log', 'state.log': 'RESULT/state.log'}) + 0) self.expectOutcome(result=SUCCESS, state_string="'mock --root ...'") return self.runStep() class TestMockBuildSRPM(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_spec(self): with self.assertRaises(config.ConfigErrors): mock.MockBuildSRPM(root='TESTROOT') def test_success(self): self.setupStep(mock.MockBuildSRPM(root='TESTROOT', spec="foo.spec")) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log']}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--buildsrpm', '--spec', 'foo.spec', '--sources', '.'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 'state.log'},) + 0) self.expectOutcome(result=SUCCESS, state_string='mock buildsrpm') return self.runStep() class TestMockRebuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_srpm(self): with self.assertRaises(config.ConfigErrors): mock.MockRebuild(root='TESTROOT') def test_success(self): 
self.setupStep(mock.MockRebuild(root='TESTROOT', srpm="foo.src.rpm")) self.expectCommands( Expect('rmdir', {'dir': ['build/build.log', 'build/root.log', 'build/state.log']}) + 0, ExpectShell(workdir='wkdir', command=['mock', '--root', 'TESTROOT', '--rebuild', 'foo.src.rpm'], logfiles={'build.log': 'build.log', 'root.log': 'root.log', 'state.log': 'state.log'},) + 0) self.expectOutcome(result=SUCCESS, state_string='mock rebuild srpm') return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_package_rpm_rpmbuild.py000066400000000000000000000124041361162603000273550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import OrderedDict from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import rpmbuild from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class RpmBuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_specfile(self): with self.assertRaises(config.ConfigErrors): rpmbuild.RpmBuild() def test_success(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", dist=".el5")) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define "_specdir ' '`pwd`" --define "_srcrpmdir `pwd`" --define "dist .el5" ' '-ba foo.spec') + ExpectShell.log('stdio', stdout='lalala') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_autoRelease(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", autoRelease=True)) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir `pwd`" ' '--define "_sourcedir `pwd`" --define "_specdir `pwd`" ' '--define "_srcrpmdir `pwd`" --define "_release 0" ' '--define "dist .el6" -ba foo.spec') + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_define(self): defines = [("a", "1"), ("b", "2")] self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", define=OrderedDict(defines))) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define ' '"_specdir `pwd`" --define "_srcrpmdir `pwd`" ' '--define "a 1" --define "b 2" --define "dist .el6" ' '-ba foo.spec') + 
ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() def test_define_none(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", define=None)) self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define ' '"_specdir `pwd`" --define "_srcrpmdir `pwd`" ' '--define "dist .el6" -ba foo.spec') + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') return self.runStep() @defer.inlineCallbacks def test_renderable_dist(self): self.setupStep(rpmbuild.RpmBuild(specfile="foo.spec", dist=Interpolate('%(prop:renderable_dist)s'))) self.properties.setProperty('renderable_dist', '.el7', 'test') self.expectCommands( ExpectShell(workdir='wkdir', command='rpmbuild --define "_topdir ' '`pwd`" --define "_builddir `pwd`" --define "_rpmdir ' '`pwd`" --define "_sourcedir `pwd`" --define "_specdir ' '`pwd`" --define "_srcrpmdir `pwd`" --define "dist .el7" ' '-ba foo.spec') + ExpectShell.log('stdio', stdout='lalala') + 0) self.expectOutcome(result=SUCCESS, state_string='RPMBUILD') yield self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_package_rpm_rpmlint.py000066400000000000000000000042421361162603000272250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.steps.package.rpm import rpmlint from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestRpmLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(rpmlint.RpmLint()) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', '.']) + 0) self.expectOutcome( result=SUCCESS, state_string='Finished checking RPM/SPEC issues') return self.runStep() def test_fileloc_success(self): self.setupStep(rpmlint.RpmLint(fileloc='RESULT')) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', 'RESULT']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() def test_config_success(self): self.setupStep(rpmlint.RpmLint(config='foo.cfg')) self.expectCommands( ExpectShell(workdir='wkdir', command=['rpmlint', '-i', '-f', 'foo.cfg', '.']) + 0) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_python.py000066400000000000000000000547301361162603000245370ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import python from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin log_output_success = '''\ Making output directory... Running Sphinx v1.0.7 loading pickled environment... not yet created No builder selected, using default: html building [html]: targets for 24 source files that are out of date updating environment: 24 added, 0 changed, 0 removed reading sources... [ 4%] index reading sources... [ 8%] manual/cfg-builders ... copying static files... done dumping search index... done dumping object inventory... done build succeeded. ''' log_output_nochange = '''\ Running Sphinx v1.0.7 loading pickled environment... done No builder selected, using default: html building [html]: targets for 0 source files that are out of date updating environment: 0 added, 0 changed, 0 removed looking for now-outdated files... none found no targets are out of date. ''' log_output_warnings = '''\ Running Sphinx v1.0.7 loading pickled environment... done building [html]: targets for 1 source files that are out of date updating environment: 0 added, 1 changed, 0 removed reading sources... [100%] file file.rst:18: (WARNING/2) Literal block expected; none found. looking for now-outdated files... none found pickling environment... done checking consistency... done preparing documents... done writing output... [ 50%] index writing output... [100%] file index.rst:: WARNING: toctree contains reference to document 'preamble' that \ doesn't have a title: no link will be generated writing additional files... search copying static files... done dumping search index... done dumping object inventory... done build succeeded, 2 warnings.''' log_output_warnings_strict = '''\ Running Sphinx v1.0.7 loading pickled environment... done building [html]: targets for 1 source files that are out of date updating environment: 0 added, 1 changed, 0 removed reading sources... [100%] file Warning, treated as error: file.rst:18:Literal block expected; none found. ''' warnings = '''\ file.rst:18: (WARNING/2) Literal block expected; none found. index.rst:: WARNING: toctree contains reference to document 'preamble' that \ doesn't have a title: no link will be generated\ ''' # this is from a run of epydoc against the buildbot source.. epydoc_output = '''\ [............... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/ | ec2.py: | Import failed (but source code parsing was successful). | Error: ImportError: No module named boto (line 19) | [.... 
Warning: Unable to extract the base list for twisted.web.resource.EncodingResourceWrapper: Bad dotted name [...... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/worker/ | ec2.py: | Import failed (but source code parsing was successful). | Error: ImportError: No module named boto (line 28) | [........... +--------------------------------------------------------------------- | In /home/dustin/code/buildbot/t/buildbot/master/buildbot/status/ | status_push.py: | Import failed (but source code parsing was successful). | Error: ImportError: No module named status_json (line 40) | [....................Special descriptor for class __provides__ ''' class BuildEPYDoc(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_sample(self): self.setupStep(python.BuildEPYDoc()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'epydocs']) + ExpectShell.log('stdio', stdout=epydoc_output) + 1, ) self.expectOutcome(result=FAILURE, state_string='epydoc warn=1 err=3 (failure)') return self.runStep() class PyLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log('stdio', stdout='Your code has been rated at 10/10') + python.PyLint.RC_OK) self.expectOutcome(result=SUCCESS, state_string='pylint') return self.runStep() def test_error(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. Found 6 spaces, expected 4\n' 'E: 12: Undefined variable \'foo\'\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_ERROR)) self.expectOutcome(result=FAILURE, state_string='pylint error=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-error', 1) return self.runStep() def test_header_output(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', header='W: 11: Bad indentation. Found 6 spaces, expected 4\n') + 0) self.expectOutcome(result=SUCCESS, state_string='pylint') return self.runStep() def test_failure(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. Found 6 spaces, expected 4\n' 'F: 13: something really strange happened\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_FATAL)) self.expectOutcome(result=FAILURE, state_string='pylint fatal=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-fatal', 1) return self.runStep() def test_failure_zero_returncode(self): # Make sure that errors result in a failed step when pylint's # return code is 0, e.g. when run through a wrapper script. self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. 
Found 6 spaces, expected 4\n' 'E: 12: Undefined variable \'foo\'\n')) + 0) self.expectOutcome(result=FAILURE, state_string='pylint error=1 warning=1 (failure)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-error', 1) return self.runStep() def test_regex_text(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11: Bad indentation. Found 6 spaces, expected 4\n' 'C: 1:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_0_24(self): # pylint >= 0.24.0 prints out column offsets when using text format self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11,0: Bad indentation. Found 6 spaces, expected 4\n' 'C: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_131(self): # at least pylint 1.3.1 prints out space padded column offsets when # using text format self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W: 11, 0: Bad indentation. 
Found 6 spaces, expected 4\n' 'C: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_ids(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W0311: 11: Bad indentation.\n' 'C0111: 1:funcName: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_text_ids_0_24(self): # pylint >= 0.24.0 prints out column offsets when using text format self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('W0311: 11,0: Bad indentation.\n' 'C0111: 3,10:foo123: Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_parseable_ids(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('test.py:9: [W0311] Bad indentation.\n' 'test.py:3: [C0111, foo123] Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_parseable(self): self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('test.py:9: [W] Bad indentation.\n' 'test.py:3: [C, foo123] Missing docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() def test_regex_parseable_131(self): """ In pylint 1.3.1, output parseable is deprecated, but looks like that, this is also the new recommended format string: --msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} """ self.setupStep(python.PyLint(command=['pylint'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['pylint']) + ExpectShell.log( 'stdio', stdout=('test.py:9: [W0311(bad-indentation), ] Bad indentation. 
Found 6 spaces, expected 4\n' 'test.py:3: [C0111(missing-docstring), myFunc] Missing function docstring\n')) + (python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)) self.expectOutcome(result=WARNINGS, state_string='pylint convention=1 warning=1 (warnings)') self.expectProperty('pylint-warning', 1) self.expectProperty('pylint-convention', 1) self.expectProperty('pylint-total', 2) return self.runStep() class PyFlakes(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + 0) self.expectOutcome(result=SUCCESS, state_string='pyflakes') return self.runStep() def test_content_in_header(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', # don't match pyflakes-like output in the header header="foo.py:1: 'bar' imported but unused\n") + 0) self.expectOutcome(result=0, state_string='pyflakes') return self.runStep() def test_unused(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: 'bar' imported but unused\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes unused=1 (warnings)') self.expectProperty('pyflakes-unused', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_undefined(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: undefined name 'bar'\n") + 1) self.expectOutcome(result=FAILURE, state_string='pyflakes undefined=1 (failure)') self.expectProperty('pyflakes-undefined', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_redefs(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:2: redefinition of unused 'foo' from line 1\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes redefs=1 (warnings)') self.expectProperty('pyflakes-redefs', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_importstar(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:1: 'from module import *' used; unable to detect undefined names\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes import*=1 (warnings)') self.expectProperty('pyflakes-import*', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() def test_misc(self): self.setupStep(python.PyFlakes()) self.expectCommands( ExpectShell(workdir='wkdir', command=['make', 'pyflakes']) + ExpectShell.log( 'stdio', stdout="foo.py:2: redefinition of function 'bar' from line 1\n") + 1) self.expectOutcome(result=WARNINGS, state_string='pyflakes misc=1 (warnings)') self.expectProperty('pyflakes-misc', 1) self.expectProperty('pyflakes-total', 1) return self.runStep() class TestSphinx(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_builddir_required(self): with self.assertRaises(config.ConfigErrors): 
python.Sphinx() def test_bad_mode(self): with self.assertRaises(config.ConfigErrors): python.Sphinx(sphinx_builddir="_build", mode="don't care") def test_success(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_success) + 0 ) self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() def test_failure(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout='oh noes!') + 1 ) self.expectOutcome(result=FAILURE, state_string="sphinx 0 warnings (failure)") return self.runStep() def test_strict_warnings(self): self.setupStep(python.Sphinx(sphinx_builddir="_build", strict_warnings=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '-W', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_warnings_strict) + 1 ) self.expectOutcome(result=FAILURE, state_string="sphinx 1 warnings (failure)") return self.runStep() def test_nochange(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_nochange) + 0 ) self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() @defer.inlineCallbacks def test_warnings(self): self.setupStep(python.Sphinx(sphinx_builddir="_build")) self.expectCommands( ExpectShell(workdir='wkdir', command=['sphinx-build', '.', '_build']) + ExpectShell.log('stdio', stdout=log_output_warnings) + 0 ) self.expectOutcome(result=WARNINGS, state_string="sphinx 2 warnings (warnings)") self.expectLogfile("warnings", warnings) yield self.runStep() self.assertEqual(self.step.statistics, {'warnings': 2}) def test_constr_args(self): self.setupStep(python.Sphinx(sphinx_sourcedir='src', sphinx_builddir="bld", sphinx_builder='css', sphinx="/path/to/sphinx-build", tags=['a', 'b'], strict_warnings=True, defines=dict( empty=None, t=True, f=False, s="str"), mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['/path/to/sphinx-build', '-b', 'css', '-t', 'a', '-t', 'b', '-D', 'empty', '-D', 'f=0', '-D', 's=str', '-D', 't=1', '-E', '-W', 'src', 'bld']) + ExpectShell.log('stdio', stdout=log_output_success) + 0 ) self.expectOutcome(result=SUCCESS, state_string="sphinx 0 warnings") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_python_twisted.py000066400000000000000000000350131361162603000262730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import textwrap from twisted.trial import unittest from buildbot.process.properties import Property from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import python_twisted from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin failureLog = '''\ buildbot.test.unit.test_steps_python_twisted.Trial.testProperties ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_plural ... [FAILURE] buildbot.test.unit.test_steps_python_twisted.Trial.test_run_singular ... [FAILURE] =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) twisted.trial.unittest.FailTest: expected step outcome not equal: a = {'result': 3, 'status_text': ['2 tests', 'passed']} b = {'result': 0, 'status_text': ['2 tests', 'passed']} buildbot.test.unit.test_steps_python_twisted.Trial.testProperties buildbot.test.unit.test_steps_python_twisted.Trial.test_run_plural =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) twisted.trial.unittest.FailTest: expected step outcome not equal: a = {'result': 3, 'status_text': ['no tests', 'run']} b = {'result': 0, 'status_text': ['no tests', 'run']} buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement =============================================================================== [FAIL] Traceback (most recent call last): File "/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/util/steps.py", line 244, in check "expected step outcome") File "/home/dustin/code/buildbot/t/buildbot/sandbox/lib/python2.7/site-packages/twisted/trial/_synctest.py", line 356, in assertEqual % (msg, pformat(first), pformat(second))) twisted.trial.unittest.FailTest: expected 
step outcome not equal: a = {'result': 3, 'status_text': ['1 test', 'passed']} b = {'result': 0, 'status_text': ['1 test', 'passed']} buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties buildbot.test.unit.test_steps_python_twisted.Trial.test_run_singular ------------------------------------------------------------------------------- Ran 8 tests in 0.101s FAILED (failures=8) ''' # noqa: max-line-length class Trial(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_env(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None, env={'PYTHONPATH': 'somepath'})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH='somepath')) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') return self.runStep() def test_run_env_supplement(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath='path1', env={'PYTHONPATH': ['path2', 'path3']})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH=['path1', 'path2', 'path3'])) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') return self.runStep() def test_run_env_nodupe(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath='path2', env={'PYTHONPATH': ['path1', 'path2']})) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}, env=dict(PYTHONPATH=['path1', 'path2'])) + ExpectShell.log('stdio', stdout="Ran 0 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='no tests run') return self.runStep() def test_run_singular(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() def test_run_plural(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_run_failure(self): self.setupStep( python_twisted.Trial(workdir='build', tests='testname', testpath=None)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout=failureLog) + 1 ) self.expectOutcome( result=FAILURE, state_string='tests 8 failures (failure)') self.expectLogfile('problems', failureLog.split('\n\n', 1)[1][:-1]) self.expectLogfile('warnings', textwrap.dedent('''\ 
buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_nodupe ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_env_supplement ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobs ... [FAILURE]/home/dustin/code/buildbot/t/buildbot/master/buildbot/test/fake/logfile.py:92: UserWarning: step uses removed LogFile method `getText` buildbot.test.unit.test_steps_python_twisted.Trial.test_run_jobsProperties ... [FAILURE] ''')) # noqa: max-line-length return self.runStep() def testProperties(self): self.setupStep(python_twisted.Trial(workdir='build', tests=Property('test_list'), testpath=None)) self.properties.setProperty('test_list', ['testname'], 'Test') self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', 'testname'], logfiles={'test.log': '_trial_temp/test.log'}) + ExpectShell.log('stdio', stdout="Ran 2 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='2 tests passed') return self.runStep() def test_run_jobs(self): """ The C{jobs} kwarg should correspond to trial's -j option ( included since Twisted 12.3.0), and make corresponding changes to logfiles. """ self.setupStep(python_twisted.Trial(workdir='build', tests='testname', testpath=None, jobs=2)) self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--jobs=2', 'testname'], logfiles={ 'test.0.log': '_trial_temp/0/test.log', 'err.0.log': '_trial_temp/0/err.log', 'out.0.log': '_trial_temp/0/out.log', 'test.1.log': '_trial_temp/1/test.log', 'err.1.log': '_trial_temp/1/err.log', 'out.1.log': '_trial_temp/1/out.log', }) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() def test_run_jobsProperties(self): """ C{jobs} should accept Properties """ self.setupStep(python_twisted.Trial(workdir='build', tests='testname', jobs=Property('jobs_count'), testpath=None)) self.properties.setProperty('jobs_count', '2', 'Test') self.expectCommands( ExpectShell(workdir='build', command=['trial', '--reporter=bwverbose', '--jobs=2', 'testname'], logfiles={ 'test.0.log': '_trial_temp/0/test.log', 'err.0.log': '_trial_temp/0/err.log', 'out.0.log': '_trial_temp/0/out.log', 'test.1.log': '_trial_temp/1/test.log', 'err.1.log': '_trial_temp/1/err.log', 'out.1.log': '_trial_temp/1/out.log', }) + ExpectShell.log('stdio', stdout="Ran 1 tests\n") + 0 ) self.expectOutcome(result=SUCCESS, state_string='1 test passed') return self.runStep() class HLint(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_ok(self): self.setupStep(python_twisted.HLint(workdir='build'), buildFiles=['foo.xhtml']) self.expectCommands( ExpectShell(workdir='build', command=[ 'bin/lore', '-p', '--output', 'lint', 'foo.xhtml'],) + ExpectShell.log( 'stdio', stdout="dunno what hlint output looks like..\n") + 0 ) self.expectLogfile('files', 'foo.xhtml\n') self.expectOutcome(result=SUCCESS, state_string='hlint') return self.runStep() def test_run_warnings(self): self.setupStep(python_twisted.HLint(workdir='build'), buildFiles=['foo.xhtml']) self.expectCommands( 
            ExpectShell(workdir='build', command=[
                'bin/lore', '-p', '--output', 'lint', 'foo.xhtml']) +
            ExpectShell.log('stdio', stdout="colon: meaning warning\n") +
            0
        )
        self.expectLogfile('warnings', 'colon: meaning warning')
        self.expectOutcome(result=WARNINGS, state_string='hlint (warnings)')
        return self.runStep()
buildbot-2.6.0/master/buildbot/test/unit/test_steps_renderable.py000066400000000000000000000032451361162603000253140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot.process.buildstep import BuildStep
from buildbot.process.properties import Interpolate
from buildbot.test.util import config as configmixin
from buildbot.test.util import steps
from buildbot.test.util.misc import TestReactorMixin


class TestBuildStep(BuildStep):

    def run(self):
        self.setProperty('name', self.name)
        return 0


class TestBuildStepNameIsRenderable(steps.BuildStepMixin, unittest.TestCase,
                                    TestReactorMixin,
                                    configmixin.ConfigErrorsMixin):

    def setUp(self):
        self.setUpTestReactor()
        return self.setUpBuildStep()

    def tearDown(self):
        return self.tearDownBuildStep()

    def test_name_is_renderable(self):
        step = TestBuildStep(name=Interpolate('%(kw:foo)s', foo='bar'))
        self.setupStep(step)
        self.expectProperty('name', 'bar')
        self.expectOutcome(0)
        return self.runStep()
buildbot-2.6.0/master/buildbot/test/unit/test_steps_shell.py000066400000000000000000001253571361162603000243270ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import re import textwrap from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import properties from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import shell from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestShellCommandExecution(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def assertLegacySummary(self, step, running, done=None): done = done or running self.assertEqual( (step._getLegacySummary(done=False), step._getLegacySummary(done=True)), (running, done)) def test_doStepIf_False(self): self.setupStep( shell.ShellCommand(command="echo hello", doStepIf=False)) self.expectOutcome(result=SKIPPED, state_string="'echo hello' (skipped)") return self.runStep() def test_constructor_args_kwargs(self): # this is an ugly way to define an API, but for now check that # the RemoteCommand arguments are properly passed on step = shell.ShellCommand(workdir='build', command="echo hello", want_stdout=0, logEnviron=False) self.assertEqual(step.remote_kwargs, dict(want_stdout=0, logEnviron=False, workdir='build', usePTY=None)) def test_constructor_args_validity(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "Invalid argument(s) passed to RemoteShellCommand: "): shell.ShellCommand(workdir='build', command="echo Hello World", wrongArg1=1, wrongArg2='two') def test_getLegacySummary_from_empty_command(self): # this is more of a regression test for a potential failure, really step = shell.ShellCommand(workdir='build', command=' ') step.rendered = True self.assertLegacySummary(step, None) def test_getLegacySummary_from_short_command(self): step = shell.ShellCommand(workdir='build', command="true") step.rendered = True self.assertLegacySummary(step, "'true'") def test_getLegacySummary_from_short_command_list(self): step = shell.ShellCommand(workdir='build', command=["true"]) step.rendered = True self.assertLegacySummary(step, "'true'") def test_getLegacySummary_from_med_command(self): step = shell.ShellCommand(command="echo hello") step.rendered = True self.assertLegacySummary(step, "'echo hello'") def test_getLegacySummary_from_med_command_list(self): step = shell.ShellCommand(command=["echo", "hello"]) step.rendered = True self.assertLegacySummary(step, "'echo hello'") def test_getLegacySummary_from_long_command(self): step = shell.ShellCommand(command="this is a long command") step.rendered = True self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_from_long_command_list(self): step = shell.ShellCommand(command="this is a long command".split()) step.rendered = True self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_from_nested_command_list(self): step = shell.ShellCommand(command=["this", ["is", "a"], "nested"]) step.rendered = True 
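        # The nested-command tests around here document ShellCommand's summary
        # behaviour: nested lists and tuples in `command` are flattened before
        # the legacy summary is computed, and long commands are elided to their
        # first two words, e.g. (illustrative):
        #   ["this", ["is", "a"], "nested"]  ->  legacy summary "'this is ...'"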
self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_from_nested_command_tuples(self): step = shell.ShellCommand(command=["this", ("is", "a"), "nested"]) step.rendered = True self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_from_nested_command_list_empty(self): step = shell.ShellCommand(command=["this", [], ["is", "a"], "nested"]) step.rendered = True self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_from_nested_command_list_deep(self): step = shell.ShellCommand(command=[["this", [[["is", ["a"]]]]]]) step.rendered = True self.assertLegacySummary(step, "'this is ...'") def test_getLegacySummary_custom(self): step = shell.ShellCommand(command="echo hello", description=["echoing"], descriptionDone=["echoed"]) step.rendered = True self.assertLegacySummary(step, None) # handled by parent class def test_getLegacySummary_with_suffix(self): step = shell.ShellCommand( command="echo hello", descriptionSuffix="suffix") step.rendered = True self.assertLegacySummary(step, "'echo hello' suffix") def test_getLegacySummary_unrendered_WithProperties(self): step = shell.ShellCommand(command=properties.WithProperties('')) step.rendered = True self.assertLegacySummary(step, None) def test_getLegacySummary_unrendered_custom_new_style_class_renderable(self): step = shell.ShellCommand(command=object()) step.rendered = True self.assertLegacySummary(step, None) def test_getLegacySummary_unrendered_custom_old_style_class_renderable(self): class C: pass step = shell.ShellCommand(command=C()) step.rendered = True self.assertLegacySummary(step, None) def test_getLegacySummary_unrendered_WithProperties_list(self): step = shell.ShellCommand( command=['x', properties.WithProperties(''), 'y']) step.rendered = True self.assertLegacySummary(step, "'x y'") def test_run_simple(self): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello")) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + 0 ) self.expectOutcome(result=SUCCESS, state_string="'echo hello'") return self.runStep() def test_run_list(self): self.setupStep( shell.ShellCommand(workdir='build', command=['trial', '-b', '-B', 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_nested_description(self): self.setupStep( shell.ShellCommand(workdir='build', command=properties.FlattenList( ['trial', ['-b', '-B'], 'buildbot.test']), descriptionDone=properties.FlattenList( ['test', ['done']]), descriptionSuffix=properties.FlattenList(['suff', ['ix']]))) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string='test done suff ix') return self.runStep() def test_run_nested_command(self): self.setupStep( shell.ShellCommand(workdir='build', command=['trial', ['-b', '-B'], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_nested_deeply_command(self): self.setupStep( shell.ShellCommand(workdir='build', command=[['trial', ['-b', ['-B']]], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', '-B', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return 
self.runStep() def test_run_nested_empty_command(self): self.setupStep( shell.ShellCommand(workdir='build', command=['trial', [], '-b', [], 'buildbot.test'])) self.expectCommands( ExpectShell(workdir='build', command=['trial', '-b', 'buildbot.test']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="'trial -b ...'") return self.runStep() def test_run_env(self): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello"), worker_env=dict(DEF='HERE')) self.expectCommands( ExpectShell(workdir='build', command='echo hello', env=dict(DEF='HERE')) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_env_override(self): self.setupStep( shell.ShellCommand(workdir='build', env={'ABC': '123'}, command="echo hello"), worker_env=dict(ABC='XXX', DEF='HERE')) self.expectCommands( ExpectShell(workdir='build', command='echo hello', env=dict(ABC='123', DEF='HERE')) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_usePTY(self): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello", usePTY=False)) self.expectCommands( ExpectShell(workdir='build', command='echo hello', usePTY=False) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_usePTY_old_worker(self): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello", usePTY=True), worker_version=dict(shell='1.1')) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_run_decodeRC(self, rc=1, results=WARNINGS, extra_text=" (warnings)"): self.setupStep( shell.ShellCommand(workdir='build', command="echo hello", decodeRC={1: WARNINGS})) self.expectCommands( ExpectShell(workdir='build', command='echo hello') + rc ) self.expectOutcome( result=results, state_string="'echo hello'" + extra_text) return self.runStep() def test_run_decodeRC_defaults(self): return self.test_run_decodeRC(2, FAILURE, extra_text=" (failure)") def test_run_decodeRC_defaults_0_is_failure(self): return self.test_run_decodeRC(0, FAILURE, extra_text=" (failure)") def test_missing_command_error(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "ShellCommand's `command' argument is not specified"): shell.ShellCommand() class TreeSize(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_run_success(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stdout='9292 .\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="treesize 9292 KiB") self.expectProperty('tree-size-KiB', 9292) return self.runStep() def test_run_misparsed(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stdio='abcdef\n') + 0 ) self.expectOutcome(result=WARNINGS, state_string="treesize unknown (warnings)") return self.runStep() def test_run_failed(self): self.setupStep(shell.TreeSize()) self.expectCommands( ExpectShell(workdir='wkdir', command=['du', '-s', '-k', '.']) + ExpectShell.log('stdio', stderr='abcdef\n') + 1 ) self.expectOutcome(result=FAILURE, state_string="treesize unknown (failure)") return self.runStep() class SetPropertyFromCommand(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): 
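        # The cases in this class exercise SetPropertyFromCommand's two mutually
        # exclusive modes: property= stores the command's stdout (stripped by
        # default; strip=False disables this) under that name, while extract_fn=
        # is called with (rc, stdout, stderr) and returns a dict of properties to
        # set. Passing both, or neither, raises config.ConfigErrors, as
        # test_constructor_conflict and the test_error_* cases below show.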
self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_constructor_conflict(self): with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(property='foo', extract_fn=lambda: None) def test_run_property(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "abcdef") # note: stripped self.expectLogfile('property changes', r"res: " + repr('abcdef')) return self.runStep() def test_renderable_workdir(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="cmd", workdir=properties.Interpolate('wkdir'))) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "abcdef") # note: stripped self.expectLogfile('property changes', r"res: " + repr('abcdef')) return self.runStep() def test_run_property_no_strip(self): self.setupStep(shell.SetPropertyFromCommand(property="res", command="cmd", strip=False)) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='\n\nabcdef\n') + 0 ) self.expectOutcome(result=SUCCESS, state_string="property 'res' set") self.expectProperty("res", "\n\nabcdef\n") self.expectLogfile('property changes', r"res: " + repr('\n\nabcdef\n')) return self.runStep() def test_run_failure(self): self.setupStep( shell.SetPropertyFromCommand(property="res", command="blarg")) self.expectCommands( ExpectShell(workdir='wkdir', command="blarg") + ExpectShell.log('stdio', stderr='cannot blarg: File not found') + 1 ) self.expectOutcome(result=FAILURE, state_string="'blarg' (failure)") self.expectNoProperty("res") return self.runStep() def test_run_extract_fn(self): def extract_fn(rc, stdout, stderr): self.assertEqual( (rc, stdout, stderr), (0, 'startend\n', 'STARTEND\n')) return dict(a=1, b=2) self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout='start', stderr='START') + ExpectShell.log('stdio', stdout='end') + ExpectShell.log('stdio', stderr='END') + 0 ) self.expectOutcome(result=SUCCESS, state_string="2 properties set") self.expectLogfile('property changes', 'a: 1\nb: 2') self.expectProperty("a", 1) self.expectProperty("b", 2) return self.runStep() def test_run_extract_fn_cmdfail(self): def extract_fn(rc, stdout, stderr): self.assertEqual((rc, stdout, stderr), (3, '', '')) return dict(a=1, b=2) self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 3 ) # note that extract_fn *is* called anyway self.expectOutcome(result=FAILURE, state_string="2 properties set (failure)") self.expectLogfile('property changes', 'a: 1\nb: 2') return self.runStep() def test_run_extract_fn_cmdfail_empty(self): def extract_fn(rc, stdout, stderr): self.assertEqual((rc, stdout, stderr), (3, '', '')) return dict() self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 3 ) # note that extract_fn *is* called anyway, but returns no properties 
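        # A minimal extract_fn sketch matching the signature used in these tests
        # (hypothetical helper, shown for illustration only):
        #
        #   def count_lines(rc, stdout, stderr):
        #       # invoked even when the command fails; return {} to set nothing
        #       return {'line-count': len(stdout.splitlines())} if rc == 0 else {}
        #
        #   shell.SetPropertyFromCommand(command="cmd", extract_fn=count_lines)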
self.expectOutcome(result=FAILURE, state_string="'cmd' (failure)") return self.runStep() @defer.inlineCallbacks def test_run_extract_fn_exception(self): def extract_fn(rc, stdout, stderr): raise RuntimeError("oh noes") self.setupStep( shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + 0 ) # note that extract_fn *is* called anyway, but returns no properties self.expectOutcome(result=EXCEPTION, state_string="'cmd' (exception)") yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_error_both_set(self): """ If both ``extract_fn`` and ``property`` are defined, ``SetPropertyFromCommand`` reports a config error. """ with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(command=["echo", "value"], property="propname", extract_fn=lambda x: {"propname": "hello"}) def test_error_none_set(self): """ If neither ``extract_fn`` and ``property`` are defined, ``SetPropertyFromCommand`` reports a config error. """ with self.assertRaises(config.ConfigErrors): shell.SetPropertyFromCommand(command=["echo", "value"]) class PerlModuleTest(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_new_version_success(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Test Summary Report Result: PASS Tests: 10 Failed: 0 Tests: 10 Failed: 0 Files=93, Tests=20""")) + 0 ) self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed') return self.runStep() def test_new_version_warnings(self): self.setupStep(shell.PerlModuleTest(command="cmd", warningPattern='^OHNOES')) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Test Summary Report ------------------- foo.pl (Wstat: 0 Tests: 10 Failed: 0) Failed test: 0 OHNOES 1 OHNOES 2 Files=93, Tests=20, 0 wallclock secs ... Result: PASS""")) + 0 ) self.expectOutcome( result=WARNINGS, state_string='20 tests 20 passed 2 warnings (warnings)') return self.runStep() def test_new_version_failed(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ foo.pl .. 1/4""")) + ExpectShell.log('stdio', stderr=textwrap.dedent("""\ # Failed test 2 in foo.pl at line 6 # foo.pl line 6 is: ok(0);""")) + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ foo.pl .. Failed 1/4 subtests Test Summary Report ------------------- foo.pl (Wstat: 0 Tests: 4 Failed: 1) Failed test: 0 Files=1, Tests=4, 0 wallclock secs ( 0.06 usr 0.01 sys + 0.03 cusr 0.01 csys = 0.11 CPU) Result: FAIL""")) + ExpectShell.log('stdio', stderr=textwrap.dedent("""\ Failed 1/1 test programs. 
1/4 subtests failed.""")) + 1 ) self.expectOutcome(result=FAILURE, state_string='4 tests 3 passed 1 failed (failure)') return self.runStep() def test_old_version_success(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored All tests successful Files=10, Tests=20, 100 wall blah blah""")) + 0 ) self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed') return self.runStep() def test_old_version_failed(self): self.setupStep(shell.PerlModuleTest(command="cmd")) self.expectCommands( ExpectShell(workdir='wkdir', command="cmd") + ExpectShell.log('stdio', stdout=textwrap.dedent("""\ This junk ignored Failed 1/1 test programs, 3/20 subtests failed.""")) + 1 ) self.expectOutcome(result=FAILURE, state_string='20 tests 17 passed 3 failed (failure)') return self.runStep() class SetPropertyDeprecation(unittest.TestCase): """ Tests for L{shell.SetProperty} """ def test_deprecated(self): """ Accessing L{shell.SetProperty} reports a deprecation error. """ shell.SetProperty warnings = self.flushWarnings([self.test_deprecated]) self.assertEqual(len(warnings), 1) self.assertIdentical(warnings[0]['category'], DeprecationWarning) self.assertEqual(warnings[0]['message'], "buildbot.steps.shell.SetProperty was deprecated in Buildbot 0.8.8: " "It has been renamed to SetPropertyFromCommand" ) class Configure(unittest.TestCase): def test_class_attrs(self): # nothing too exciting here, but at least make sure the class is # present step = shell.Configure() self.assertEqual(step.command, ['./configure']) class WarningCountingShellCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_no_warnings(self): self.setupStep(shell.WarningCountingShellCommand(workdir='w', command=['make'])) self.expectCommands( ExpectShell(workdir='w', command=["make"]) + ExpectShell.log('stdio', stdout='blarg success!') + 0 ) self.expectOutcome(result=SUCCESS) self.expectProperty("warnings-count", 0) return self.runStep() def test_default_pattern(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'])) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='normal: foo\nwarning: blarg!\n' 'also normal\nWARNING: blarg!\n') + 0 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 2) self.expectLogfile("warnings (2)", "warning: blarg!\nWARNING: blarg!\n") return self.runStep() def test_custom_pattern(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], warningPattern=r"scary:.*")) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='scary: foo\nwarning: bar\nscary: bar') + 0 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 2) self.expectLogfile("warnings (2)", "scary: foo\nscary: bar\n") return self.runStep() def test_maxWarnCount(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], maxWarnCount=9)) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='warning: noo!\n' * 10) + 0 ) self.expectOutcome(result=FAILURE) self.expectProperty("warnings-count", 10) return self.runStep() def test_fail_with_warnings(self): 
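        # Behaviour covered by the surrounding tests: the default warningPattern
        # matches lines such as 'warning: blarg!' and 'WARNING: blarg!', matches
        # are collected into a "warnings (N)" logfile plus a warnings-count
        # property, warningPattern=r"scary:.*" substitutes a custom regex, and
        # exceeding maxWarnCount escalates the WARNINGS result to FAILURE.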
self.setupStep(shell.WarningCountingShellCommand(command=['make'])) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout='warning: I might fail') + 3 ) self.expectOutcome(result=FAILURE) self.expectProperty("warnings-count", 1) self.expectLogfile("warnings (1)", "warning: I might fail\n") return self.runStep() def test_warn_with_decoderc(self): self.setupStep(shell.WarningCountingShellCommand(command=['make'], decodeRC={3: WARNINGS})) self.expectCommands( ExpectShell(workdir='wkdir', command=["make"], ) + ExpectShell.log('stdio', stdout='I might fail with rc') + 3 ) self.expectOutcome(result=WARNINGS) self.expectProperty("warnings-count", 0) return self.runStep() def do_test_suppressions(self, step, supps_file='', stdout='', exp_warning_count=0, exp_warning_log='', exp_exception=False, props=None): self.setupStep(step) if props is not None: for key in props: self.build.setProperty(key, props[key], "") # Invoke the expected callbacks for the suppression file upload. Note # that this assumes all of the remote_* are synchronous, but can be # easily adapted to suit if that changes (using inlineCallbacks) def upload_behavior(command): writer = command.args['writer'] writer.remote_write(supps_file) writer.remote_close() command.rc = 0 if supps_file is not None: self.expectCommands( # step will first get the remote suppressions file Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='supps', workdir='wkdir', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(upload_behavior), # and then run the command ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout=stdout) + 0 ) else: self.expectCommands( ExpectShell(workdir='wkdir', command=["make"]) + ExpectShell.log('stdio', stdout=stdout) + 0 ) if exp_exception: self.expectOutcome(result=EXCEPTION, state_string="'make' (exception)") else: if exp_warning_count != 0: self.expectOutcome(result=WARNINGS, state_string="'make' (warnings)") self.expectLogfile("warnings (%d)" % exp_warning_count, exp_warning_log) else: self.expectOutcome(result=SUCCESS, state_string="'make'") self.expectProperty("warnings-count", exp_warning_count) return self.runStep() def test_suppressions(self): step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps') supps_file = textwrap.dedent("""\ # example suppressions file amar.c : .*unused variable.* holding.c : .*invalid access to non-static.* """).strip() stdout = textwrap.dedent("""\ /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah amar.c: In function 'write_record': amar.c:164: warning: unused variable 'x' amar.c:164: warning: this should show up /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah /bin/sh ../libtool --tag=CC --silent --mode=link gcc blah holding.c: In function 'holding_thing': holding.c:984: warning: invalid access to non-static 'y' """) exp_warning_log = textwrap.dedent("""\ amar.c:164: warning: this should show up """) return self.do_test_suppressions(step, supps_file, stdout, 1, exp_warning_log) def test_suppressions_directories(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) supps_file = textwrap.dedent("""\ # these should be suppressed: amar-src/amar.c : XXX .*/server-src/.* : AAA # these should not, as the dirs do not match: amar.c : YYY server-src.* : 
BBB """).strip() # note that this uses the unicode smart-quotes that gcc loves so much stdout = textwrap.dedent("""\ make: Entering directory \u2019amar-src\u2019 amar.c:164: warning: XXX amar.c:165: warning: YYY make: Leaving directory 'amar-src' make: Entering directory "subdir" make: Entering directory 'server-src' make: Entering directory `one-more-dir` holding.c:999: warning: BBB holding.c:1000: warning: AAA """) exp_warning_log = textwrap.dedent("""\ amar.c:165: warning: YYY holding.c:999: warning: BBB """) return self.do_test_suppressions(step, supps_file, stdout, 2, exp_warning_log) def test_suppressions_directories_custom(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor, directoryEnterPattern="^IN: (.*)", directoryLeavePattern="^OUT:") supps_file = "dir1/dir2/abc.c : .*" stdout = textwrap.dedent("""\ IN: dir1 IN: decoy OUT: decoy IN: dir2 abc.c:123: warning: hello """) return self.do_test_suppressions(step, supps_file, stdout, 0, '') def test_suppressions_linenos(self): def warningExtractor(step, line, match): return line.split(':', 2) step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) supps_file = "abc.c:.*:100-199\ndef.c:.*:22" stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, supps_file, stdout, 2, exp_warning_log) @defer.inlineCallbacks def test_suppressions_warningExtractor_exc(self): def warningExtractor(step, line, match): raise RuntimeError("oh noes") step = shell.WarningCountingShellCommand(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) # need at least one supp to trigger warningExtractor supps_file = 'x:y' stdout = "abc.c:99: warning: seen 1" yield self.do_test_suppressions(step, supps_file, stdout, exp_exception=True) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_suppressions_addSuppression(self): # call addSuppression "manually" from a subclass class MyWCSC(shell.WarningCountingShellCommand): def start(self): self.addSuppression([('.*', '.*unseen.*', None, None)]) return super().start() def warningExtractor(step, line, match): return line.split(':', 2) step = MyWCSC(command=['make'], suppressionFile='supps', warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, '', stdout, 2, exp_warning_log) def test_suppressions_suppressionsParameter(self): def warningExtractor(step, line, match): return line.split(':', 2) supps = ( ("abc.c", ".*", 100, 199), ("def.c", ".*", 22, 22), ) step = shell.WarningCountingShellCommand(command=['make'], suppressionList=supps, warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, None, stdout, 2, exp_warning_log) def test_suppressions_suppressionsRenderableParameter(self): def 
warningExtractor(step, line, match): return line.split(':', 2) supps = ( ("abc.c", ".*", 100, 199), ("def.c", ".*", 22, 22), ) step = shell.WarningCountingShellCommand(command=['make'], suppressionList=properties.Property("suppressionsList"), warningExtractor=warningExtractor) stdout = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:150: warning: unseen def.c:22: warning: unseen abc.c:200: warning: seen 2 """) exp_warning_log = textwrap.dedent("""\ abc.c:99: warning: seen 1 abc.c:200: warning: seen 2 """) return self.do_test_suppressions(step, None, stdout, 2, exp_warning_log, props={"suppressionsList": supps}) def test_warnExtractFromRegexpGroups(self): step = shell.WarningCountingShellCommand(command=['make']) we = shell.WarningCountingShellCommand.warnExtractFromRegexpGroups line, pat, exp_file, exp_lineNo, exp_text = \ ('foo:123:text', '(.*):(.*):(.*)', 'foo', 123, 'text') self.assertEqual(we(step, line, re.match(pat, line)), (exp_file, exp_lineNo, exp_text)) def test_missing_command_error(self): # this checks that an exception is raised for invalid arguments with self.assertRaisesConfigError( "WarningCountingShellCommand's `command' argument is not " "specified"): shell.WarningCountingShellCommand() class Compile(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_class_args(self): # since this step is just a pre-configured WarningCountingShellCommand, # there' not much to test! step = self.setupStep(shell.Compile()) self.assertEqual(step.name, "compile") self.assertTrue(step.haltOnFailure) self.assertTrue(step.flunkOnFailure) self.assertEqual(step.description, ["compiling"]) self.assertEqual(step.descriptionDone, ["compile"]) self.assertEqual(step.command, ["make", "all"]) class Test(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpBuildStep() def tearDown(self): self.tearDownBuildStep() def test_setTestResults(self): step = self.setupStep(shell.Test()) step.setTestResults(total=10, failed=3, passed=5, warnings=3) self.assertEqual(step.statistics, { 'tests-total': 10, 'tests-failed': 3, 'tests-passed': 5, 'tests-warnings': 3, }) # ensure that they're additive step.setTestResults(total=1, failed=2, passed=3, warnings=4) self.assertEqual(step.statistics, { 'tests-total': 11, 'tests-failed': 5, 'tests-passed': 8, 'tests-warnings': 7, }) def test_describe_not_done(self): step = self.setupStep(shell.Test()) step.rendered = True self.assertEqual(step.describe(), None) def test_describe_done(self): step = self.setupStep(shell.Test()) step.rendered = True step.statistics['tests-total'] = 93 step.statistics['tests-failed'] = 10 step.statistics['tests-passed'] = 20 step.statistics['tests-warnings'] = 30 self.assertEqual(step.describe(done=True), ['93 tests', '20 passed', '30 warnings', '10 failed']) def test_describe_done_no_total(self): step = self.setupStep(shell.Test()) step.rendered = True step.statistics['tests-total'] = 0 step.statistics['tests-failed'] = 10 step.statistics['tests-passed'] = 20 step.statistics['tests-warnings'] = 30 # describe calculates 60 = 10+20+30 self.assertEqual(step.describe(done=True), ['60 tests', '20 passed', '30 warnings', '10 failed']) buildbot-2.6.0/master/buildbot/test/unit/test_steps_shellsequence.py000066400000000000000000000176021361162603000260530ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.properties import WithProperties from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import shellsequence from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config as configmixin from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class DynamicRun(shellsequence.ShellSequence): def run(self): return self.runShellSequence(self.dynamicCommands) class TestOneShellCommand(steps.BuildStepMixin, configmixin.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def testShellArgInput(self): with self.assertRaisesConfigError( "the 'command' parameter of ShellArg must not be None"): shellsequence.ShellArg(command=None) arg1 = shellsequence.ShellArg(command=1) with self.assertRaisesConfigError( "1 is an invalid command, it must be a string or a list"): arg1.validateAttributes() arg2 = shellsequence.ShellArg(command=["make", 1]) with self.assertRaisesConfigError( "['make', 1] must only have strings in it"): arg2.validateAttributes() for goodcmd in ["make p1", ["make", "p1"]]: arg = shellsequence.ShellArg(command=goodcmd) arg.validateAttributes() def testShellArgsAreRendered(self): arg1 = shellsequence.ShellArg(command=WithProperties('make %s', 'project'), logfile=WithProperties('make %s', 'project')) self.setupStep( shellsequence.ShellSequence(commands=[arg1], workdir='build')) self.properties.setProperty("project", "BUILDBOT-TEST", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST') + 0 + Expect.log('stdio make BUILDBOT-TEST')) # TODO: need to factor command-summary stuff into a utility method and # use it here self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST'") return self.runStep() def createDynamicRun(self, commands): DynamicRun.dynamicCommands = commands return DynamicRun() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsNone(self): self.setupStep(self.createDynamicRun(None)) self.expectOutcome(result=EXCEPTION, state_string="finished (exception)") return self.runStep() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsString(self): self.setupStep(self.createDynamicRun(["one command"])) self.expectOutcome(result=EXCEPTION, state_string='finished (exception)') return self.runStep() def testSanityChecksAreDoneInRuntimeWhenDynamicCmdIsInvalidShellArg(self): self.setupStep( self.createDynamicRun([shellsequence.ShellArg(command=1)])) self.expectOutcome(result=EXCEPTION, state_string='finished (exception)') return self.runStep() def 
testMultipleCommandsAreRun(self): arg1 = shellsequence.ShellArg(command='make p1') arg2 = shellsequence.ShellArg(command='deploy p1', logfile='deploy') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 0, ExpectShell(workdir='build', command='deploy p1') + 0 + Expect.log('stdio deploy p1')) self.expectOutcome(result=SUCCESS, state_string="'deploy p1'") return self.runStep() def testSkipWorks(self): arg1 = shellsequence.ShellArg(command='make p1') arg2 = shellsequence.ShellArg(command='') arg3 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2, arg3], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 0, ExpectShell(workdir='build', command='deploy p1') + 0) self.expectOutcome(result=SUCCESS, state_string="'deploy p1'") return self.runStep() def testWarningWins(self): arg1 = shellsequence.ShellArg(command='make p1', warnOnFailure=True, flunkOnFailure=False) arg2 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 1, ExpectShell(workdir='build', command='deploy p1') + 0) self.expectOutcome(result=WARNINGS, state_string="'deploy p1'") return self.runStep() def testSequenceStopsOnHaltOnFailure(self): arg1 = shellsequence.ShellArg(command='make p1', haltOnFailure=True) arg2 = shellsequence.ShellArg(command='deploy p1') self.setupStep( shellsequence.ShellSequence(commands=[arg1, arg2], workdir='build')) self.expectCommands(ExpectShell(workdir='build', command='make p1') + 1) self.expectOutcome(result=FAILURE, state_string="'make p1'") return self.runStep() def testShellArgsAreRenderedAnewAtEachBuild(self): """Unit test to ensure that ShellArg instances are properly re-rendered. This unit test makes sure that ShellArg instances are rendered anew at each new build. """ arg = shellsequence.ShellArg(command=WithProperties('make %s', 'project'), logfile=WithProperties('make %s', 'project')) step = shellsequence.ShellSequence(commands=[arg], workdir='build') # First "build" self.setupStep(step) self.properties.setProperty("project", "BUILDBOT-TEST-1", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST-1') + 0 + Expect.log('stdio make BUILDBOT-TEST-1')) self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST-1'") self.runStep() # Second "build" self.setupStep(step) self.properties.setProperty("project", "BUILDBOT-TEST-2", "TEST") self.expectCommands(ExpectShell(workdir='build', command='make BUILDBOT-TEST-2') + 0 + Expect.log('stdio make BUILDBOT-TEST-2')) self.expectOutcome(result=SUCCESS, state_string="'make BUILDBOT-TEST-2'") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_base_Source.py000066400000000000000000000147251361162603000270300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
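# Illustrative summary of the TestSource cases below (behaviour inferred from
# the expectations, not from the step implementation): they drive
# Source.startStep() with a mocked startVC and show that alwaysUseLatest=True
# makes the step ignore the sourcestamp's branch, revision and patch, calling
# startVC(<default branch>, None, None) instead.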
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot.steps.source import Source from buildbot.test.util import sourcesteps from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestSource(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_start_alwaysUseLatest_True(self): step = self.setupStep(Source(alwaysUseLatest=True), { 'branch': 'other-branch', 'revision': 'revision', }, patch='patch' ) step.branch = 'branch' step.startVC = mock.Mock() step.startStep(mock.Mock()) self.assertEqual(step.startVC.call_args, (('branch', None, None), {})) def test_start_alwaysUseLatest_False(self): step = self.setupStep(Source(), { 'branch': 'other-branch', 'revision': 'revision', }, patch='patch' ) step.branch = 'branch' step.startVC = mock.Mock() step.startStep(mock.Mock()) self.assertEqual( step.startVC.call_args, (('other-branch', 'revision', 'patch'), {})) def test_start_alwaysUseLatest_False_no_branch(self): step = self.setupStep(Source()) step.branch = 'branch' step.startVC = mock.Mock() step.startStep(mock.Mock()) self.assertEqual(step.startVC.call_args, (('branch', None, None), {})) def test_start_no_codebase(self): step = self.setupStep(Source()) step.branch = 'branch' step.startVC = mock.Mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.describe(), ['updating']) self.assertEqual(step.name, Source.name) step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('',)) self.assertEqual(step.description, ['updating']) def test_start_with_codebase(self): step = self.setupStep(Source(codebase='codebase')) step.branch = 'branch' step.startVC = mock.Mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.describe(), ['updating', 'codebase']) step.name = self.successResultOf(step.build.render(step.name)) self.assertEqual(step.name, Source.name + "-codebase") step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('codebase',)) self.assertEqual(step.describe(True), ['update', 'codebase']) def test_start_with_codebase_and_descriptionSuffix(self): step = self.setupStep(Source(codebase='my-code', descriptionSuffix='suffix')) step.branch = 'branch' step.startVC = mock.Mock() step.build.getSourceStamp = mock.Mock() step.build.getSourceStamp.return_value = None self.assertEqual(step.describe(), ['updating', 'suffix']) step.name = self.successResultOf(step.build.render(step.name)) self.assertEqual(step.name, Source.name + "-my-code") step.startStep(mock.Mock()) self.assertEqual(step.build.getSourceStamp.call_args[0], ('my-code',)) self.assertEqual(step.describe(True), ['update', 'suffix']) class TestSourceDescription(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_constructor_args_strings(self): step = Source(workdir='build', description='svn update (running)', descriptionDone='svn update') self.assertEqual(step.description, ['svn update (running)']) 
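        # Both constructor tests in this class document the same normalisation:
        # plain-string description/descriptionDone arguments are wrapped into
        # one-element lists, while list arguments are stored unchanged.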
self.assertEqual(step.descriptionDone, ['svn update']) def test_constructor_args_lists(self): step = Source(workdir='build', description=['svn', 'update', '(running)'], descriptionDone=['svn', 'update']) self.assertEqual(step.description, ['svn', 'update', '(running)']) self.assertEqual(step.descriptionDone, ['svn', 'update']) class AttrGroup(Source): def other_method(self): pass def mode_full(self): pass def mode_incremental(self): pass class TestSourceAttrGroup(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_attrgroup_hasattr(self): step = AttrGroup() self.assertTrue(step._hasAttrGroupMember('mode', 'full')) self.assertTrue(step._hasAttrGroupMember('mode', 'incremental')) self.assertFalse(step._hasAttrGroupMember('mode', 'nothing')) def test_attrgroup_getattr(self): step = AttrGroup() self.assertEqual(step._getAttrGroupMember('mode', 'full'), step.mode_full) self.assertEqual(step._getAttrGroupMember('mode', 'incremental'), step.mode_incremental) with self.assertRaises(AttributeError): step._getAttrGroupMember('mode', 'nothing') def test_attrgroup_listattr(self): step = AttrGroup() self.assertEqual(sorted(step._listAttrGroupMembers('mode')), ['full', 'incremental']) buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_bzr.py000066400000000000000000000754371361162603000254020ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
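# Illustrative summary of the Bzr step tests below (inferred from their
# expectations): each run checks 'bzr --version', stats .buildbot-patched and
# .bzr, performs the method-specific commands (clean-tree, update, checkout or
# cpdir depending on mode/method), and finally reads got_revision from
#   bzr version-info --custom --template='{revno}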
# # Copyright Buildbot Team Members import os from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import bzr from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestBzr(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_win32path(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.build.path_module = namedModule('ntpath') self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_timeout(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh', timeout=1)) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh'), args=dict(revision='3730')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', 
logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '3730']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patched(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, # clean up the applied patch ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, # this clean is from 'mode=clean' ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( 
bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.FileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clean_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clean'), args=dict(revision='2345')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--ignored', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '2345']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_fresh(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'clean-tree', '--force']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) 
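        # Note on the *_worker_2_16 variant above: for a worker reporting
        # version 2.16 the downloadFile expectations use the legacy 'slavedest'
        # argument name instead of 'workerdest'; the rest of the command
        # sequence is identical.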
self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_retry(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='clobber'), args=dict(revision='3730')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.', '-r', '3730']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_baseurl(self): self.setupStep( bzr.Bzr(baseURL='http://bzr.squid-cache.org/bzr/squid3', defaultBranch='trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', os.path.join('http://bzr.squid-cache.org/bzr/squid3', 'trunk'), '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_full_clobber_baseurl_nodefault(self): self.setupStep( bzr.Bzr(baseURL='http://bzr.squid-cache.org/bzr/squid3', defaultBranch='trunk', mode='full', method='clobber'), args=dict(branch='branches/SQUID_3_0')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', os.path.join('http://bzr.squid-cache.org/bzr/squid3', 'branches/SQUID_3_0'), '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def 
test_mode_full_copy(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='build', logEnviron=True)) + 0, Expect('stat', dict(file='source/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['bzr', 'update']) + 0, Expect('cpdir', {'fromdir': 'source', 'logEnviron': True, 'todir': 'build'}) + 0, ExpectShell(workdir='source', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental_revision(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental'), args=dict(revision='9384')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'update', '-r', '9384']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'Bzr') return self.runStep() def test_mode_incremental_no_existing_repo(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100\n', 'Bzr') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 
'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='100\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100\n', 'Bzr') return self.runStep() def test_bad_revparse(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + 0, ExpectShell(workdir='wkdir', command=['bzr', 'version-info', '--custom', "--template='{revno}"]) + ExpectShell.log('stdio', stdout='oiasdfj010laksjfd') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_bad_checkout(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.bzr', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['bzr', 'checkout', 'http://bzr.squid-cache.org/bzr/squid3/trunk', '.']) + ExpectShell.log('stdio', stderr='failed\n') + 128, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( bzr.Bzr(repourl='http://bzr.squid-cache.org/bzr/squid3/trunk', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['bzr', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_cvs.py000066400000000000000000001676671361162603000254070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import time from twisted.internet import error from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import cvs from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin def uploadString(cvsroot): def behavior(command): writer = command.args['writer'] writer.remote_write(cvsroot + "\n") writer.remote_close() return behavior class TestCVS(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def setupStep(self, step, *args, **kwargs): super().setupStep(step, *args, **kwargs) # make parseGotRevision return something consistent, patching the class # instead of the object since a new object is constructed by runTest. def parseGotRevision(self, res): self.updateSourceProperty('got_revision', '2012-09-09 12:00:39 +0000') return res self.patch(cvs.CVS, 'parseGotRevision', parseGotRevision) def test_parseGotRevision(self): def gmtime(): return time.struct_time((2012, 9, 9, 12, 9, 33, 6, 253, 0)) self.patch(time, 'gmtime', gmtime) step = cvs.CVS(cvsroot="x", cvsmodule="m", mode='full', method='clean') props = [] def updateSourceProperty(prop, name): props.append((prop, name)) step.updateSourceProperty = updateSourceProperty self.assertEqual(step.parseGotRevision(10), 10) # passes res along self.assertEqual(props, [('got_revision', '2012-09-09 12:09:33 +0000')]) def test_cvsEntriesContainStickyDates(self): step = cvs.CVS(cvsroot="x", cvsmodule="m", mode='full', method='clean') self.assertEqual(step._cvsEntriesContainStickyDates('D'), False) self.assertEqual(step._cvsEntriesContainStickyDates( '/file/1.1/Fri May 17 23:20:00//TMOZILLA_1_0_0_BRANCH\nD'), False) self.assertEqual(step._cvsEntriesContainStickyDates( '/file/1.1/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD'), True) self.assertEqual(step._cvsEntriesContainStickyDates( '/file1/1.1/Fri May 17 23:20:00//\n/file2/1.1.2.3/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD'), True) def test_mode_full_clean_and_login(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', login="a password")) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', 'login'], initialStdin="a password\n") + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 
Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_and_login_worker_2_16(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', login="a password"), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', 'login'], initialStdin="a password\n") + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', 
command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, slavesrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', timeout=1)) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) 
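        # The *_worker_2_16 variants above pass worker_version={'*': '2.16'}
        # to setupStep(): against such an old worker the file-transfer
        # commands are expected with the legacy argument names 'slavesrc' /
        # 'slavedest' rather than 'workersrc' / 'workerdest'; that renaming
        # is the only difference from the corresponding tests for current
        # workers.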
self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_branch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean', branch='branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_branch_sourcestamp(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean'), args={'branch': 'my_branch'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_fresh(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 
Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clobber(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clobber') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clobber_retry(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clobber', retry=(0, 2)) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_copy(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='copy') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='source/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='source', command=['cvs', '-z3', 'update', '-dP']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': step.timeout}) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_copy_wrong_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='copy') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='source/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='source', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'source', 'mozilla/browser/']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': step.timeout}) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_sticky_date(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString('/file/1.1/Fri May 17 23:20:00//D2013.10.08.11.20.33\nD')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_password_windows(self): self.setupStep( cvs.CVS(cvsroot=":pserver:dustin:secrets@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) # on Windows, this file does not contain the password, per # http://trac.buildbot.net/ticket/2355 + Expect.behavior( uploadString(':pserver:dustin@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', branch='my_branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_special_case(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', branch='HEAD'), args=dict(revision='2012-08-16 16:05:16 +0000')) 
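        # A hedged reading of the "special case" noted in the expected
        # update command below: 'HEAD' is CVS's reserved name for the tip
        # of the trunk rather than a real branch tag, so when both
        # branch='HEAD' and a revision are supplied the step is expected to
        # drop '-r HEAD' and pin the checkout with '-D <timestamp>' alone.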
self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', # note, no -r HEAD here - that's the special # case '-D', '2012-08-16 16:05:16 +0000']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_sourcestamp(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental'), args={'branch': 'my_branch'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP', '-r', 'my_branch']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_not_loggedin(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) 
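        # How to read these expectations: appending "+ <int>" to an Expect
        # sets the fake command's exit code, so a 'stat' of
        # .buildbot-patched answering 1 models the tree not having been
        # patched, and an 'uploadFile' of CVS/Root answering 1 (as in the
        # *_no_existing_repo tests) models the absence of an existing
        # checkout, which is why those tests expect a fresh 'cvs checkout'
        # instead of an update.  Expect.behavior(uploadString(...)) makes
        # the fake 'uploadFile' write the given string back through the
        # step's StringFileWriter, which is how these tests feed CVS/Root,
        # CVS/Repository and CVS/Entries contents to the step (see
        # uploadString() at the top of this module).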
self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_no_existing_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_retry(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', retry=(0, 1)) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_wrong_repo(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_wrong_module(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental') self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', 
writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_clean_wrong_repo(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('the-end-of-the-universe')) + 0, ExpectShell(workdir='', command=['cvs', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_full_no_method(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 
'update', '-dP']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_with_options(self): step = cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', global_options=['-q'], extra_options=['-l']) self.setupStep(step) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=step.timeout)) + 0, ExpectShell(workdir='', command=['cvs', '-q', '-d', ':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot', '-z3', 'checkout', '-d', 'wkdir', '-l', 'mozilla/browser/']) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_mode_incremental_with_env_logEnviron(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental', env={'abc': '123'}, logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version'], env={'abc': '123'}, logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvs', '-z3', 'update', '-dP'], env={'abc': '123'}, logEnviron=False) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '2012-09-09 12:00:39 +0000', 'CVS') return self.runStep() def test_command_fails(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 128, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_cvsdiscard_fails(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Root', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior( uploadString(':pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Repository', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + 
Expect.behavior(uploadString('mozilla/browser/')) + 0, Expect('uploadFile', dict(blocksize=32768, maxsize=None, workersrc='Entries', workdir='wkdir/CVS', writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString('/file/1.1/Fri May 17 23:20:00//\nD')) + 0, ExpectShell(workdir='wkdir', command=['cvsdiscard', '--ignore']) + ExpectShell.log('stdio', stderr='FAIL!\n') + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( cvs.CVS(cvsroot=":pserver:anonymous@cvs-mirror.mozilla.org:/cvsroot", cvsmodule="mozilla/browser/", mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['cvs', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_darcs.py000066400000000000000000000440011361162603000256600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import error from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import darcs from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestDarcs(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_no_empty_step_config(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs() def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='fresh') def test_incremental_invalid_method(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(repourl='http://localhost/darcs', mode='incremental', method='fresh') def test_no_repo_url(self): with self.assertRaises(config.ConfigErrors): darcs.Darcs(mode='full', method='fresh') def test_mode_full_clobber(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', 
stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_copy(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_no_method(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'pull', '--all', '--verbose']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_patched(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, Expect('stat', dict(file='build/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='build', command=['darcs', 'pull', '--all', '--verbose']) + 0, ExpectShell(workdir='build', 
command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_patch(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'pull', '--all', '--verbose']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_retry(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_revision(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber'), dict(revision='abcdef01')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.darcs-context', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', '--context', '.darcs-context', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', 
command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_full_clobber_revision_worker_2_16(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber'), dict(revision='abcdef01'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.darcs-context', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', '--context', '.darcs-context', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_mode_incremental_no_existing_repo(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_darcs', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['darcs', 'get', '--verbose', '--lazy', '--repo-name', 'wkdir', 'http://localhost/darcs']) + 0, ExpectShell(workdir='wkdir', command=['darcs', 'changes', '--max-count=1']) + ExpectShell.log('stdio', stdout='Tue Aug 20 09:18:41 IST 2013 abc@gmail.com') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'Tue Aug 20 09:18:41 IST 2013 abc@gmail.com', 'Darcs') return self.runStep() def test_worker_connection_lost(self): self.setupStep( darcs.Darcs(repourl='http://localhost/darcs', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['darcs', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_gerrit.py000066400000000000000000000220771361162603000260710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot.process.results import SUCCESS
from buildbot.steps.source import gerrit
from buildbot.test.fake.remotecommand import Expect
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import config
from buildbot.test.util import sourcesteps
from buildbot.test.util.misc import TestReactorMixin


class TestGerrit(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin,
                 TestReactorMixin, unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        return self.setUpSourceStep()

    def tearDown(self):
        return self.tearDownSourceStep()

    def test_mode_full_clean(self):
        self.setupStep(
            gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git',
                          mode='full', method='clean'))
        self.build.setProperty("event.change.project", "buildbot")
        self.sourcestamp.project = 'buildbot'
        self.build.setProperty("event.patchSet.ref", "gerrit_branch")
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version'])
            + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1,
            Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200})
            + Expect.update('files', ['.git']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'fetch', '-t',
                                 'http://github.com/buildbot/buildbot.git',
                                 'gerrit_branch', '--progress']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'checkout', '-B', 'gerrit_branch']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'])
            + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0,
        )
        self.expectOutcome(result=SUCCESS)
        self.expectProperty(
            'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'Gerrit')
        return self.runStep()

    def test_mode_full_clean_force_build(self):
        self.setupStep(
            gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git',
                          mode='full', method='clean'))
        self.build.setProperty("event.change.project", "buildbot")
        self.sourcestamp.project = 'buildbot'
        self.build.setProperty("gerrit_change", "1234/567")
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version'])
            + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1,
            Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200})
            + Expect.update('files', ['.git']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'fetch', '-t',
                                 'http://github.com/buildbot/buildbot.git',
                                 'refs/changes/34/1234/567', '--progress']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'checkout', '-B', 'refs/changes/34/1234/567']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'])
            + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0,
        )
        self.expectOutcome(result=SUCCESS)
        self.expectProperty(
            'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'Gerrit')
        return self.runStep()

    def test_mode_full_clean_force_same_project(self):
        self.setupStep(
            gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git',
                          mode='full', method='clean', codebase='buildbot'))
        self.build.setProperty("event.change.project", "buildbot")
        self.sourcestamp.project = 'buildbot'
        self.build.setProperty("gerrit_change", "1234/567")
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version'])
            + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1,
            Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200})
            + Expect.update('files', ['.git']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'fetch', '-t',
                                 'http://github.com/buildbot/buildbot.git',
                                 'refs/changes/34/1234/567', '--progress']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'checkout', '-B', 'refs/changes/34/1234/567']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'])
            + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0,
        )
        self.expectOutcome(result=SUCCESS)
        self.expectProperty(
            'got_revision', {'buildbot': 'f6ad368298bd941e934a41f3babc827b2aa95a1d'}, 'Gerrit')
        return self.runStep()

    def test_mode_full_clean_different_project(self):
        self.setupStep(
            gerrit.Gerrit(repourl='http://github.com/buildbot/buildbot.git',
                          mode='full', method='clean', codebase='buildbot'))
        self.build.setProperty("event.change.project", "buildbot")
        self.sourcestamp.project = 'not_buildbot'
        self.build.setProperty("gerrit_change", "1234/567")
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version'])
            + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1,
            Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200})
            + Expect.update('files', ['.git']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'fetch', '-t',
                                 'http://github.com/buildbot/buildbot.git',
                                 'HEAD', '--progress']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0,
            ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'])
            + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0,
        )
        self.expectOutcome(result=SUCCESS)
        return self.runStep()
buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_git.py000066400000000000000000005356241361162603000253660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.internet import error from twisted.trial import unittest from buildbot import config as bbconfig from buildbot.interfaces import WorkerTooOldError from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import git from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin class TestGit(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.Git def setUp(self): self.setUpTestReactor() self.sourceName = self.stepClass.__name__ return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full_clean(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_progress_False(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', progress=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = 
'/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_command = 'ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.Builder.wkdir.buildbot/ssh-wrapper.sh' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', 
dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() @parameterized.expand([ ('host_key', dict(sshHostKey='sshhostkey')), ('known_hosts', dict(sshKnownHosts='known_hosts')), ]) def test_mode_full_clean_ssh_host_key_2_10(self, name, class_params): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', **class_params)) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', 
sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' ssh_command = \ 'ssh -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.Builder.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', 
stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_1_7_progress(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey', progress=True)) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.Builder.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_ssh_host_key_2_10_abs_workdir(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey', sshHostKey='sshhostkey'), wantDefaultWorkdir=False) workdir = '/myworkdir/workdir' self.build.workdir = workdir ssh_workdir = '/myworkdir/.Builder.workdir.buildbot' ssh_key_path = '/myworkdir/.Builder.workdir.buildbot/ssh-key' ssh_known_hosts_path = '/myworkdir/.Builder.workdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir=workdir, command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='/myworkdir/workdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir=workdir, mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, 
reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir=workdir, mode=0o400)) + 0, Expect('listdir', {'dir': workdir, 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir=workdir, command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir=workdir, command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir=workdir, command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir=workdir, command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.changeWorkerSystem('win32') self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.Builder.wkdir.buildbot' ssh_key_path = '\\wrk\\.Builder.wkdir.buildbot\\ssh-key' ssh_command_config = 'core.sshCommand=ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 
0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_2_3(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.Builder.wkdir.buildbot' ssh_key_path = '\\wrk\\.Builder.wkdir.buildbot\\ssh-key' ssh_command = 'ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_win32path_ssh_key_1_7(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', sshPrivateKey='sshkey')) self.changeWorkerSystem('win32') ssh_workdir = '\\wrk\\.Builder.wkdir.buildbot' ssh_key_path = '\\wrk\\.Builder.wkdir.buildbot\\ssh-key' ssh_wrapper_path = '\\wrk\\.Builder.wkdir.buildbot\\ssh-wrapper.sh' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('stat', dict(file='wkdir\\.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD'], env={'GIT_SSH': ssh_wrapper_path}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 
'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', timeout=1, mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], logEnviron=True) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', 
'-f', '-f', '-d', '-x'], logEnviron=True) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['git', 'update-index', '--refresh']) + 0, ExpectShell(workdir='wkdir', command=['git', 'apply', '--index', '-p', '1'], initialStdin='patch') + 1, ) self.expectOutcome(result=FAILURE) self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clean_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_non_empty_builddir(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['file1', 'file2']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clean_parsefail(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + ExpectShell.log('stdio', stderr="fatal: Could not parse object " "'b08076bc71c7813038f2cefedff9c5b678d225a8'.\n") + 128, ) self.expectOutcome(result=FAILURE) self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_with_reference(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', reference='path/to/reference/repo')) 
self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--reference', 'path/to/reference/repo', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_no_existing_repo_with_origin(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', origin='foo', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--origin', 'foo', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean', submodules=True, progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d']) + 0, 
ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clone_fails(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, # clone fails ) self.expectOutcome(result=FAILURE, state_string="update (failure)") self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clobber_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_no_branch_support(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.5.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.']) + 0, ExpectShell(workdir='wkdir', command=['git', 
'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_oldworker(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', progress=True)) self.step.build.getWorkerCommandVersion = lambda cmd, oldversion: "2.15" self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.git', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', progress=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_version_format(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5.1') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retry(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + 
ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_branch_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', branch='test-branch', sshPrivateKey='ssh-key', progress=True)) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 
'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_clean_fails(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 1, # clean fails -> clobber Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'abcdef01', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_given_revision_not_exists(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + 
ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', 'abcdef01']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'abcdef01', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty('got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule_git_newer_1_7_6(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.6') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--force']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 
0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_submodule_v1_7_8(self): """This tests the same as test_mode_full_fresh_submodule, but the "submodule update" command should be different for Git v1.7.8+.""" self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.8') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'sync']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive', '--force', '--checkout']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'foreach', '--recursive', 'git clean -f -f -d -x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_shallow(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', shallow=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--depth', '1', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_shallow_depth(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', shallow="100")) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--depth', '100', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', 
stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_no_shallow(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retryFetch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retryFetch=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_retryFetch_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', retryFetch=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 1, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'test-branch']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) 
self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_clobberOnFailure(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', clobberOnFailure=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_clobberOnFailure_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', clobberOnFailure=True, branch='test-branch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'test-branch', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', '--branch', 'test-branch', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)), Expect('listdir', {'dir': 'source', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='source', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='source', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, 
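# Illustrative sketch only (not Buildbot's implementation): test_mode_full_copy
# above expects mode='full', method='copy' to refresh a pristine clone in
# 'source' and then copy it into 'wkdir'. The helper name and tuple format are
# assumptions used purely to summarise that expected command sequence.
def sketch_copy_mode_plan(repourl):
    return [
        ('rmdir', 'wkdir'),
        ('listdir', 'source'),
        ('shell', 'source', ['git', 'fetch', '-t', repourl, 'HEAD', '--progress']),
        ('shell', 'source', ['git', 'reset', '--hard', 'FETCH_HEAD', '--']),
        ('cpdir', 'source', 'wkdir'),
        ('shell', 'wkdir', ['git', 'rev-parse', 'HEAD']),
    ]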
ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy_ssh_key_2_10(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy', sshPrivateKey='sshkey')) ssh_workdir = '/wrk/.Builder.source.buildbot' ssh_key_path = '/wrk/.Builder.source.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='source', mode=0o400)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)), Expect('listdir', {'dir': 'source', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='source', command=['git', '-c', ssh_command_config, 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='source', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_copy_shallow(self): with self.assertRaisesConfigError( "shallow only possible with mode 'full' and method 'clobber'"): self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='copy', shallow=True) def test_mode_incremental_no_existing_repo(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_incremental_no_existing_repo_oldworker(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental')) self.step.build.getWorkerCommandVersion = lambda cmd, oldversion: "2.15" self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', 
dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.git', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'abcdef01', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_revparse_failure(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'abcdef01', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ada95a1d') # too short + 0, ) self.expectOutcome(result=FAILURE) self.expectNoProperty('got_revision') return self.runStep() def test_mode_full_clobber_submodule(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', submodules=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'submodule', 'update', '--init', '--recursive']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_repourl(self): with self.assertRaisesConfigError("must provide repourl"): 
self.stepClass(mode="full") def test_mode_full_fresh_revision(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', progress=True), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'abcdef01', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_fresh_clobberOnFailure(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='fresh', clobberOnFailure=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', []) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_no_method(self): self.setupStep( 
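# Illustrative sketch only (not Buildbot's implementation): in
# test_mode_full_fresh_retry above, retry=(0, 2) makes a failed clone be
# clobbered and attempted again, up to two extra times with a 0-second delay
# in between. The helper name is an assumption; it only mirrors the
# attempt-counting pattern implied by the expectations.
def sketch_retry_clone(clone_once, repeats):
    # Try once, then retry up to `repeats` more times; Buildbot would also
    # wait `delay` seconds between attempts (0 in the test above).
    for _ in range(1 + repeats):
        if clone_once():
            return True
    return False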
self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_with_env(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_mode_full_logEnviron(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version'], logEnviron=False) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': False, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d', '-x'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD'], logEnviron=False) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_wkdir_doesnt_exist(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full')) self.expectCommands( 
ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 1, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) return self.runStep() def test_getDescription(self): # clone of: test_mode_incremental # only difference is to set the getDescription property self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', getDescription=True)) self.expectCommands( # copied from test_mode_incremental: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe', 'HEAD']) + ExpectShell.log('stdio', stdout='Tag-1234') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectProperty('commit-description', 'Tag-1234', self.sourceName) return self.runStep() def test_getDescription_failed(self): # clone of: test_mode_incremental # only difference is to set the getDescription property # this tests when 'git describe' fails; for example, there are no # tags in the repository self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='incremental', getDescription=True)) self.expectCommands( # copied from test_mode_incremental: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'HEAD', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe', 'HEAD']) + ExpectShell.log('stdio', stdout='') + 128, # error, but it's suppressed ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectNoProperty('commit-description') return self.runStep() def setup_getDescription_test(self, setup_args, 
output_args, expect_head=True, codebase=None): # clone of: test_mode_full_clobber # only difference is to set the getDescription property kwargs = {} if codebase is not None: kwargs.update(codebase=codebase) self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clobber', progress=True, getDescription=setup_args, **kwargs)) self.expectCommands( # copied from test_mode_full_clobber: ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, ExpectShell(workdir='wkdir', command=['git', 'clone', 'http://github.com/buildbot/buildbot.git', '.', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, # plus this to test describe: ExpectShell(workdir='wkdir', command=['git', 'describe'] + output_args + (['HEAD'] if expect_head else [])) + ExpectShell.log('stdio', stdout='Tag-1234') + 0, ) if codebase: self.expectOutcome(result=SUCCESS, state_string="update " + codebase) self.expectProperty( 'got_revision', {codebase: 'f6ad368298bd941e934a41f3babc827b2aa95a1d'}, self.sourceName) self.expectProperty( 'commit-description', {codebase: 'Tag-1234'}, self.sourceName) else: self.expectOutcome(result=SUCCESS, state_string="update") self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', self.sourceName) self.expectProperty('commit-description', 'Tag-1234', self.sourceName) def test_getDescription_empty_dict(self): self.setup_getDescription_test( setup_args={}, output_args=[] ) return self.runStep() def test_getDescription_empty_dict_with_codebase(self): self.setup_getDescription_test( setup_args={}, output_args=[], codebase='baz' ) return self.runStep() def test_getDescription_match(self): self.setup_getDescription_test( setup_args={'match': 'stuff-*'}, output_args=['--match', 'stuff-*'] ) return self.runStep() def test_getDescription_match_false(self): self.setup_getDescription_test( setup_args={'match': None}, output_args=[] ) return self.runStep() def test_getDescription_tags(self): self.setup_getDescription_test( setup_args={'tags': True}, output_args=['--tags'] ) return self.runStep() def test_getDescription_tags_false(self): self.setup_getDescription_test( setup_args={'tags': False}, output_args=[] ) return self.runStep() def test_getDescription_all(self): self.setup_getDescription_test( setup_args={'all': True}, output_args=['--all'] ) return self.runStep() def test_getDescription_all_false(self): self.setup_getDescription_test( setup_args={'all': False}, output_args=[] ) return self.runStep() def test_getDescription_abbrev(self): self.setup_getDescription_test( setup_args={'abbrev': 7}, output_args=['--abbrev=7'] ) return self.runStep() def test_getDescription_abbrev_zero(self): self.setup_getDescription_test( setup_args={'abbrev': 0}, output_args=['--abbrev=0'] ) return self.runStep() def test_getDescription_abbrev_false(self): self.setup_getDescription_test( setup_args={'abbrev': False}, output_args=[] ) return self.runStep() def test_getDescription_dirty(self): self.setup_getDescription_test( setup_args={'dirty': True}, output_args=['--dirty'], expect_head=False ) return self.runStep() def test_getDescription_dirty_empty_str(self): self.setup_getDescription_test( setup_args={'dirty': ''}, 
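# Illustrative sketch only (not Buildbot's implementation): the
# test_getDescription_* cases around here pin down how the getDescription
# dict translates into `git describe` flags. The simplified mapper below is an
# editorial illustration (flag ordering follows the tests only approximately);
# its name is an assumption.
def sketch_describe_args(options):
    args = []
    for flag in ('all', 'always', 'contains', 'debug', 'exact-match',
                 'long', 'tags'):
        if options.get(flag):                 # plain boolean switches
            args.append('--' + flag)
    for flag in ('abbrev', 'candidates'):     # numeric options; 0 still counts
        value = options.get(flag)
        if value is not None and value is not False:
            args.append('--{0}={1}'.format(flag, value))
    if options.get('match'):
        args.extend(['--match', options['match']])
    dirty = options.get('dirty')
    if dirty is True or dirty == '':
        args.append('--dirty')
    elif dirty:
        args.append('--dirty={0}'.format(dirty))
    return args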
output_args=['--dirty'], expect_head=False ) return self.runStep() def test_getDescription_dirty_str(self): self.setup_getDescription_test( setup_args={'dirty': 'foo'}, output_args=['--dirty=foo'], expect_head=False ) return self.runStep() def test_getDescription_dirty_false(self): self.setup_getDescription_test( setup_args={'dirty': False}, output_args=[], expect_head=True ) return self.runStep() def test_getDescription_dirty_none(self): self.setup_getDescription_test( setup_args={'dirty': None}, output_args=[], expect_head=True ) return self.runStep() def test_getDescription_contains(self): self.setup_getDescription_test( setup_args={'contains': True}, output_args=['--contains'] ) return self.runStep() def test_getDescription_contains_false(self): self.setup_getDescription_test( setup_args={'contains': False}, output_args=[] ) return self.runStep() def test_getDescription_candidates(self): self.setup_getDescription_test( setup_args={'candidates': 7}, output_args=['--candidates=7'] ) return self.runStep() def test_getDescription_candidates_zero(self): self.setup_getDescription_test( setup_args={'candidates': 0}, output_args=['--candidates=0'] ) return self.runStep() def test_getDescription_candidates_false(self): self.setup_getDescription_test( setup_args={'candidates': False}, output_args=[] ) return self.runStep() def test_getDescription_exact_match(self): self.setup_getDescription_test( setup_args={'exact-match': True}, output_args=['--exact-match'] ) return self.runStep() def test_getDescription_exact_match_false(self): self.setup_getDescription_test( setup_args={'exact-match': False}, output_args=[] ) return self.runStep() def test_getDescription_debug(self): self.setup_getDescription_test( setup_args={'debug': True}, output_args=['--debug'] ) return self.runStep() def test_getDescription_debug_false(self): self.setup_getDescription_test( setup_args={'debug': False}, output_args=[] ) return self.runStep() def test_getDescription_long(self): self.setup_getDescription_test( setup_args={'long': True}, output_args=['--long'] ) def test_getDescription_long_false(self): self.setup_getDescription_test( setup_args={'long': False}, output_args=[] ) return self.runStep() def test_getDescription_always(self): self.setup_getDescription_test( setup_args={'always': True}, output_args=['--always'] ) def test_getDescription_always_false(self): self.setup_getDescription_test( setup_args={'always': False}, output_args=[] ) return self.runStep() def test_getDescription_lotsa_stuff(self): self.setup_getDescription_test( setup_args={'match': 'stuff-*', 'abbrev': 6, 'exact-match': True}, output_args=['--exact-match', '--match', 'stuff-*', '--abbrev=6'], codebase='baz' ) return self.runStep() def test_config_option(self): name = 'url.http://github.com.insteadOf' value = 'blahblah' self.setupStep( self.stepClass(repourl='%s/buildbot/buildbot.git' % (value,), mode='full', method='clean', config={name: value})) prefix = ['git', '-c', '%s=%s' % (name, value)] self.expectCommands( ExpectShell(workdir='wkdir', command=prefix + ['--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['clean', '-f', '-f', '-d']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['fetch', '-t', '%s/buildbot/buildbot.git' % ( value,), 'HEAD', '--progress']) + 0, 
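# Illustrative sketch only (not Buildbot's implementation): test_config_option
# here shows that the step's `config` dict is prepended to every git
# invocation as repeated `-c name=value` options. The helper name is an
# assumption used for illustration.
def sketch_git_prefix(config):
    prefix = ['git']
    for name, value in sorted(config.items()):
        prefix += ['-c', '{0}={1}'.format(name, value)]
    return prefix

# Example:
# sketch_git_prefix({'url.http://github.com.insteadOf': 'blahblah'})
# -> ['git', '-c', 'url.http://github.com.insteadOf=blahblah']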
ExpectShell(workdir='wkdir', command=prefix + ['reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=prefix + ['rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_worker_connection_lost(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + ('err', error.ConnectionLost()) ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def _test_WorkerTooOldError(self, _dovccmd, step, msg): def check(failure): self.assertIsInstance(failure.value, WorkerTooOldError) self.assertEqual(str(failure.value), msg) self.patch(self.stepClass, "_dovccmd", _dovccmd) gitStep = self.setupStep(step) gitStep._start_deferred = defer.Deferred() gitStep.startVC("branch", "revision", "patch") d = gitStep._start_deferred.addBoth(check) return d def test_noGitCommandInstalled(self): @defer.inlineCallbacks def _dovccmd(command, abandonOnFailure=True, collectStdout=False, initialStdin=None): """ Simulate the case where there is no git command. """ yield return "command not found:" step = self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean') msg = 'git is not installed on worker' return self._test_WorkerTooOldError(_dovccmd, step, msg) def test_gitCommandOutputShowsNoVersion(self): @defer.inlineCallbacks def _dovccmd(command, abandonOnFailure=True, collectStdout=False, initialStdin=None): """ Instead of outputting something like "git version 2.11", simulate truncated output which has no version string, to exercise error handling. 
""" yield return "git " step = self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean') msg = 'git is not installed on worker' return self._test_WorkerTooOldError(_dovccmd, step, msg) def test_config_get_description_not_dict_or_boolean(self): with self.assertRaisesConfigError("Git: getDescription must be a boolean or a dict."): self.stepClass(repourl="http://github.com/buildbot/buildbot.git", getDescription=["list"]) def test_config_invalid_method_with_full(self): with self.assertRaisesConfigError("Git: invalid method for mode 'full'."): self.stepClass(repourl="http://github.com/buildbot/buildbot.git", mode='full', method='unknown') class TestGitPush(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.GitPush def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_push_simple(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_force(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', force=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch', '--force']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_fail(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', force=True)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch', '--force']) + ExpectShell.log('stdio', stderr="error: failed to push some refs to \n") + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_push_ssh_key_2_10(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'push', url, 'testbranch']) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_key_2_3(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = 
'/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_command = 'ssh -i "{0}"'.format(ssh_key_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_key_1_7(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshKey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.Builder.wkdir.buildbot/ssh-wrapper.sh' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH': ssh_wrapper_path}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_host_key_2_10(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' ssh_command_config = \ 'core.sshCommand=ssh -i "{0}" ' \ '-o "UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.10.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', '-c', ssh_command_config, 'push', url, 'testbranch']) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_host_key_2_3(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' ssh_command = \ 'ssh -i "{0}" ' \ '-o 
"UserKnownHostsFile={1}"'.format(ssh_key_path, ssh_known_hosts_path) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 2.3.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH_COMMAND': ssh_command}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_push_ssh_host_key_1_7(self): url = 'ssh://github.com/test/test.git' self.setupStep( self.stepClass(workdir='wkdir', repourl=url, branch='testbranch', sshPrivateKey='sshkey', sshHostKey='sshhostkey')) ssh_workdir = '/wrk/.Builder.wkdir.buildbot' ssh_key_path = '/wrk/.Builder.wkdir.buildbot/ssh-key' ssh_wrapper_path = '/wrk/.Builder.wkdir.buildbot/ssh-wrapper.sh' ssh_known_hosts_path = '/wrk/.Builder.wkdir.buildbot/ssh-known-hosts' self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.0') + 0, Expect('mkdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_wrapper_path, workdir='wkdir', mode=0o700)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest=ssh_key_path, workdir='wkdir', mode=0o400)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader), workerdest=ssh_known_hosts_path, workdir='wkdir', mode=0o400)) + 0, ExpectShell(workdir='wkdir', command=['git', 'push', url, 'testbranch'], env={'GIT_SSH': ssh_wrapper_path}) + 0, Expect('rmdir', dict(dir=ssh_workdir, logEnviron=True)) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_raise_no_git(self): @defer.inlineCallbacks def _checkFeatureSupport(self): yield return False url = 'ssh://github.com/test/test.git' step = self.stepClass(workdir='wkdir', repourl=url, branch='testbranch') self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport) self.setupStep(step) self.expectOutcome(result=EXCEPTION) self.runStep() self.flushLoggedErrors(WorkerTooOldError) def test_config_fail_no_branch(self): with self.assertRaisesConfigError("GitPush: must provide branch"): self.stepClass(workdir='wkdir', repourl="url") class TestGitTag(steps.BuildStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = git.GitTag def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_tag_annotated(self): messages = ['msg1', 'msg2'] self.setupStep( self.stepClass(workdir='wkdir', tagName='myTag', annotated=True, messages=messages)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'tag', '-a', 'myTag', '-m', 'msg1', '-m', 'msg2']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def 
test_tag_simple(self):
        self.setupStep(
            self.stepClass(workdir='wkdir', tagName='myTag'))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version']) +
            ExpectShell.log('stdio', stdout='git version 1.7.5') +
            0,
            ExpectShell(workdir='wkdir', command=['git', 'tag', 'myTag']) +
            0,
        )
        self.expectOutcome(result=SUCCESS)
        return self.runStep()

    def test_tag_force(self):
        self.setupStep(
            self.stepClass(workdir='wkdir', tagName='myTag', force=True))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version']) +
            ExpectShell.log('stdio', stdout='git version 1.7.5') +
            0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'tag', 'myTag', '--force']) +
            0,
        )
        self.expectOutcome(result=SUCCESS)
        return self.runStep()

    def test_tag_fail_already_exist(self):
        self.setupStep(
            self.stepClass(workdir='wkdir', tagName='myTag'))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version']) +
            ExpectShell.log('stdio', stdout='git version 1.7.5') +
            0,
            ExpectShell(workdir='wkdir', command=['git', 'tag', 'myTag']) +
            ExpectShell.log('stdio',
                            stderr="fatal: tag \'%s\' already exist\n") +
            1
        )
        self.expectOutcome(result=FAILURE)
        return self.runStep()

    def test_config_annotated_no_messages(self):
        with self.assertRaises(bbconfig.ConfigErrors):
            self.setupStep(
                self.stepClass(workdir='wkdir', tagName='myTag',
                               annotated=True))

    def test_config_no_tag_name(self):
        with self.assertRaises(bbconfig.ConfigErrors):
            self.setupStep(
                self.stepClass(workdir='wkdir'))

    def test_config_not_annotated_but_messages(self):
        with self.assertRaises(bbconfig.ConfigErrors):
            self.setupStep(
                self.stepClass(workdir='wkdir', tagName='myTag',
                               messages=['msg']))

    def test_config_annotated_message_not_list(self):
        with self.assertRaises(bbconfig.ConfigErrors):
            self.setupStep(
                self.stepClass(workdir='wkdir', tagName='myTag',
                               annotated=True, messages="msg"))

    def test_raise_no_git(self):
        @defer.inlineCallbacks
        def _checkFeatureSupport(self):
            yield
            return False

        step = self.stepClass(workdir='wdir', tagName='myTag')
        self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport)
        self.setupStep(step)
        self.expectOutcome(result=EXCEPTION)
        self.runStep()
        self.flushLoggedErrors(WorkerTooOldError)


class TestGitCommit(steps.BuildStepMixin, config.ConfigErrorsMixin,
                    TestReactorMixin, unittest.TestCase):

    stepClass = git.GitCommit

    def setUp(self):
        self.setUpTestReactor()
        self.message_list = ['my commit', '42']
        self.path_list = ['file1.txt', 'file2.txt']
        return self.setUpBuildStep()

    def tearDown(self):
        return self.tearDownBuildStep()

    def test_add_fail(self):
        self.setupStep(
            self.stepClass(workdir='wkdir', paths=self.path_list,
                           messages=self.message_list))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version']) +
            ExpectShell.log('stdio', stdout='git version 1.7.5') +
            0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'symbolic-ref', 'HEAD']) +
            ExpectShell.log('stdio', stdout='refs/head/myBranch') +
            0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'add', 'file1.txt', 'file2.txt']) +
            1,
        )
        self.expectOutcome(result=FAILURE)
        return self.runStep()

    def test_commit(self):
        self.setupStep(
            self.stepClass(workdir='wkdir', paths=self.path_list,
                           messages=self.message_list))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['git', '--version']) +
            ExpectShell.log('stdio', stdout='git version 1.7.5') +
            0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'symbolic-ref', 'HEAD']) +
            ExpectShell.log('stdio', stdout='refs/head/myBranch') +
            0,
            ExpectShell(workdir='wkdir',
                        command=['git', 'add', 'file1.txt', 'file2.txt']) +
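# Illustrative sketch only (not Buildbot's implementation): the
# emptyCommits='ignore' cases just below rely on `git status --porcelain=v1`
# and only commit when something other than untracked ('??') entries shows up.
# The parser name below is an assumption used to illustrate that check.
def sketch_has_staged_changes(porcelain_output):
    for line in porcelain_output.splitlines():
        if line and not line.startswith('??'):
            return True
    return False

# Example (mirroring the fixtures below):
# sketch_has_staged_changes('MM file2.txt\n?? file3.txt') -> True
# sketch_has_staged_changes('?? file3.txt') -> False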
0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_disallow(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='disallow')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_commit_empty_allow(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='create-empty-commit')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42', '--allow-empty']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_withcommit(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + ExpectShell.log('stdio', stdout='MM file2.txt\n?? file3.txt') + 0, ExpectShell(workdir='wkdir', command=['git', 'commit', '-m', 'my commit', '-m', '42']) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_withoutcommit(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + ExpectShell.log('stdio', stdout='?? 
file3.txt') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_commit_empty_ignore_witherror(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list, emptyCommits='ignore')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='refs/head/myBranch') + 0, ExpectShell(workdir='wkdir', command=['git', 'add', 'file1.txt', 'file2.txt']) + 0, ExpectShell(workdir='wkdir', command=['git', 'status', '--porcelain=v1']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_detached_head(self): self.setupStep( self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list)) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, ExpectShell(workdir='wkdir', command=['git', 'symbolic-ref', 'HEAD']) + ExpectShell.log('stdio', stdout='') + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_config_no_files_arg(self): with self.assertRaisesConfigError( "GitCommit: must provide paths"): self.stepClass(workdir='wkdir', messages=self.message_list) def test_config_files_not_a_list(self): with self.assertRaisesConfigError( "GitCommit: paths must be a list"): self.stepClass(workdir='wkdir', paths="test.txt", messages=self.message_list) def test_config_no_messages_arg(self): with self.assertRaisesConfigError( "GitCommit: must provide messages"): self.stepClass(workdir='wkdir', paths=self.path_list) def test_config_messages_not_a_list(self): with self.assertRaisesConfigError( "GitCommit: messages must be a list"): self.stepClass(workdir='wkdir', paths=self.path_list, messages="my message") def test_raise_no_git(self): @defer.inlineCallbacks def _checkFeatureSupport(self): yield return False step = self.stepClass(workdir='wkdir', paths=self.path_list, messages=self.message_list) self.patch(self.stepClass, "checkFeatureSupport", _checkFeatureSupport) self.setupStep(step) self.expectOutcome(result=EXCEPTION) self.runStep() self.flushLoggedErrors(WorkerTooOldError) buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_github.py000066400000000000000000000116331361162603000260530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from buildbot.process.results import SUCCESS from buildbot.steps.source import github from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.unit import test_steps_source_git # GitHub step shall behave exactly like Git, and thus is inheriting its tests class TestGitHub(test_steps_source_git.TestGit): stepClass = github.GitHub def test_with_merge_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), dict(branch='refs/pull/1234/merge', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, # here we always ignore revision, and fetch the merge branch ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'http://github.com/buildbot/buildbot.git', 'refs/pull/1234/merge', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/pull/1234/merge']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitHub') return self.runStep() def test_with_head_branch(self): self.setupStep( self.stepClass(repourl='http://github.com/buildbot/buildbot.git', mode='full', method='clean'), dict(branch='refs/pull/1234/head', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, # in the case of the head, we try to find if the head is already present # and reset to that without fetching ExpectShell(workdir='wkdir', command=['git', 'cat-file', '-e', '12345678']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', '12345678', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'refs/pull/1234/head']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitHub') return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_gitlab.py000066400000000000000000000102771361162603000260360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.process.results import SUCCESS from buildbot.steps.source import gitlab from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestGitLab(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): stepClass = gitlab.GitLab def setUp(self): self.setUpTestReactor() self.sourceName = self.stepClass.__name__ return self.setUpSourceStep() def setupStep(self, step, args, **kwargs): step = super().setupStep(step, args, **kwargs) step.build.properties.setProperty("source_branch", "ms-viewport", "gitlab source branch") step.build.properties.setProperty("source_git_ssh_url", "git@gitlab.example.com:build/awesome_project.git", "gitlab source git ssh url") step.build.properties.setProperty("source_project_id", 2337, "gitlab source project ID") step.build.properties.setProperty("target_branch", "master", "gitlab target branch") step.build.properties.setProperty("target_git_ssh_url", "git@gitlab.example.com:mmusterman/awesome_project.git", "gitlab target git ssh url") step.build.properties.setProperty("target_project_id", 239, "gitlab target project ID") return step def tearDown(self): return self.tearDownSourceStep() def test_with_merge_branch(self): self.setupStep( self.stepClass(repourl='git@gitlab.example.com:mmusterman/awesome_project.git', mode='full', method='clean'), dict(branch='master', revision='12345678')) self.expectCommands( ExpectShell(workdir='wkdir', command=['git', '--version']) + ExpectShell.log('stdio', stdout='git version 1.7.5') + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('listdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + Expect.update('files', ['.git']) + 0, ExpectShell(workdir='wkdir', command=['git', 'clean', '-f', '-f', '-d']) + 0, # here we always ignore revision, and fetch the merge branch ExpectShell(workdir='wkdir', command=['git', 'fetch', '-t', 'git@gitlab.example.com:build/awesome_project.git', 'ms-viewport', '--progress']) + 0, ExpectShell(workdir='wkdir', command=['git', 'reset', '--hard', 'FETCH_HEAD', '--']) + 0, ExpectShell(workdir='wkdir', command=['git', 'checkout', '-B', 'ms-viewport']) + 0, ExpectShell(workdir='wkdir', command=['git', 'rev-parse', 'HEAD']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'f6ad368298bd941e934a41f3babc827b2aa95a1d', 'GitLab') return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_mercurial.py000066400000000000000000001423001361162603000265500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import mercurial from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestMercurial(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_workerVersionIsOlderThan(self, result): self.patch( mercurial.Mercurial, 'workerVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(mode="full") def test_incorrect_mode(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='invalid') def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', method='invalid') def test_incorrect_branchType(self): with self.assertRaises(config.ConfigErrors): mercurial.Mercurial(repourl='http://hg.mozilla.org', branchType='invalid') def test_mode_full_clean(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_win32path(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.build.path_module = namedModule('ntpath') self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', 
dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_timeout(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', timeout=1, mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', 
mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( 
remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=[ 'hg', '--verbose', 'import', '--no-commit', '-p', '1', '-'], initialStdin='patch') + 1, ) self.expectOutcome(result=FAILURE, state_string="update (failure)") return self.runStep() def test_mode_full_clean_no_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clobber(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clobber', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge', '--all']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh_no_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', 
'--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=True) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_no_existing_repo_dirname(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='dirname'), ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_retry(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='dirname', retry=(0, 1)), ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', 
command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change_dirname(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org/', mode='incremental', branchType='dirname', defaultBranch='devel'), dict(branch='stable') ) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org/stable']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org/stable', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_no_existing_repo_inrepo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 1, # does not exist ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', 
command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo_added_files(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + ExpectShell.log('stdio', stdout='foo\nbar/baz\n') + 1, Expect('rmdir', dict(dir=['wkdir/foo', 'wkdir/bar/baz'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_existing_repo_added_files_old_rmdir(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo')) self.patch_workerVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, # directory exists ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + ExpectShell.log('stdio', stdout='foo\nbar/baz\n') + 1, Expect('rmdir', dict(dir='wkdir/foo', logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir/bar/baz', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo'), dict( revision='abcdef01', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'abcdef01']) + 0, 
ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'abcdef01']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo'), dict( branch='stable', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'clone', '--noupdate', 'http://hg.mozilla.org', '.']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_branch_change_no_clobberOnBranchChange(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='incremental', branchType='inrepo', clobberOnBranchChange=False), dict( branch='stable', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch']) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()']) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'stable']) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n']) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_env(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo', env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge'], env={'abc': 
'123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()'], env={'abc': '123'}) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_full_clean_logEnviron(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo', logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.hg', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--config', 'extensions.purge=', 'purge'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'pull', 'http://hg.mozilla.org', '--rev', 'default'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'identify', '--branch'], logEnviron=False) + ExpectShell.log('stdio', stdout='default') + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'locate', 'set:added()'], logEnviron=False) + 1, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'update', '--clean', '--rev', 'default'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['hg', '--verbose', 'parents', '--template', '{node}\\n'], logEnviron=False) + ExpectShell.log('stdio', stdout='\n') + ExpectShell.log('stdio', stdout='f6ad368298bd941e934a41f3babc827b2aa95a1d') + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_command_fails(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='fresh', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( mercurial.Mercurial(repourl='http://hg.mozilla.org', mode='full', method='clean', branchType='inrepo')) self.expectCommands( ExpectShell(workdir='wkdir', command=['hg', '--verbose', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_mtn.py000066400000000000000000001376541361162603000254030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import error from twisted.trial import unittest from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import mtn from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import config from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin class TestMonotone(sourcesteps.SourceStepMixin, config.ConfigErrorsMixin, TestReactorMixin, unittest.TestCase): # Just some random revision id to test. REVID = '95215e2a9a9f8b6f5c9664e3807cd34617ea928c' MTN_VER = 'monotone 1.0 (base revision: UNKNOWN_REV)' def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def test_mode_full_clean(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', 
dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch_worker_2_16(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_patch_fail(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, 
Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 1, ) self.expectOutcome(result=FAILURE, state_string="update (failure)") return self.runStep() def test_mode_full_clean_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_no_existing_checkout(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clean_from_scratch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', 
dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clobber(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clobber', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_clobber_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clobber', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_no_existing_db(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, 
ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_no_existing_checkout(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_from_scratch(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 1, ExpectShell(workdir='.', command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'checkout', 'wkdir', '--db', 'db.mtn', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def 
test_mode_incremental_retry(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 1, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_fresh(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='fresh', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'ignored']) + ExpectShell.log('stdio', stdout='file3\nfile4') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2', 'wkdir/file3', 'wkdir/file4'], logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master'), dict(revision='abcdef01',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'abcdef01', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + 
ExpectShell.log('stdio', stdout='abcdef019a9f8b6f5c9664e3807cd34617ea928c') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty( 'got_revision', 'abcdef019a9f8b6f5c9664e3807cd34617ea928c', 'Monotone') return self.runStep() def test_mode_full_copy(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='copy', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_mode_full_no_method(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'build', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='build', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_incorrect_method(self): with self.assertRaisesConfigError( "Invalid method for mode == full"): mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='wrongmethod', branch='master') def test_incremental_invalid_method(self): with self.assertRaisesConfigError( "Incremental mode does not require method"): mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', method='fresh', branch="master") def test_repourl(self): with self.assertRaisesConfigError("must provide repourl"): mtn.Monotone(mode="full", branch="master") def test_branch(self): with self.assertRaisesConfigError("must provide branch"): mtn.Monotone(repourl='mtn://localhost/monotone', mode="full",) def test_mode_incremental_patched(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', 
dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='') + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'ls', 'unknown']) + ExpectShell.log('stdio', stdout='file1\nfile2') + 0, Expect('rmdir', dict(dir=['wkdir/file1', 'wkdir/file2'], logEnviron=True)) + 0, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_worker_connection_lost(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='full', method='clean', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def test_database_migration(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='migration needed') + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'migrate', '--db', 'db.mtn']) + 0, ExpectShell(workdir='.', command=['mtn', 'pull', 'mtn://localhost/monotone?master', '--db', 'db.mtn', '--ticker=dot']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'update', '--revision', 'h:master', '--branch', 'master']) + 0, ExpectShell(workdir='wkdir', command=['mtn', 'automate', 'select', 'w:']) + ExpectShell.log('stdio', stdout=self.REVID) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', self.REVID, 'Monotone') return self.runStep() def test_database_invalid(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='not a monotone database') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_database_too_new(self): self.setupStep( mtn.Monotone(repourl='mtn://localhost/monotone', mode='incremental', branch='master')) self.expectCommands( ExpectShell(workdir='wkdir', command=['mtn', '--version']) + ExpectShell.log('stdio', stdout=self.MTN_VER) + 0, Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0, ExpectShell(workdir='.', command=['mtn', 'db', 'info', '--db', 'db.mtn']) + ExpectShell.log('stdio', stdout='too new, cannot use') + 0, Expect('rmdir', 
            dict(dir='db.mtn', logEnviron=True)) + 0,
            ExpectShell(workdir='.',
                        command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0,
            ExpectShell(workdir='.',
                        command=['mtn', 'pull', 'mtn://localhost/monotone?master',
                                 '--db', 'db.mtn', '--ticker=dot']) + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched',
                                logEnviron=True)) + 1,
            Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0,
            ExpectShell(workdir='wkdir',
                        command=['mtn', 'update', '--revision', 'h:master',
                                 '--branch', 'master']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['mtn', 'automate', 'select', 'w:'])
            + ExpectShell.log('stdio', stdout=self.REVID)
            + 0,
        )
        self.expectOutcome(result=SUCCESS)
        self.expectProperty('got_revision', self.REVID, 'Monotone')
        return self.runStep()

    def test_database_empty(self):
        self.setupStep(
            mtn.Monotone(repourl='mtn://localhost/monotone',
                         mode='incremental', branch='master'))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command=['mtn', '--version'])
            + ExpectShell.log('stdio', stdout=self.MTN_VER)
            + 0,
            Expect('stat', dict(file='db.mtn', logEnviron=True)) + 0,
            ExpectShell(workdir='.',
                        command=['mtn', 'db', 'info', '--db', 'db.mtn'])
            + ExpectShell.log('stdio', stdout='database has no tables')
            + 0,
            Expect('rmdir', dict(dir='db.mtn', logEnviron=True)) + 0,
            ExpectShell(workdir='.',
                        command=['mtn', 'db', 'init', '--db', 'db.mtn']) + 0,
            ExpectShell(workdir='.',
                        command=['mtn', 'pull', 'mtn://localhost/monotone?master',
                                 '--db', 'db.mtn', '--ticker=dot']) + 0,
            Expect('stat', dict(file='wkdir/.buildbot-patched',
                                logEnviron=True)) + 1,
            Expect('stat', dict(file='wkdir/_MTN', logEnviron=True)) + 0,
            ExpectShell(workdir='wkdir',
                        command=['mtn', 'update', '--revision', 'h:master',
                                 '--branch', 'master']) + 0,
            ExpectShell(workdir='wkdir',
                        command=['mtn', 'automate', 'select', 'w:'])
            + ExpectShell.log('stdio', stdout=self.REVID)
            + 0,
        )
        self.expectOutcome(result=SUCCESS)
        self.expectProperty('got_revision', self.REVID, 'Monotone')
        return self.runStep()

buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_p4.py
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members # Portions Copyright 2013 Bad Dog Consulting import platform import textwrap from twisted.internet import error from twisted.python import reflect from twisted.trial import unittest from buildbot import config from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source.p4 import P4 from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.properties import ConstantRenderable _is_windows = (platform.system() == 'Windows') class TestP4(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def setupStep(self, step, args=None, patch=None, **kwargs): if args is None: args = {} step = super().setupStep(step, args={}, patch=None, **kwargs) self.build.getSourceStamp().revision = args.get('revision', None) # builddir property used to create absolute path required in perforce # client spec. workspace_dir = '/home/user/workspace' if _is_windows: workspace_dir = r'C:\Users\username\Workspace' self.build.path_module = reflect.namedModule("ntpath") self.properties.setProperty('builddir', workspace_dir, 'P4') def test_no_empty_step_config(self): with self.assertRaises(config.ConfigErrors): P4() def test_no_multiple_type_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4viewspec=('//depot/trunk', ''), p4base='//depot', p4branch='trunk', p4extra_views=['src', 'doc']) def test_no_p4viewspec_is_string_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4viewspec='a_bad_idea') def test_no_p4base_has_trailing_slash_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4base='//depot/') def test_no_p4branch_has_trailing_slash_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4base='//depot', p4branch='blah/') def test_no_p4branch_with_no_p4base_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4branch='blah') def test_no_p4extra_views_with_no_p4base_step_config(self): with self.assertRaises(config.ConfigErrors): P4(p4extra_views='blah') def test_incorrect_mode(self): with self.assertRaises(config.ConfigErrors): P4(p4base='//depot', mode='invalid') def test_mode_incremental_p4base_with_revision(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), dict(revision='100',)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self.expectCommands( ExpectShell(workdir='wkdir', # defaults to this, only changes if it has a copy mode. 
command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'client', '-i'], initialStdin=client_spec) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'sync', '//depot...@100']) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def _incremental(self, client_stdin='', extra_args=None, workdir='wkdir', timeout=20 * 60): if extra_args is None: extra_args = [] self.expectCommands( ExpectShell(workdir=workdir, command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir=workdir, timeout=timeout, command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'client', '-i'], initialStdin=client_stdin,) + 0, ExpectShell(workdir=workdir, timeout=timeout, command=(['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1'] + extra_args + ['sync'])) + 0, ExpectShell(workdir=workdir, timeout=timeout, command=['p4', '-p', 'localhost:12000', '-u', 'user', '-P', ('obfuscated', 'pass', 'XXXXXX'), '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def test_mode_incremental_p4base(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4base_with_no_branch(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot/trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4base_with_p4extra_views(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4extra_views=[('-//depot/trunk/test', 'test'), ('-//depot/trunk/doc', 'doc'), ('-//depot/trunk/white space', 'white space')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... \t-//depot/trunk/test/... //p4_client1/test/... \t-//depot/trunk/doc/... //p4_client1/doc/... \t"-//depot/trunk/white space/..." "//p4_client1/white space/..." ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4viewspec(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4viewspec=[('//depot/trunk/', ''), ('//depot/white space/', 'white space/'), ('-//depot/white space/excluded/', 'white space/excluded/')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... \t"//depot/white space/..." "//p4_client1/white space/..." \t"-//depot/white space/excluded/..." "//p4_client1/white space/excluded/..." ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4viewspec_suffix(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4viewspec_suffix=None, p4viewspec=[('//depot/trunk/foo.xml', 'bar.xml'), ('//depot/white space/...', 'white space/...'), ('-//depot/white space/excluded/...', 'white space/excluded/...')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/foo.xml //p4_client1/bar.xml \t"//depot/white space/..." "//p4_client1/white space/..." \t"-//depot/white space/excluded/..." "//p4_client1/white space/excluded/..." ''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_p4client_spec_options(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4client_spec_options='rmdir compress', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\trmdir compress LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._incremental(client_stdin=client_spec) def test_mode_incremental_parent_workdir(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', workdir='../another_wkdir')) root_dir = '/home/user/another_wkdir' if _is_windows: root_dir = r'C:\Users\username\another_wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec, workdir='../another_wkdir') def test_mode_incremental_p4extra_args(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', p4extra_args=['-Zproxyload'])) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._incremental(client_stdin=client_spec, extra_args=['-Zproxyload']) def test_mode_incremental_timeout(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', timeout=60 * 60)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._incremental(client_stdin=client_spec, timeout=60 * 60) def _full(self, client_stdin='', p4client='p4_client1', p4user='user', workdir='wkdir', extra_args=None, obfuscated_pass=True): if extra_args is None: extra_args = [] if obfuscated_pass: expected_pass = ('obfuscated', 'pass', 'XXXXXX') else: expected_pass = 'pass' self.expectCommands( ExpectShell(workdir=workdir, command=['p4', '-V']) # expected remote command + 0, # expected exit status ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client, 'client', '-i'], initialStdin=client_stdin) + 0, ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client] + extra_args + ['sync', '#none']) + 0, Expect('rmdir', {'dir': workdir, 'logEnviron': True}) + 0, ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client] + extra_args + ['sync']) + 0, ExpectShell(workdir=workdir, command=['p4', '-p', 'localhost:12000', '-u', p4user, '-P', expected_pass, '-c', p4client, 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'P4') return self.runStep() def test_mode_full_p4base(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_p4base_not_obfuscated(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), worker_version={'*': '2.15'}) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin, obfuscated_pass=False) def test_mode_full_p4base_with_no_branch(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot/trunk', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_p4viewspec(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4viewspec=[('//depot/main/', ''), ('//depot/main/white space/', 'white space/'), ('-//depot/main/white space/excluded/', 'white space/excluded/')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/main/... //p4_client1/... \t"//depot/main/white space/..." "//p4_client1/white space/..." \t"-//depot/main/white space/excluded/..." "//p4_client1/white space/excluded/..." ''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_renderable_p4base(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base=ConstantRenderable('//depot'), p4branch='release/1.0', p4user='user', p4client='p4_client2', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client2 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/release/1.0/... //p4_client2/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4client='p4_client2') def test_mode_full_renderable_p4client(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client=ConstantRenderable('p4_client_render'), p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client_render Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client_render/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4client='p4_client_render') def test_mode_full_renderable_p4branch(self): # Note that the config check skips checking p4base if it's a renderable self.setupStep( P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch=ConstantRenderable('render_branch'), p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/render_branch/... //p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin) def test_mode_full_renderable_p4viewspec(self): self.setupStep( P4(p4port='localhost:12000', mode='full', p4viewspec=[(ConstantRenderable('//depot/render_trunk/'), '')], p4user='different_user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_stdin = textwrap.dedent('''\ Client: p4_client1 Owner: different_user Description: \tCreated by different_user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/render_trunk/... 
//p4_client1/...\n''' % root_dir) self._full(client_stdin=client_stdin, p4user='different_user') def test_mode_full_p4viewspec_suffix(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4viewspec_suffix=None, p4viewspec=[('//depot/trunk/foo.xml', 'bar.xml'), ('//depot/trunk/white space/...', 'white space/...'), ('-//depot/trunk/white space/excluded/...', 'white space/excluded/...')], p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/foo.xml //p4_client1/bar.xml \t"//depot/trunk/white space/..." "//p4_client1/white space/..." \t"-//depot/trunk/white space/excluded/..." "//p4_client1/white space/excluded/..." ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_p4client_spec_options(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4client_spec_options='rmdir compress', p4user='user', p4client='p4_client1', p4passwd='pass')) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\trmdir compress LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._full(client_stdin=client_spec) def test_mode_full_parent_workdir(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', workdir='../another_wkdir')) root_dir = '/home/user/another_wkdir' if _is_windows: root_dir = r'C:\Users\username\another_wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self._full(client_stdin=client_spec, workdir='../another_wkdir') def test_mode_full_p4extra_args(self): self.setupStep(P4(p4port='localhost:12000', mode='full', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', p4extra_args=['-Zproxyload'])) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... 
''' % root_dir) self._full(client_stdin=client_spec, extra_args=['-Zproxyload']) def test_worker_connection_lost(self): self.setupStep(P4(p4port='localhost:12000', mode='incremental', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass'), dict(revision='100',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['p4', '-V']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def test_ticket_auth(self): self.setupStep(P4(p4port='localhost:12000', p4base='//depot', p4branch='trunk', p4user='user', p4client='p4_client1', p4passwd='pass', use_tickets=True)) root_dir = '/home/user/workspace/wkdir' if _is_windows: root_dir = r'C:\Users\username\Workspace\wkdir' client_spec = textwrap.dedent('''\ Client: p4_client1 Owner: user Description: \tCreated by user Root:\t%s Options:\tallwrite rmdir LineEnd:\tlocal View: \t//depot/trunk/... //p4_client1/... ''' % root_dir) self.expectCommands( ExpectShell(workdir='wkdir', command=['p4', '-V']) + 0, # This is the extra step that gets run when using tickets, # and the password is not passed anymore after that. ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'login'], initialStdin='pass\n') + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'client', '-i'], initialStdin=client_spec) + 0, ExpectShell(workdir='wkdir', command=(['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'sync'])) + 0, ExpectShell(workdir='wkdir', command=['p4', '-p', 'localhost:12000', '-u', 'user', '-c', 'p4_client1', 'changes', '-m1', '#have']) + ExpectShell.log('stdio', stdout="Change 100 on 2013/03/21 by user@machine \'duh\'") + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_repo.py000066400000000000000000000634101361162603000255360ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot.changes.changes import Change
from buildbot.process.properties import Properties
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.steps.source import repo
from buildbot.test.fake.remotecommand import Expect
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import sourcesteps
from buildbot.test.util.misc import TestReactorMixin

from .test_changes_gerritchangesource import TestGerritChangeSource


class RepoURL(unittest.TestCase):
    # testcases taken from old_source/Repo test

    def oneTest(self, props, expected):
        p = Properties()
        p.update(props, "test")
        r = repo.RepoDownloadsFromProperties(list(props))
        self.assertEqual(sorted(r.getRenderingFor(p)), sorted(expected))

    def test_parse1(self):
        self.oneTest(
            {'a': "repo download test/bla 564/12"}, ["test/bla 564/12"])

    def test_parse2(self):
        self.oneTest(
            {'a': "repo download test/bla 564/12 repo download test/bla 564/2"},
            ["test/bla 564/12", "test/bla 564/2"])
        self.oneTest(
            {'a': "repo download test/bla 564/12",
             'b': "repo download test/bla 564/2"},
            ["test/bla 564/12", "test/bla 564/2"])

    def test_parse3(self):
        self.oneTest(
            {'a': "repo download test/bla 564/12 repo download test/bla 564/2 test/foo 5/1"},
            ["test/bla 564/12", "test/bla 564/2", "test/foo 5/1"])
        self.oneTest(
            {'a': "repo download test/bla 564/12"}, ["test/bla 564/12"])


class TestRepo(sourcesteps.SourceStepMixin, TestReactorMixin,
               unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        self.shouldRetry = False
        self.logEnviron = True
        return self.setUpSourceStep()

    def tearDown(self):
        return self.tearDownSourceStep()

    def shouldLogEnviron(self):
        r = self.logEnviron
        self.logEnviron = False
        return r

    def ExpectShell(self, **kw):
        if 'workdir' not in kw:
            kw['workdir'] = 'wkdir'
        if 'logEnviron' not in kw:
            kw['logEnviron'] = self.shouldLogEnviron()
        return ExpectShell(**kw)

    def mySetupStep(self, **kwargs):
        if "repoDownloads" not in kwargs:
            kwargs.update(
                dict(repoDownloads=repo.RepoDownloadsFromProperties(
                    ["repo_download", "repo_download2"])))
        self.setupStep(
            repo.Repo(manifestURL='git://myrepo.com/manifest.git',
                      manifestBranch="mb",
                      manifestFile="mf",
                      **kwargs))
        self.build.allChanges = lambda x=None: []

    def myRunStep(self, result=SUCCESS, state_string=None):
        self.expectOutcome(result=result, state_string=state_string)
        return self.runStep()

    def expectClobber(self):
        # stat return 1 so we clobber
        self.expectCommands(
            Expect('stat', dict(file='wkdir/.repo',
                                logEnviron=self.logEnviron)) + 1,
            Expect('rmdir', dict(dir='wkdir',
                                 logEnviron=self.logEnviron)) + 0,
            Expect('mkdir', dict(dir='wkdir',
                                 logEnviron=self.logEnviron)) + 0,
        )

    def expectnoClobber(self):
        # stat return 0, so nothing
        self.expectCommands(
            Expect('stat', dict(file='wkdir/.repo',
                                logEnviron=self.logEnviron)) + 0,
        )

    def expectRepoSync(self, which_fail=-1, breakatfail=False, depth=0,
                       syncoptions=None, override_commands=None):
        if syncoptions is None:
            syncoptions = ["-c"]
        if override_commands is None:
            override_commands = []
        commands = [
            self.ExpectShell(
                command=['bash', '-c', self.step._getCleanupCommand()]),
            self.ExpectShell(
                command=['repo', 'init', '-u', 'git://myrepo.com/manifest.git',
                         '-b', 'mb', '-m', 'mf', '--depth', str(depth)])
        ] + override_commands + [
            self.ExpectShell(command=['repo', 'sync', '--force-sync'] + syncoptions),
            self.ExpectShell(
                command=['repo', 'manifest', '-r', '-o', 'manifest-original.xml'])
        ]
        for i, command in enumerate(commands):
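            # As the loop body below suggests, each expected command is queued
            # with exit status 1 at the index selected by which_fail (0 for the
            # others), and with breakatfail=True the expected command sequence
            # is cut short at that first failure.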
self.expectCommands(command + (which_fail == i and 1 or 0)) if which_fail == i and breakatfail: break def test_basic(self): """basic first time repo sync""" self.mySetupStep(repoDownloads=None) self.expectClobber() self.expectRepoSync() return self.myRunStep() def test_basic_depth(self): """basic first time repo sync""" self.mySetupStep(repoDownloads=None, depth=2) self.expectClobber() self.expectRepoSync(depth=2) return self.myRunStep() def test_update(self): """basic second time repo sync""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync() return self.myRunStep() def test_jobs(self): """basic first time repo sync with jobs""" self.mySetupStep(jobs=2) self.expectClobber() self.expectRepoSync(syncoptions=["-j2", "-c"]) return self.myRunStep() def test_sync_all_branches(self): """basic first time repo sync with all branches""" self.mySetupStep(syncAllBranches=True) self.expectClobber() self.expectRepoSync(syncoptions=[]) return self.myRunStep() def test_manifest_override(self): """repo sync with manifest_override_url property set download via wget """ self.mySetupStep(manifestOverrideUrl="http://u.rl/test.manifest", syncAllBranches=True) self.expectClobber() override_commands = [ Expect( 'stat', dict(file='wkdir/http://u.rl/test.manifest', logEnviron=False)), self.ExpectShell(logEnviron=False, command=['wget', 'http://u.rl/test.manifest', '-O', 'manifest_override.xml']), self.ExpectShell( logEnviron=False, workdir='wkdir/.repo', command=['ln', '-sf', '../manifest_override.xml', 'manifest.xml']) ] self.expectRepoSync(which_fail=2, syncoptions=[], override_commands=override_commands) return self.myRunStep() def test_manifest_override_local(self): """repo sync with manifest_override_url property set copied from local FS """ self.mySetupStep(manifestOverrideUrl="test.manifest", syncAllBranches=True) self.expectClobber() override_commands = [ Expect('stat', dict(file='wkdir/test.manifest', logEnviron=False)), self.ExpectShell(logEnviron=False, command=[ 'cp', '-f', 'test.manifest', 'manifest_override.xml']), self.ExpectShell(logEnviron=False, workdir='wkdir/.repo', command=['ln', '-sf', '../manifest_override.xml', 'manifest.xml']) ] self.expectRepoSync( syncoptions=[], override_commands=override_commands) return self.myRunStep() def test_tarball(self): """repo sync using the tarball cache """ self.mySetupStep(tarball="/tarball.tar") self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar', '-xvf', '/tarball.tar']) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.tar']) + Expect.log('stdio', stdout=str(10000)) + 0) self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10000 + 7 * 24 * 3600)) + 0) return self.myRunStep() def test_create_tarball(self): """repo sync create the tarball if its not here """ self.mySetupStep(tarball="/tarball.tgz") self.expectClobber() self.expectCommands( self.ExpectShell( command=['tar', '-z', '-xvf', '/tarball.tgz']) + 1, self.ExpectShell(command=['rm', '-f', '/tarball.tgz']) + 1, Expect('rmdir', dict(dir='wkdir/.repo', logEnviron=False)) + 1) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.tgz']) + Expect.log('stdio', stderr="file not found!") + 1, self.ExpectShell(command=['tar', '-z', '-cvf', '/tarball.tgz', '.repo']) + 0) return self.myRunStep() def do_test_update_tarball(self, suffix, option): """repo sync update the tarball cache at the end (tarball older than a week) """ 
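        # The expectations in this helper encode the tarball freshness check:
        # judging from the 'stat -c%Y' pairs below, the step compares the
        # tarball mtime with that of the workdir ('.') and regenerates the
        # tarball once it is more than a week (7 * 24 * 3600 seconds) older.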
self.mySetupStep(tarball="/tarball." + suffix) self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar'] + option + ['-xvf', '/tarball.' + suffix]) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' + suffix, '.repo']) + 0) return self.myRunStep() def test_update_tarball(self): self.do_test_update_tarball("tar", []) def test_update_tarball_gz(self): """tarball compression variants""" self.do_test_update_tarball("tar.gz", ["-z"]) def test_update_tarball_tgz(self): self.do_test_update_tarball("tgz", ["-z"]) def test_update_tarball_pigz(self): self.do_test_update_tarball("pigz", ["-I", "pigz"]) def test_update_tarball_bzip(self): self.do_test_update_tarball("tar.bz2", ["-j"]) def test_update_tarball_lzma(self): self.do_test_update_tarball("tar.lzma", ["--lzma"]) def test_update_tarball_lzop(self): self.do_test_update_tarball("tar.lzop", ["--lzop"]) def test_update_tarball_fail1(self, suffix="tar", option=None): """tarball extract fail -> remove the tarball + remove .repo dir """ if option is None: option = [] self.mySetupStep(tarball="/tarball." + suffix) self.expectClobber() self.expectCommands( self.ExpectShell( command=[ 'tar'] + option + ['-xvf', '/tarball.' + suffix]) + 1, self.ExpectShell( command=['rm', '-f', '/tarball.tar']) + 0, Expect( 'rmdir', dict(dir='wkdir/.repo', logEnviron=False)) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' + suffix, '.repo']) + 0) return self.myRunStep() def test_update_tarball_fail2(self, suffix="tar", option=None): """tarball update fail -> remove the tarball + continue repo download """ if option is None: option = [] self.mySetupStep(tarball="/tarball." + suffix) self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectClobber() self.expectCommands( self.ExpectShell(command=['tar'] + option + ['-xvf', '/tarball.' + suffix]) + 0) self.expectRepoSync() self.expectCommands(self.ExpectShell(command=['stat', '-c%Y', '/tarball.' + suffix]) + Expect.log('stdio', stdout=str(10000)) + 0, self.ExpectShell(command=['stat', '-c%Y', '.']) + Expect.log( 'stdio', stdout=str(10001 + 7 * 24 * 3600)) + 0, self.ExpectShell(command=['tar'] + option + ['-cvf', '/tarball.' 
+ suffix, '.repo']) + 1, self.ExpectShell( command=['rm', '-f', '/tarball.tar']) + 0, self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads(self): """basic repo download, and check that repo_downloaded is updated""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_repo_downloads2(self): """2 repo downloads""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.build.setProperty("repo_download2", "repo download test/bla2 565/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0, self.ExpectShell( command=['repo', 'download', 'test/bla2', '565/12']) + 0) return self.myRunStep() def test_repo_download_manifest(self): """2 repo downloads, with one manifest patch""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.build.setProperty("repo_download2", "repo download manifest 565/12", "test") self.expectnoClobber() self.expectCommands( self.ExpectShell( command=['bash', '-c', self.step._getCleanupCommand()]) + 0, self.ExpectShell( command=['repo', 'init', '-u', 'git://myrepo.com/manifest.git', '-b', 'mb', '-m', 'mf', '--depth', '0']) + 0, self.ExpectShell( workdir='wkdir/.repo/manifests', command=[ 'git', 'fetch', 'git://myrepo.com/manifest.git', 'refs/changes/65/565/12']) + 0, self.ExpectShell( workdir='wkdir/.repo/manifests', command=['git', 'cherry-pick', 'FETCH_HEAD']) + 0, self.ExpectShell(command=['repo', 'sync', '--force-sync', '-c']) + 0, self.ExpectShell( command=['repo', 'manifest', '-r', '-o', 'manifest-original.xml']) + 0) self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads_mirror_sync(self): """repo downloads, with mirror synchronization issues""" self.mySetupStep() # we don't really want the test to wait... self.step.mirror_sync_sleep = 0.001 self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0) return self.myRunStep() def test_repo_downloads_change_missing(self): """repo downloads, with no actual mirror synchronization issues (still retries 2 times)""" self.mySetupStep() # we don't really want the test to wait... 
self.step.mirror_sync_sleep = 0.001 self.step.mirror_sync_retry = 1 # on retry once self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log( "stdio", stderr="fatal: Couldn't find remote ref \n"), ) return self.myRunStep(result=FAILURE, state_string="repo: change test/bla 564/12 does not exist (failure)") def test_repo_downloads_fail1(self): """repo downloads, cherry-pick returns 1""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 1 + Expect.log("stdio", stderr="patch \n"), self.ExpectShell( command=['repo', 'forall', '-c', 'git', 'diff', 'HEAD']) + 0 ) return self.myRunStep(result=FAILURE, state_string="download failed: test/bla 564/12 (failure)") def test_repo_downloads_fail2(self): """repo downloads, cherry-pick returns 0 but error in stderr""" self.mySetupStep() self.build.setProperty("repo_download", "repo download test/bla 564/12", "test") self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell( command=['repo', 'download', 'test/bla', '564/12']) + 0 + Expect.log("stdio", stderr="Automatic cherry-pick failed \n"), self.ExpectShell( command=['repo', 'forall', '-c', 'git', 'diff', 'HEAD']) + 0 ) return self.myRunStep(result=FAILURE, state_string="download failed: test/bla 564/12 (failure)") def test_repo_downloads_from_change_source(self): """basic repo download from change source, and check that repo_downloaded is updated""" self.mySetupStep(repoDownloads=repo.RepoDownloadsFromChangeSource()) chdict = TestGerritChangeSource.expected_change change = Change(None, None, None, properties=chdict['properties']) self.build.allChanges = lambda x=None: [change] self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell(command=['repo', 'download', 'pr', '4321/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_repo_downloads_from_change_source_codebase(self): """basic repo download from change source, and check that repo_downloaded is updated""" self.mySetupStep( repoDownloads=repo.RepoDownloadsFromChangeSource("mycodebase")) chdict = TestGerritChangeSource.expected_change change = Change(None, None, None, properties=chdict['properties']) # getSourceStamp is faked by SourceStepMixin ss = self.build.getSourceStamp("") ss.changes = [change] self.expectnoClobber() self.expectRepoSync() self.expectCommands( self.ExpectShell(command=['repo', 'download', 'pr', '4321/12']) + 0 + Expect.log( 'stdio', stderr="test/bla refs/changes/64/564/12 -> FETCH_HEAD\n") + Expect.log('stdio', stderr="HEAD is now at 0123456789abcdef...\n")) self.expectProperty( "repo_downloaded", "564/12 0123456789abcdef ", "Source") return self.myRunStep() def test_update_fail1(self): """ fail at cleanup: ignored""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=0, breakatfail=False) return self.myRunStep() def test_update_fail2(self): 
"""fail at repo init: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_fail3(self): """ fail at repo sync: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_fail4(self): """fail at repo manifest: clobber""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.expectClobber() self.expectRepoSync() self.shouldRetry = True return self.myRunStep() def test_update_doublefail(self): """fail at repo manifest: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=3, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo manifest (failure)") def test_update_doublefail2(self): """fail at repo sync: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=2, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo sync (failure)") def test_update_doublefail3(self): """fail at repo init: clobber but still fail""" self.mySetupStep() self.expectnoClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.expectClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo init (failure)") def test_basic_fail(self): """fail at repo init: no need to re-clobber but still fail""" self.mySetupStep() self.expectClobber() self.expectRepoSync(which_fail=1, breakatfail=True) self.shouldRetry = True return self.myRunStep(result=FAILURE, state_string="repo failed at: repo init (failure)") buildbot-2.6.0/master/buildbot/test/unit/test_steps_source_svn.py000066400000000000000000002625241361162603000254060ustar00rootroot00000000000000# -*- coding: utf8 -*- # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest from buildbot import config from buildbot.process import buildstep from buildbot.process import remotetransfer from buildbot.process.results import FAILURE from buildbot.process.results import RETRY from buildbot.process.results import SUCCESS from buildbot.steps.source import svn from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.properties import ConstantRenderable class TestSVN(sourcesteps.SourceStepMixin, TestReactorMixin, unittest.TestCase): svn_st_xml = """ """ svn_st_xml_corrupt = """ """ svn_st_xml_empty = """ """ svn_info_stdout_xml = """ http://svn.red-bean.com/repos/test http://svn.red-bean.com/repos/test 5e7d134a-54fb-0310-bd04-b611643e5c25 normal infinity sally 2003-01-15T23:35:12.847647Z """ svn_info_stdout_xml_nonintegerrevision = """ http://svn.red-bean.com/repos/test http://svn.red-bean.com/repos/test 5e7d134a-54fb-0310-bd04-b611643e5c25 normal infinity sally 2003-01-15T23:35:12.847647Z """ def setUp(self): self.setUpTestReactor() return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_workerVersionIsOlderThan(self, result): self.patch(svn.SVN, 'workerVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): with self.assertRaises(config.ConfigErrors): svn.SVN() def test_incorrect_mode(self): with self.assertRaises(config.ConfigErrors): svn.SVN(repourl='http://svn.local/app/trunk', mode='invalid') def test_incorrect_method(self): with self.assertRaises(config.ConfigErrors): svn.SVN(repourl='http://svn.local/app/trunk', method='invalid') def test_svn_not_installed(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_corrupt_xml(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_corrupt) + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() @defer.inlineCallbacks def test_revision_noninteger(self): svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', 
command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml_nonintegerrevision) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', 'a10', 'SVN') yield self.runStep() revision = self.step.getProperty('got_revision') with self.assertRaises(ValueError): int(revision) def test_revision_missing(self): """Fail if 'revision' tag isn't there""" svn_info_stdout = self.svn_info_stdout_xml.replace('entry', 'Blah') svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_mode_incremental(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', timeout=1, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def 
test_mode_incremental_repourl_renderable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_canonical(self): self.setupStep( svn.SVN(repourl='http://svn.local/trunk/test app', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/trunk/test%20app""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) 
self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable_svninfo_mismatch(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', # expecting ../trunk/app stdout="""http://svn.local/branch/foo/app""") + 0, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_win32path(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.build.path_module = namedModule("ntpath") self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mode_incremental_preferLastChangedRev(self): """Give the last-changed rev if 'preferLastChangedRev' is set""" self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', 
dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '90', 'SVN') return self.runStep() def test_mode_incremental_preferLastChangedRev_butMissing(self): """If 'preferLastChangedRev' is set, but missing, fall back to the regular revision value.""" svn_info_stdout = self.svn_info_stdout_xml.replace('commit', 'Blah') self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) 
self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + 
ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_keep_on_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def 
test_mode_full_not_updatable(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_old_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_workerVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file1', 'logEnviron': True, 'timeout': 1200}) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file2_uniçode', 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_new_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_workerVersionIsOlderThan(False) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, 
ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy', codebase='app')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/app/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source/app', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source/app', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source/app', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', {'app': '100'}, 'SVN') return self.runStep() def test_mode_full_copy_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 
'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], logEnviron=True, timeout=1200)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), workerdest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch_worker_2_16(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch'), worker_version={'*': '2.16'}) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2_uniçode'], logEnviron=True, timeout=1200)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', 
dict(blocksize=32768, maxsize=None, reader=ExpectRemoteRef( remotetransfer.StringFileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', timeout=1, mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', timeout=1, command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', '--revision', '100', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_auth(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export', username='svn_username', password='svn_password')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', 
command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + 0, ExpectShell(workdir='', command=['svn', 'export', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX'), 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_with_env(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_logEnviron(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], logEnviron=False) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_command_fails(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', 
stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_bogus_svnversion(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log( 'stdio', stdout='' '' 'http://svn.local/app/trunk' '') + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout='1x0y0') + 0, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True, 'timeout': 1200}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_cpdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True, timeout=1200)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_rmdir_fails_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, 
ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2_uniçode'], 'logEnviron': True, 'timeout': 1200}) + 1, ) self.expectOutcome(result=FAILURE) return self.runStep() def test_worker_connection_lost(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, state_string="update (retry)") return self.runStep() def test_empty_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_omit_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + ExpectShell.log('stdio', stdout="""http://svn.local/app/trunk""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS) return self.runStep() class TestGetUnversionedFiles(unittest.TestCase): def test_getUnversionedFiles_does_not_list_externals(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["svn_external_path/unversioned_file"], unversioned_files) def test_getUnversionedFiles_does_not_list_missing(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual([], unversioned_files) def test_getUnversionedFiles_corrupted_xml(self): svn_st_xml_corrupt = """ """ with self.assertRaises(buildstep.BuildStepFailed): list(svn.SVN.getUnversionedFiles(svn_st_xml_corrupt, [])) def test_getUnversionedFiles_no_path(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual([], unversioned_files) def test_getUnversionedFiles_no_item(self): svn_st_xml = """ 
""" unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["svn_external_path/unversioned_file"], unversioned_files) def test_getUnversionedFiles_unicode(self): svn_st_xml = """ """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEqual( ["Path/To/Content/Developers/François"], unversioned_files) class TestSvnUriCanonicalize(unittest.TestCase): # svn.SVN.svnUriCanonicalize() test method factory # # given input string and expected result create a test method that # will call svn.SVN.svnUriCanonicalize() with the input and check # that expected result is returned # # @param input: test input # @param exp: expected result def _makeSUCTest(input, exp): return lambda self: self.assertEqual( svn.SVN.svnUriCanonicalize(input), exp) test_empty = _makeSUCTest( "", "") test_canonical = _makeSUCTest( "http://foo.com/bar", "http://foo.com/bar") test_lc_scheme = _makeSUCTest( "hTtP://foo.com/bar", "http://foo.com/bar") test_trailing_dot = _makeSUCTest( "http://foo.com./bar", "http://foo.com/bar") test_lc_hostname = _makeSUCTest( "http://foO.COm/bar", "http://foo.com/bar") test_lc_hostname_with_user = _makeSUCTest( "http://Jimmy@fOO.Com/bar", "http://Jimmy@foo.com/bar") test_lc_hostname_with_user_pass = _makeSUCTest( "http://Jimmy:Sekrit@fOO.Com/bar", "http://Jimmy:Sekrit@foo.com/bar") test_trailing_slash = _makeSUCTest( "http://foo.com/bar/", "http://foo.com/bar") test_trailing_slash_scheme = _makeSUCTest( "http://", "http://") test_trailing_slash_hostname = _makeSUCTest( "http://foo.com/", "http://foo.com") test_trailing_double_slash = _makeSUCTest( "http://foo.com/x//", "http://foo.com/x") test_double_slash = _makeSUCTest( "http://foo.com/x//y", "http://foo.com/x/y") test_slash = _makeSUCTest( "/", "/") test_dot = _makeSUCTest( "http://foo.com/x/./y", "http://foo.com/x/y") test_dot_dot = _makeSUCTest( "http://foo.com/x/../y", "http://foo.com/y") test_double_dot_dot = _makeSUCTest( "http://foo.com/x/y/../../z", "http://foo.com/z") test_dot_dot_root = _makeSUCTest( "http://foo.com/../x/y", "http://foo.com/x/y") test_quote_spaces = _makeSUCTest( "svn+ssh://user@host:123/My Stuff/file.doc", "svn+ssh://user@host:123/My%20Stuff/file.doc") test_remove_port_80 = _makeSUCTest( "http://foo.com:80/bar", "http://foo.com/bar") test_dont_remove_port_80 = _makeSUCTest( "https://foo.com:80/bar", "https://foo.com:80/bar") # not http test_remove_port_443 = _makeSUCTest( "https://foo.com:443/bar", "https://foo.com/bar") test_dont_remove_port_443 = _makeSUCTest( "svn://foo.com:443/bar", "svn://foo.com:443/bar") # not https test_remove_port_3690 = _makeSUCTest( "svn://foo.com:3690/bar", "svn://foo.com/bar") test_dont_remove_port_3690 = _makeSUCTest( "http://foo.com:3690/bar", "http://foo.com:3690/bar") # not svn test_dont_remove_port_other = _makeSUCTest( "https://foo.com:2093/bar", "https://foo.com:2093/bar") test_quote_funny_chars = _makeSUCTest( "http://foo.com/\x10\xe6%", "http://foo.com/%10%E6%25") test_overquoted = _makeSUCTest( "http://foo.com/%68%65%6c%6c%6f%20%77%6f%72%6c%64", "http://foo.com/hello%20world") buildbot-2.6.0/master/buildbot/test/unit/test_steps_subunit.py000066400000000000000000000066011361162603000247010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import mock

from twisted.python.compat import NativeStringIO
from twisted.trial import unittest

from zope.interface import implementer

from buildbot import interfaces
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.steps import subunit
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import steps
from buildbot.test.util.misc import TestReactorMixin


@implementer(interfaces.ILogObserver)
class StubLogObserver(mock.Mock):
    pass


class TestSetPropertiesFromEnv(steps.BuildStepMixin, TestReactorMixin,
                               unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()
        self.logobserver = StubLogObserver()
        self.logobserver.failures = []
        self.logobserver.errors = []
        self.logobserver.skips = []
        self.logobserver.testsRun = 0
        self.logobserver.warningio = NativeStringIO()
        self.patch(subunit, 'SubunitLogObserver', lambda: self.logobserver)
        return self.setUpBuildStep()

    def tearDown(self):
        return self.tearDownBuildStep()

    def test_empty(self):
        self.setupStep(subunit.SubunitShellCommand(command='test'))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command="test") + 0
        )
        self.expectOutcome(result=SUCCESS, state_string="shell no tests run")
        return self.runStep()

    def test_empty_error(self):
        self.setupStep(subunit.SubunitShellCommand(command='test',
                                                   failureOnNoTests=True))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command="test") + 0
        )
        self.expectOutcome(result=FAILURE,
                           state_string="shell no tests run (failure)")
        return self.runStep()

    def test_warnings(self):
        self.setupStep(subunit.SubunitShellCommand(command='test'))
        self.expectCommands(
            ExpectShell(workdir='wkdir', command="test") + 0
        )
        self.logobserver.warnings.append('not quite up to snuff (list)')
        self.logobserver.warningio.write('not quite up to snuff (io)\n')
        self.logobserver.testsRun = 3
        self.expectOutcome(result=SUCCESS,  # N.B. not WARNINGS
                           state_string="shell 3 tests passed")
        # note that the warnings list is ignored..
        self.expectLogfile('warnings', 'not quite up to snuff (io)\n')
        return self.runStep()

    # TODO: test text2 generation?
    # TODO: tests are represented as objects?!

buildbot-2.6.0/master/buildbot/test/unit/test_steps_transfer.py

# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import json import os import shutil import stat import tarfile import tempfile from io import BytesIO from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SKIPPED from buildbot.process.results import SUCCESS from buildbot.steps import transfer from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes def uploadString(string, timestamp=None): def behavior(command): writer = command.args['writer'] writer.remote_write(string + "\n") writer.remote_close() if timestamp: writer.remote_utime(timestamp) return behavior def downloadString(memoizer, timestamp=None): def behavior(command): reader = command.args['reader'] read = reader.remote_read(1000) # save what we read so we can check it memoizer(read) reader.remote_close() if timestamp: reader.remote_utime(timestamp) return read return behavior def uploadTarFile(filename, **members): def behavior(command): f = BytesIO() archive = tarfile.TarFile(fileobj=f, name=filename, mode='w') for name, content in members.items(): content = unicode2bytes(content) archive.addfile(tarfile.TarInfo(name), BytesIO(content)) writer = command.args['writer'] writer.remote_write(f.getvalue()) writer.remote_unpack() return behavior class UploadError: def __init__(self, behavior): self.behavior = behavior self.writer = None def __call__(self, command): self.writer = command.args['writer'] self.writer.cancel = Mock(wraps=self.writer.cancel) self.behavior(command) raise RuntimeError('uh oh') class TestFileUpload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() fd, self.destfile = tempfile.mkstemp() os.close(fd) os.unlink(self.destfile) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destfile): os.unlink(self.destfile) return self.tearDownBuildStep() def testConstructorModeType(self): with self.assertRaises(config.ConfigErrors): transfer.FileUpload(workersrc=__file__, masterdest='xyz', mode='g+rwx') def testBasic(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading srcfile") d = self.runStep() return d def testWorker2_16(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading srcfile") d = self.runStep() return d @defer.inlineCallbacks def testTimestamp(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, keepstamp=True)) timestamp = (os.path.getatime(__file__), os.path.getmtime(__file__)) self.expectCommands( 
Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=True, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString('test', timestamp=timestamp)) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading %s" % os.path.basename(__file__)) yield self.runStep() desttimestamp = (os.path.getatime(self.destfile), os.path.getmtime(self.destfile)) srctimestamp = [int(t) for t in timestamp] desttimestamp = [int(d) for d in desttimestamp] self.assertEqual(srctimestamp[0], desttimestamp[0]) self.assertEqual(srctimestamp[1], desttimestamp[1]) def testDescriptionDone(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file", descriptionDone="Test File Uploaded")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="Test File Uploaded") d = self.runStep() return d @defer.inlineCallbacks def testURL(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading %s" % os.path.basename(__file__)) yield self.runStep() self.step.addURL.assert_called_once_with( os.path.basename(self.destfile), "http://server/file") @defer.inlineCallbacks def testURLText(self): self.setupStep( transfer.FileUpload(workersrc=__file__, masterdest=self.destfile, url="http://server/file", urlText="testfile")) self.step.addURL = Mock() self.expectCommands( Expect('uploadFile', dict( workersrc=__file__, workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading %s" % os.path.basename(__file__)) yield self.runStep() self.step.addURL.assert_called_once_with( "testfile", "http://server/file") def testFailure(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + 1) self.expectOutcome( result=FAILURE, state_string="uploading srcfile (failure)") d = self.runStep() return d @defer.inlineCallbacks def testException(self): self.setupStep( transfer.FileUpload(workersrc='srcfile', masterdest=self.destfile)) behavior = UploadError(uploadString("Hello world!")) self.expectCommands( Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=262144, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading srcfile (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) def test_init_workersrc_keyword(self): step = transfer.FileUpload( workersrc='srcfile', 
masterdest='dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_workersrc_positional(self): step = transfer.FileUpload('srcfile', 'dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.FileUpload() with self.assertRaises(TypeError): transfer.FileUpload('src') class TestDirectoryUpload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.destdir = os.path.abspath('destdir') if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.tearDownBuildStep() def testBasic(self): self.setupStep( transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir)) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") d = self.runStep() return d def testWorker2_16(self): self.setupStep( transfer.DirectoryUpload( workersrc="srcdir", masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading srcdir") d = self.runStep() return d def testFailure(self): self.setupStep( transfer.DirectoryUpload(workersrc="srcdir", masterdest=self.destdir)) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + 1) self.expectOutcome(result=FAILURE, state_string="uploading srcdir (failure)") d = self.runStep() return d @defer.inlineCallbacks def testException(self): self.setupStep( transfer.DirectoryUpload(workersrc='srcdir', masterdest=self.destdir)) behavior = UploadError(uploadTarFile('fake.tar', test="Hello world!")) self.expectCommands( Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading srcdir (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) def test_init_workersrc_keyword(self): step = transfer.DirectoryUpload( workersrc='srcfile', masterdest='dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_workersrc_positional(self): step = transfer.DirectoryUpload('srcfile', 'dstfile') self.assertEqual(step.workersrc, 'srcfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.DirectoryUpload() with self.assertRaises(TypeError): transfer.DirectoryUpload('src') class TestMultipleFileUpload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.destdir = os.path.abspath('destdir') if os.path.exists(self.destdir): shutil.rmtree(self.destdir) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destdir): 
shutil.rmtree(self.destdir) return self.tearDownBuildStep() def testEmpty(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=[], masterdest=self.destdir)) self.expectCommands() self.expectOutcome(result=SKIPPED, state_string="finished (skipped)") d = self.runStep() return d def testFile(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testDirectory(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testMultiple(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") d = self.runStep() return d def testMultipleString(self): self.setupStep( transfer.MultipleFileUpload(workersrcs="srcfile", masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testGlob(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["src*"], masterdest=self.destdir, glob=True)) self.expectCommands( Expect('glob', dict(path=os.path.join( 'wkdir', 'src*'), logEnviron=False)) + Expect.update('files', ["srcfile"]) + 0, Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, ) self.expectOutcome( result=SUCCESS, 
state_string="uploading 1 file") d = self.runStep() return d def testFailedGlob(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["src*"], masterdest=self.destdir, glob=True)) self.expectCommands( Expect('glob', {'path': os.path.join( 'wkdir', 'src*'), 'logEnviron': False}) + Expect.update('files', []) + 1, ) self.expectOutcome( result=SKIPPED, state_string="uploading 0 files (skipped)") d = self.runStep() return d def testFileWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcfile"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testDirectoryWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcdir"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome(result=SUCCESS, state_string="uploading 1 file") d = self.runStep() return d def testMultipleWorker2_16(self): self.setupStep( transfer.MultipleFileUpload( workersrcs=["srcfile", "srcdir"], masterdest=self.destdir), worker_version={'*': '2.16'}) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( slavesrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( slavesrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") d = self.runStep() return d def testFailure(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + 1) self.expectOutcome( result=FAILURE, state_string="uploading 2 files (failure)") d = self.runStep() return d @defer.inlineCallbacks def testException(self): self.setupStep( transfer.MultipleFileUpload(workersrcs=["srcfile", "srcdir"], masterdest=self.destdir)) behavior = UploadError(uploadString("Hello world!")) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', 
blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(behavior)) self.expectOutcome( result=EXCEPTION, state_string="uploading 2 files (exception)") yield self.runStep() self.assertEqual(behavior.writer.cancel.called, True) self.assertEqual( len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def testSubclass(self): class CustomStep(transfer.MultipleFileUpload): uploadDone = Mock(return_value=None) allUploadsDone = Mock(return_value=None) step = CustomStep( workersrcs=["srcfile", "srcdir"], masterdest=self.destdir) self.setupStep(step) self.expectCommands( Expect('stat', dict(file="srcfile", workdir='wkdir')) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0, Expect('uploadFile', dict( workersrc="srcfile", workdir='wkdir', blocksize=16384, maxsize=None, keepstamp=False, writer=ExpectRemoteRef(remotetransfer.FileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0, Expect('stat', dict(file="srcdir", workdir='wkdir')) + Expect.update('stat', [stat.S_IFDIR, 99, 99]) + 0, Expect('uploadDirectory', dict( workersrc="srcdir", workdir='wkdir', blocksize=16384, compress=None, maxsize=None, writer=ExpectRemoteRef(remotetransfer.DirectoryWriter))) + Expect.behavior(uploadTarFile('fake.tar', test="Hello world!")) + 0) self.expectOutcome( result=SUCCESS, state_string="uploading 2 files") yield self.runStep() def checkCalls(res): self.assertEqual(step.uploadDone.call_count, 2) self.assertEqual(step.uploadDone.call_args_list[0], ((SUCCESS, 'srcfile', os.path.join(self.destdir, 'srcfile')), {})) self.assertEqual(step.uploadDone.call_args_list[1], ((SUCCESS, 'srcdir', os.path.join(self.destdir, 'srcdir')), {})) self.assertEqual(step.allUploadsDone.call_count, 1) self.assertEqual(step.allUploadsDone.call_args_list[0], ((SUCCESS, ['srcfile', 'srcdir'], self.destdir), {})) def test_init_workersrcs_keyword(self): step = transfer.MultipleFileUpload( workersrcs=['srcfile'], masterdest='dstfile') self.assertEqual(step.workersrcs, ['srcfile']) def test_init_workersrcs_positional(self): step = transfer.MultipleFileUpload(['srcfile'], 'dstfile') self.assertEqual(step.workersrcs, ['srcfile']) def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.MultipleFileUpload() with self.assertRaises(TypeError): transfer.MultipleFileUpload(['srcfile']) class TestFileDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() fd, self.destfile = tempfile.mkstemp() os.close(fd) os.unlink(self.destfile) return self.setUpBuildStep() def tearDown(self): if os.path.exists(self.destfile): os.unlink(self.destfile) return self.tearDownBuildStep() def test_init_workerdest_keyword(self): step = transfer.FileDownload( mastersrc='srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.FileDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.FileDownload() with self.assertRaises(TypeError): transfer.FileDownload('srcfile') @defer.inlineCallbacks def testBasic(self): master_file = __file__ self.setupStep( transfer.FileDownload( mastersrc=master_file, workerdest=self.destfile)) # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( workerdest=self.destfile, workdir='wkdir', blocksize=16384, maxsize=None, mode=None, 
reader=ExpectRemoteRef(remotetransfer.FileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to {0}".format( os.path.basename(self.destfile))) yield self.runStep() with open(master_file, "rb") as f: contents = f.read() # Only first 1000 bytes transferred in downloadString() helper contents = contents[:1000] self.assertEqual(b''.join(read), contents) @defer.inlineCallbacks def testBasicWorker2_16(self): master_file = __file__ self.setupStep( transfer.FileDownload( mastersrc=master_file, workerdest=self.destfile), worker_version={'*': '2.16'}) # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( slavedest=self.destfile, workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.FileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to {0}".format( os.path.basename(self.destfile))) yield self.runStep() def checkCalls(res): with open(master_file, "rb") as f: contents = f.read() # Only first 1000 bytes transferred in downloadString() helper contents = contents[:1000] self.assertEqual(b''.join(read), contents) class TestStringDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() # check that ConfigErrors is raised on invalid 'mode' argument def testModeConfError(self): with self.assertRaisesRegex(config.ConfigErrors, "StringDownload step's mode must be an integer or None," " got 'not-a-number'"): transfer.StringDownload("string", "file", mode="not-a-number") @defer.inlineCallbacks def testBasic(self): self.setupStep(transfer.StringDownload("Hello World", "hello.txt")) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( workerdest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.txt") yield self.runStep() def checkCalls(res): self.assertEqual(b''.join(read), b"Hello World") @defer.inlineCallbacks def testBasicWorker2_16(self): self.setupStep( transfer.StringDownload("Hello World", "hello.txt"), worker_version={'*': '2.16'}) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( slavedest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.txt") yield self.runStep() self.assertEqual(b''.join(read), b"Hello World") def testFailure(self): self.setupStep(transfer.StringDownload("Hello World", "hello.txt")) self.expectCommands( Expect('downloadFile', dict( workerdest="hello.txt", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + 1) self.expectOutcome( result=FAILURE, state_string="downloading to hello.txt (failure)") return self.runStep() def test_init_workerdest_keyword(self): step = transfer.StringDownload('srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def 
test_init_workerdest_positional(self): step = transfer.StringDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.StringDownload() with self.assertRaises(TypeError): transfer.StringDownload('srcfile') class TestJSONStringDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def testBasic(self): msg = dict(message="Hello World") self.setupStep(transfer.JSONStringDownload(msg, "hello.json")) self.step.worker = Mock() self.step.remote = Mock() # A place to store what gets read read = [] self.expectCommands( Expect('downloadFile', dict( workerdest="hello.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader)) ) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to hello.json") yield self.runStep() self.assertEqual(b''.join(read), b'{"message": "Hello World"}') def testFailure(self): msg = dict(message="Hello World") self.setupStep(transfer.JSONStringDownload(msg, "hello.json")) self.expectCommands( Expect('downloadFile', dict( workerdest="hello.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader))) + 1) self.expectOutcome( result=FAILURE, state_string="downloading to hello.json (failure)") return self.runStep() def test_init_workerdest_keyword(self): step = transfer.JSONStringDownload('srcfile', workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.JSONStringDownload('srcfile', 'dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.JSONStringDownload() with self.assertRaises(TypeError): transfer.JSONStringDownload('srcfile') class TestJSONPropertiesDownload(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def testBasic(self): self.setupStep(transfer.JSONPropertiesDownload("props.json")) self.step.build.setProperty('key1', 'value1', 'test') read = [] self.expectCommands( Expect('downloadFile', dict( workerdest="props.json", workdir='wkdir', blocksize=16384, maxsize=None, mode=None, reader=ExpectRemoteRef(remotetransfer.StringFileReader)) ) + Expect.behavior(downloadString(read.append)) + 0) self.expectOutcome( result=SUCCESS, state_string="downloading to props.json") yield self.runStep() # we decode as key order is dependent of python version self.assertEqual(json.loads((b''.join(read)).decode()), { "properties": {"key1": "value1"}, "sourcestamps": []}) def test_init_workerdest_keyword(self): step = transfer.JSONPropertiesDownload(workerdest='dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_workerdest_positional(self): step = transfer.JSONPropertiesDownload('dstfile') self.assertEqual(step.workerdest, 'dstfile') def test_init_positional_args(self): with self.assertRaises(TypeError): transfer.JSONPropertiesDownload() buildbot-2.6.0/master/buildbot/test/unit/test_steps_trigger.py000066400000000000000000000666171361162603000246700ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from zope.interface import implementer from buildbot import config from buildbot import interfaces from buildbot.process import properties from buildbot.process.results import CANCELLED from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.status import master from buildbot.steps import trigger from buildbot.test.fake import fakedb from buildbot.test.util import steps from buildbot.test.util.interfaces import InterfaceTests from buildbot.test.util.misc import TestReactorMixin @implementer(interfaces.ITriggerableScheduler) class FakeTriggerable: triggered_with = None result = SUCCESS bsid = 1 brids = {} exception = False never_finish = False def __init__(self, name): self.name = name def trigger(self, waited_for, sourcestamps=None, set_props=None, parent_buildid=None, parent_relationship=None): self.triggered_with = (waited_for, sourcestamps, set_props.properties) idsDeferred = defer.Deferred() idsDeferred.callback((self.bsid, self.brids)) resultsDeferred = defer.Deferred() if not self.never_finish: if self.exception: reactor.callLater( 0, resultsDeferred.errback, RuntimeError('oh noes')) else: reactor.callLater( 0, resultsDeferred.callback, (self.result, self.brids)) return (idsDeferred, resultsDeferred) class TriggerableInterfaceTest(unittest.TestCase, InterfaceTests): def test_interface(self): self.assertInterfacesImplemented(FakeTriggerable) class FakeSourceStamp: def __init__(self, **kwargs): self.__dict__.update(kwargs) def asDict(self, includePatch=True): return self.__dict__.copy() class FakeSchedulerManager: pass # Magic numbers that relate brid to other build settings def BRID_TO_BSID(brid): return brid + 2000 def BRID_TO_BID(brid): return brid + 3000 def BRID_TO_BUILD_NUMBER(brid): return brid + 4000 class TestTrigger(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() @defer.inlineCallbacks def setupStep(self, step, sourcestampsInBuild=None, gotRevisionsInBuild=None, *args, **kwargs): sourcestamps = sourcestampsInBuild or [] got_revisions = gotRevisionsInBuild or {} super().setupStep(step, *args, **kwargs) # This step reaches deeply into a number of parts of Buildbot. That # should be fixed! 
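        # Illustrative sketch (not part of the original file): FakeTriggerable
        # above implements the two-deferred contract of
        # ITriggerableScheduler.trigger() that the Trigger step relies on --
        # the first deferred fires with (bsid, brids) as soon as the buildset
        # exists, the second with (result, brids) once the triggered builds
        # finish.  'props' is assumed to be a Properties-like object.
        def _example_consume_trigger(scheduler, props):
            ids_d, results_d = scheduler.trigger(
                True, sourcestamps=[], set_props=props)
            # ids_d resolves immediately; results_d resolves only after the
            # triggered builds complete.
            return ids_d, results_d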
# set up a buildmaster that knows about two fake schedulers, a and b m = self.master m.db.checkForeignKeys = True self.build.builder.botmaster = m.botmaster self.build.conn = object() m.status = master.Status() yield m.status.setServiceParent(m) m.config.buildbotURL = "baseurl/" m.scheduler_manager = FakeSchedulerManager() self.scheduler_a = a = FakeTriggerable(name='a') self.scheduler_b = b = FakeTriggerable(name='b') self.scheduler_c = c = FakeTriggerable(name='c') m.scheduler_manager.namedServices = dict(a=a, b=b, c=c) a.brids = {77: 11} b.brids = {78: 22} c.brids = {79: 33, 80: 44} def make_fake_br(brid, builderid): return fakedb.BuildRequest( id=brid, buildsetid=BRID_TO_BSID(brid), builderid=builderid) def make_fake_build(brid, builderid): return fakedb.Build( buildrequestid=brid, id=BRID_TO_BID(brid), number=BRID_TO_BUILD_NUMBER(brid), masterid=9, workerid=13, builderid=builderid) m.db.insertTestData([ fakedb.Builder(id=77, name='A'), fakedb.Builder(id=78, name='B'), fakedb.Builder(id=79, name='C1'), fakedb.Builder(id=80, name='C2'), fakedb.Master(id=9), fakedb.Buildset(id=2022), fakedb.Buildset(id=2011), fakedb.Buildset(id=2033), fakedb.Worker(id=13, name="some:worker"), make_fake_br(11, 77), make_fake_br(22, 78), fakedb.BuildRequest(id=33, buildsetid=2033, builderid=79), fakedb.BuildRequest(id=44, buildsetid=2033, builderid=80), make_fake_build(11, builderid=77), make_fake_build(22, builderid=78), make_fake_build(33, builderid=79), # builderid is 79 on purpose, changed, from the one of the buildrequest # to test the case of the virtual make_fake_build(44, builderid=79), ]) def getAllSourceStamps(): return sourcestamps self.build.getAllSourceStamps = getAllSourceStamps def getAllGotRevisions(): return got_revisions self.step.getAllGotRevisions = getAllGotRevisions self.exp_add_sourcestamp = None self.exp_a_trigger = None self.exp_b_trigger = None self.exp_c_trigger = None self.exp_added_urls = [] @defer.inlineCallbacks def runStep(self, results_dict=None): if results_dict is None: results_dict = {} if self.step.waitForFinish: for i in [11, 22, 33, 44]: yield self.master.db.builds.finishBuild(BRID_TO_BID(i), results_dict.get(i, SUCCESS)) d = super().runStep() # the build doesn't finish until after a callLater, so this has the # effect of checking whether the deferred has been fired already; if self.step.waitForFinish: self.assertFalse(d.called) else: self.assertTrue(d.called) yield d self.assertEqual(self.scheduler_a.triggered_with, self.exp_a_trigger) self.assertEqual(self.scheduler_b.triggered_with, self.exp_b_trigger) # check the URLs stepUrls = self.master.data.updates.stepUrls if stepUrls: got_added_urls = stepUrls[list(stepUrls)[0]] else: got_added_urls = [] self.assertEqual(sorted(got_added_urls), sorted(self.exp_added_urls)) if self.exp_add_sourcestamp: self.assertEqual(self.addSourceStamp_kwargs, self.exp_add_sourcestamp) # pause runStep's completion until after any other callLater's are done d = defer.Deferred() reactor.callLater(0, d.callback, None) yield d def expectTriggeredWith(self, a=None, b=None, c=None, d=None): self.exp_a_trigger = a if a is not None: self.expectTriggeredLinks('a_br') self.exp_b_trigger = b if b is not None: self.expectTriggeredLinks('b_br') self.exp_c_trigger = c if c is not None: self.expectTriggeredLinks('c_br') def expectAddedSourceStamp(self, **kwargs): self.exp_add_sourcestamp = kwargs def expectTriggeredLinks(self, *args): if 'a_br' in args: self.exp_added_urls.append( ('a #11', 'baseurl/#buildrequests/11')) if 'b_br' in args: 
self.exp_added_urls.append( ('b #22', 'baseurl/#buildrequests/22')) if 'c_br' in args: self.exp_added_urls.append( ('c #33', 'baseurl/#buildrequests/33')) self.exp_added_urls.append( ('c #44', 'baseurl/#buildrequests/44')) if 'a' in args: self.exp_added_urls.append( ('success: A #4011', 'baseurl/#builders/77/builds/4011')) if 'b' in args: self.exp_added_urls.append( ('success: B #4022', 'baseurl/#builders/78/builds/4022')) if 'afailed' in args: self.exp_added_urls.append( ('failure: A #4011', 'baseurl/#builders/77/builds/4011')) if 'c' in args: self.exp_added_urls.append( ('success: C1 #4033', 'baseurl/#builders/79/builds/4033')) self.exp_added_urls.append( ('success: C1 #4044', 'baseurl/#builders/79/builds/4044')) # tests def test_no_schedulerNames(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger() def test_unimportantSchedulerNames_not_in_schedulerNames(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['a'], unimportantsShedulerNames=['b']) def test_sourceStamp_and_updateSourceStamp(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamp=dict(x=1), updateSourceStamp=True) def test_sourceStamps_and_updateSourceStamp(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamps=[dict(x=1), dict(x=2)], updateSourceStamp=True) def test_updateSourceStamp_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], updateSourceStamp=True, alwaysUseLatest=True) def test_sourceStamp_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamp=dict(x=1), alwaysUseLatest=True) def test_sourceStamps_and_alwaysUseLatest(self): with self.assertRaises(config.ConfigErrors): trigger.Trigger(schedulerNames=['c'], sourceStamps=[dict(x=1), dict(x=2)], alwaysUseLatest=True) @defer.inlineCallbacks def test_simple(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], sourceStamps={})) self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_simple_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'])) self.scheduler_a.result = FAILURE # not waitForFinish, so trigger step succeeds even though the build # didn't fail self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_simple_exception(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'])) self.scheduler_a.exception = True self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith(a=(False, [], {})) yield self.runStep() self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_bogus_scheduler(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'x'])) # bogus scheduler is an exception, not a failure (don't blame the patch) self.expectOutcome(result=EXCEPTION) self.expectTriggeredWith(a=None) # a is not triggered! 
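        # Sketch note (not part of the original test): the build URLs expected
        # by expectTriggeredLinks() above follow directly from the BRID_TO_*
        # helpers defined near the top of this file -- each fake id is just
        # the buildrequest id plus a fixed offset.
        assert BRID_TO_BSID(11) == 2011
        assert BRID_TO_BID(11) == 3011
        assert BRID_TO_BUILD_NUMBER(11) == 4011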
yield self.runStep() self.flushLoggedErrors(ValueError) @defer.inlineCallbacks def test_updateSourceStamp(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.expectOutcome(result=SUCCESS, state_string='triggered a') self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 23456}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_no_got_revision(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, # uses old revision [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_not_updateSourceStamp(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=False), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_multiple_repositories(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=True), sourcestampsInBuild=[ FakeSourceStamp(codebase='cb1', revision='12345'), FakeSourceStamp(codebase='cb2', revision='12345') ], gotRevisionsInBuild={'cb1': 23456, 'cb2': 34567}, ) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith( a=(False, [{'codebase': 'cb1', 'revision': 23456}, {'codebase': 'cb2', 'revision': 34567}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_prop_false(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=properties.Property('usess')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.properties.setProperty('usess', False, 'me') self.expectOutcome(result=SUCCESS) # didn't use got_revision self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_updateSourceStamp_prop_true(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], updateSourceStamp=properties.Property('usess')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ], gotRevisionsInBuild={'': 23456}, ) self.properties.setProperty('usess', True, 'me') self.expectOutcome(result=SUCCESS) # didn't use got_revision self.expectTriggeredWith( a=(False, [{'codebase': '', 'repository': 'x', 'revision': 23456}], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=True), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.expectOutcome(result=SUCCESS) # Do not pass setid self.expectTriggeredWith(b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest_prop_false(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=properties.Property('aul')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.properties.setProperty('aul', False, 'me') 
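        # Illustrative sketch (hypothetical helper, not Buildbot's actual
        # implementation): the updateSourceStamp tests above expect that, when
        # the option is enabled, a codebase's got_revision replaces the
        # original revision in the sourcestamps handed to the triggered
        # scheduler.
        def _example_updated_sourcestamps(sourcestamps, got_revisions, update):
            updated = []
            for ss in sourcestamps:
                ss = dict(ss)
                if update and ss.get('codebase', '') in got_revisions:
                    ss['revision'] = got_revisions[ss['codebase']]
                updated.append(ss)
            return updated

        assert (_example_updated_sourcestamps(
            [{'codebase': '', 'repository': 'x', 'revision': 11111}],
            {'': 23456}, update=True) ==
            [{'codebase': '', 'repository': 'x', 'revision': 23456}])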
self.expectOutcome(result=SUCCESS) # didn't use latest self.expectTriggeredWith( b=(False, [{'codebase': '', 'repository': 'x', 'revision': 11111}], {})) yield self.runStep() @defer.inlineCallbacks def test_alwaysUseLatest_prop_true(self): yield self.setupStep(trigger.Trigger(schedulerNames=['b'], alwaysUseLatest=properties.Property('aul')), sourcestampsInBuild=[FakeSourceStamp(codebase='', repository='x', revision=11111) ]) self.properties.setProperty('aul', True, 'me') self.expectOutcome(result=SUCCESS) # didn't use latest self.expectTriggeredWith(b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_sourceStamp(self): ss = dict(revision=9876, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamp=ss)) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss], {})) yield self.runStep() @defer.inlineCallbacks def test_set_of_sourceStamps(self): ss1 = dict( codebase='cb1', repository='r1', revision=9876, branch='dev') ss2 = dict( codebase='cb2', repository='r2', revision=5432, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamps=[ss1, ss2])) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss1, ss2], {})) yield self.runStep() @defer.inlineCallbacks def test_set_of_sourceStamps_override_build(self): ss1 = dict( codebase='cb1', repository='r1', revision=9876, branch='dev') ss2 = dict( codebase='cb2', repository='r2', revision=5432, branch='dev') ss3 = FakeSourceStamp( codebase='cb3', repository='r3', revision=1234, branch='dev') ss4 = FakeSourceStamp( codebase='cb4', repository='r4', revision=2345, branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamps=[ss1, ss2]), sourcestampsInBuild=[ss3, ss4]) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [ss1, ss2], {})) yield self.runStep() @defer.inlineCallbacks def test_sourceStamp_prop(self): ss = dict(revision=properties.Property('rev'), branch='dev') yield self.setupStep(trigger.Trigger(schedulerNames=['b'], sourceStamp=ss)) self.properties.setProperty('rev', 602, 'me') expected_ss = dict(revision=602, branch='dev') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(b=(False, [expected_ss], {})) yield self.runStep() @defer.inlineCallbacks def test_waitForFinish(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('a', 'b') yield self.runStep() @defer.inlineCallbacks def test_waitForFinish_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.scheduler_a.result = FAILURE self.expectOutcome(result=FAILURE) self.expectTriggeredWith(a=(True, [], {})) self.expectTriggeredLinks('afailed') yield self.runStep(results_dict={11: FAILURE}) @defer.inlineCallbacks def test_waitForFinish_split_failure(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) self.scheduler_a.result = FAILURE self.scheduler_b.result = SUCCESS self.expectOutcome(result=FAILURE, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('afailed', 'b') yield self.runStep(results_dict={11: FAILURE}) @defer.inlineCallbacks def test_waitForFinish_exception(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], waitForFinish=True)) self.step.addCompleteLog = 
Mock() self.scheduler_b.exception = True self.expectOutcome(result=EXCEPTION, state_string='triggered a, b') self.expectTriggeredWith( a=(True, [], {}), b=(True, [], {})) self.expectTriggeredLinks('a') # b doesn't return a brid yield self.runStep() self.assertEqual(len(self.step.addCompleteLog.call_args_list), 1) @defer.inlineCallbacks def test_virtual_builder(self): yield self.setupStep(trigger.Trigger(schedulerNames=['c'], waitForFinish=True)) self.expectOutcome(result=SUCCESS, state_string='triggered c') self.expectTriggeredWith( c=(True, [], {})) self.expectTriggeredLinks('c') yield self.runStep() @defer.inlineCallbacks def test_set_properties(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], set_properties=dict(x=1, y=2))) self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(x=(1, 'Trigger'), y=(2, 'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_set_properties_prop(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], set_properties=dict(x=properties.Property('X'), y=2))) self.properties.setProperty('X', 'xxx', 'here') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(x=('xxx', 'Trigger'), y=(2, 'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_copy_properties(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], copy_properties=['a', 'b'])) self.properties.setProperty('a', 'A', 'AA') self.properties.setProperty('b', 'B', 'BB') self.properties.setProperty('c', 'C', 'CC') self.expectOutcome(result=SUCCESS) self.expectTriggeredWith(a=(False, [], dict(a=('A', 'Trigger'), b=('B', 'Trigger')))) yield self.runStep() @defer.inlineCallbacks def test_waitForFinish_interrupt(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.expectOutcome(result=CANCELLED, state_string='interrupted') self.expectTriggeredWith(a=(True, [], {})) d = self.runStep() # interrupt before the callLater representing the Triggerable # schedulers completes self.step.interrupt(failure.Failure(RuntimeError('oh noes'))) yield d @defer.inlineCallbacks def test_waitForFinish_interrupt_no_connection(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a'], waitForFinish=True)) self.expectOutcome(result=CANCELLED, state_string='interrupted') self.expectTriggeredWith(a=(True, [], {})) self.scheduler_a.never_finish = True d = self.runStep() # interrupt before the callLater representing the Triggerable # schedulers completes self.build.conn = None self.step.interrupt(failure.Failure(RuntimeError('oh noes'))) yield d @defer.inlineCallbacks def test_getSchedulersAndProperties_back_comp(self): class DynamicTrigger(trigger.Trigger): def getSchedulersAndProperties(self): return [("a", {}, False), ("b", {}, True)] yield self.setupStep(DynamicTrigger(schedulerNames=['a', 'b'])) self.scheduler_a.result = SUCCESS self.scheduler_b.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith(a=(False, [], {}), b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_unimportantsShedulerNames(self): yield self.setupStep(trigger.Trigger(schedulerNames=['a', 'b'], unimportantSchedulerNames=['b'])) self.scheduler_a.result = SUCCESS self.scheduler_b.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, b') self.expectTriggeredWith(a=(False, [], {}), b=(False, [], {})) yield self.runStep() @defer.inlineCallbacks def test_unimportantsShedulerNames_with_more_brids_for_bsid(self): yield 
self.setupStep(trigger.Trigger(schedulerNames=['a', 'c'], unimportantSchedulerNames=['c'])) self.scheduler_a.result = SUCCESS self.scheduler_c.result = FAILURE self.expectOutcome(result=SUCCESS, state_string='triggered a, c') self.expectTriggeredWith(a=(False, [], {}), c=(False, [], {})) yield self.runStep() buildbot-2.6.0/master/buildbot/test/unit/test_steps_vstudio.py000066400000000000000000000765211361162603000247150ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from mock import Mock from twisted.internet import defer from twisted.trial import unittest from buildbot.process.properties import Property from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.process.results import WARNINGS from buildbot.steps import vstudio from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin real_log = r""" 1>------ Build started: Project: lib1, Configuration: debug Win32 ------ 1>Compiling... 1>SystemLog.cpp 1>c:\absolute\path\to\systemlog.cpp(7) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(12) : warning C4100: 'statusword' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(12) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(17) : warning C4100: 'retryCounter' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(17) : warning C4100: 'op' : unreferenced formal parameter 1>c:\absolute\path\to\systemlog.cpp(22) : warning C4100: 'op' : unreferenced formal parameter 1>Creating library... 1>Build log was saved at "file://c:\another\absolute\path\to\debug\BuildLog.htm" 1>lib1 - 0 error(s), 6 warning(s) 2>------ Build started: Project: product, Configuration: debug Win32 ------ 2>Linking... 
2>LINK : fatal error LNK1168: cannot open ../../debug/directory/dllname.dll for writing 2>Build log was saved at "file://c:\another\similar\path\to\debug\BuildLog.htm" 2>product - 1 error(s), 0 warning(s) ========== Build: 1 succeeded, 1 failed, 6 up-to-date, 0 skipped ========== """ class TestAddEnvPath(unittest.TestCase): def do_test(self, initial_env, name, value, expected_env): vstudio.addEnvPath(initial_env, name, value) self.assertEqual(initial_env, expected_env) def test_new(self): self.do_test({}, 'PATH', r'C:\NOTHING', {'PATH': r'C:\NOTHING;'}) def test_new_semi(self): self.do_test({}, 'PATH', r'C:\NOTHING;', {'PATH': r'C:\NOTHING;'}) def test_existing(self): self.do_test({'PATH': '/bin'}, 'PATH', r'C:\NOTHING', {'PATH': r'/bin;C:\NOTHING;'}) def test_existing_semi(self): self.do_test({'PATH': '/bin;'}, 'PATH', r'C:\NOTHING', {'PATH': r'/bin;C:\NOTHING;'}) def test_existing_both_semi(self): self.do_test({'PATH': '/bin;'}, 'PATH', r'C:\NOTHING;', {'PATH': r'/bin;C:\NOTHING;'}) class MSLogLineObserver(unittest.TestCase): def setUp(self): self.warnings = [] lw = Mock() lw.addStdout = lambda l: self.warnings.append(l.rstrip()) self.errors = [] self.errors_stderr = [] le = Mock() le.addStdout = lambda l: self.errors.append(('o', l.rstrip())) le.addStderr = lambda l: self.errors.append(('e', l.rstrip())) self.llo = vstudio.MSLogLineObserver(lw, le) self.progress = {} self.llo.step = Mock() self.llo.step.setProgress = self.progress.__setitem__ def receiveLines(self, *lines): for line in lines: self.llo.outLineReceived(line) def assertResult(self, nbFiles=0, nbProjects=0, nbWarnings=0, nbErrors=0, errors=None, warnings=None, progress=None): if errors is None: errors = [] if warnings is None: warnings = [] if progress is None: progress = {} self.assertEqual( dict(nbFiles=self.llo.nbFiles, nbProjects=self.llo.nbProjects, nbWarnings=self.llo.nbWarnings, nbErrors=self.llo.nbErrors, errors=self.errors, warnings=self.warnings, progress=self.progress), dict(nbFiles=nbFiles, nbProjects=nbProjects, nbWarnings=nbWarnings, nbErrors=nbErrors, errors=errors, warnings=warnings, progress=progress)) def test_outLineReceived_empty(self): self.llo.outLineReceived('abcd\r\n') self.assertResult() def test_outLineReceived_projects(self): lines = [ "123>----- some project 1 -----", "123>----- some project 2 -----", ] self.receiveLines(*lines) self.assertResult(nbProjects=2, progress=dict(projects=2), errors=[('o', l) for l in lines], warnings=lines) def test_outLineReceived_files(self): lines = [ "123>SomeClass.cpp", "123>SomeStuff.c", "123>SomeStuff.h", # .h files not recognized ] self.receiveLines(*lines) self.assertResult(nbFiles=2, progress=dict(files=2)) def test_outLineReceived_warnings(self): lines = [ "abc: warning ABC123: xyz!", "def : warning DEF456: wxy!", ] self.receiveLines(*lines) self.assertResult(nbWarnings=2, progress=dict(warnings=2), warnings=lines) def test_outLineReceived_errors(self): lines = [ "error ABC123: foo", " error DEF456 : bar", " error : bar", " error: bar", # NOTE: not matched ] self.receiveLines(*lines) self.assertResult(nbErrors=3, # note: no progress errors=[ ('e', "error ABC123: foo"), ('e', " error DEF456 : bar"), ('e', " error : bar"), ]) def test_outLineReceived_real(self): # based on a real logfile donated by Ben Allard lines = real_log.split("\n") self.receiveLines(*lines) errors = [ ('o', '1>------ Build started: Project: lib1, Configuration: debug Win32 ------'), ('o', '2>------ Build started: Project: product, Configuration: debug Win32 ------'), ('e', '2>LINK : 
fatal error LNK1168: cannot open ../../debug/directory/dllname.dll for writing') ] warnings = [ '1>------ Build started: Project: lib1, Configuration: debug Win32 ------', "1>c:\\absolute\\path\\to\\systemlog.cpp(7) : warning C4100: 'op' : unreferenced formal parameter", "1>c:\\absolute\\path\\to\\systemlog.cpp(12) : warning C4100: 'statusword' : unreferenced formal parameter", "1>c:\\absolute\\path\\to\\systemlog.cpp(12) : warning C4100: 'op' : unreferenced formal parameter", "1>c:\\absolute\\path\\to\\systemlog.cpp(17) : warning C4100: 'retryCounter' : unreferenced formal parameter", "1>c:\\absolute\\path\\to\\systemlog.cpp(17) : warning C4100: 'op' : unreferenced formal parameter", "1>c:\\absolute\\path\\to\\systemlog.cpp(22) : warning C4100: 'op' : unreferenced formal parameter", '2>------ Build started: Project: product, Configuration: debug Win32 ------', ] self.assertResult(nbFiles=1, nbErrors=1, nbProjects=2, nbWarnings=6, progress={'files': 1, 'projects': 2, 'warnings': 6}, errors=errors, warnings=warnings) class VCx(vstudio.VisualStudio): def start(self): command = ["command", "here"] self.setCommand(command) return super().start() class VisualStudio(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): """ Test L{VisualStudio} with a simple subclass, L{VCx}. """ def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_default_config(self): vs = vstudio.VisualStudio() self.assertEqual(vs.config, 'release') def test_simple(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_installdir(self): self.setupStep(VCx(installdir=r'C:\I')) self.step.exp_installdir = r'C:\I' self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual(self.step.installdir, r'C:\I') def test_evaluateCommand_failure(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 1 ) self.expectOutcome(result=FAILURE, state_string="compile 0 projects 0 files (failure)") return self.runStep() def test_evaluateCommand_errors(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + ExpectShell.log('stdio', stdout='error ABC123: foo\r\n') + 0 ) self.expectOutcome(result=FAILURE, state_string="compile 0 projects 0 files 1 errors (failure)") return self.runStep() def test_evaluateCommand_warnings(self): self.setupStep(VCx()) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + ExpectShell.log('stdio', stdout='foo: warning ABC123: foo\r\n') + 0 ) self.expectOutcome(result=WARNINGS, state_string="compile 0 projects 0 files 1 warnings (warnings)") return self.runStep() def test_env_setup(self): self.setupStep(VCx( INCLUDE=[r'c:\INC1', r'c:\INC2'], LIB=[r'c:\LIB1', r'C:\LIB2'], PATH=[r'c:\P1', r'C:\P2'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here'], env=dict( INCLUDE=r'c:\INC1;c:\INC2;', LIB=r'c:\LIB1;C:\LIB2;', PATH=r'c:\P1;C:\P2;')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_setup_existing(self): self.setupStep(VCx( INCLUDE=[r'c:\INC1', r'c:\INC2'], LIB=[r'c:\LIB1', r'C:\LIB2'], 
PATH=[r'c:\P1', r'C:\P2'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here'], env=dict( INCLUDE=r'c:\INC1;c:\INC2;', LIB=r'c:\LIB1;C:\LIB2;', PATH=r'c:\P1;C:\P2;')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_rendering(self): self.setupStep(VCx( projectfile=Property('a'), config=Property('b'), project=Property('c'))) self.properties.setProperty('a', 'aa', 'Test') self.properties.setProperty('b', 'bb', 'Test') self.properties.setProperty('c', 'cc', 'Test') self.expectCommands( ExpectShell(workdir='wkdir', command=['command', 'here']) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual( [self.step.projectfile, self.step.config, self.step.project], ['aa', 'bb', 'cc']) class TestVC6(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def getExpectedEnv(self, installdir, LIB=None, p=None, i=None): include = [ installdir + r'\VC98\INCLUDE;', installdir + r'\VC98\ATL\INCLUDE;', installdir + r'\VC98\MFC\INCLUDE;', ] lib = [ installdir + r'\VC98\LIB;', installdir + r'\VC98\MFC\LIB;', ] path = [ installdir + r'\Common\msdev98\BIN;', installdir + r'\VC98\BIN;', installdir + r'\Common\TOOLS\WINNT;', installdir + r'\Common\TOOLS;', ] if p: path.insert(0, '%s;' % p) if i: include.insert(0, '%s;' % i) if LIB: lib.insert(0, '%s;' % LIB) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) def test_args(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/REBUILD'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/CLEAN'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_noproj_build(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'ALL - cfg', '/BUILD'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_prepend(self): self.setupStep(vstudio.VC6(projectfile='pf', config='cfg', project='pj', PATH=['p'], INCLUDE=['i'], LIB=['l'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['msdev', 'pf', '/MAKE', 'pj - cfg', '/REBUILD', '/USEENV'], # note extra param env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio', LIB='l', p='p', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC7(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def getExpectedEnv(self, installdir, LIB=None, p=None, i=None): include = [ 
installdir + r'\VC7\INCLUDE;', installdir + r'\VC7\ATLMFC\INCLUDE;', installdir + r'\VC7\PlatformSDK\include;', installdir + r'\SDK\v1.1\include;', ] lib = [ installdir + r'\VC7\LIB;', installdir + r'\VC7\ATLMFC\LIB;', installdir + r'\VC7\PlatformSDK\lib;', installdir + r'\SDK\v1.1\lib;', ] path = [ installdir + r'\Common7\IDE;', installdir + r'\VC7\BIN;', installdir + r'\Common7\Tools;', installdir + r'\Common7\Tools\bin;', ] if p: path.insert(0, '%s;' % p) if i: include.insert(0, '%s;' % i) if LIB: lib.insert(0, '%s;' % LIB) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) def test_args(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_noproj_build(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', mode='build')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Build', 'cfg'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_env_prepend(self): self.setupStep(vstudio.VC7(projectfile='pf', config='cfg', project='pj', PATH=['p'], INCLUDE=['i'], LIB=['l'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/UseEnv', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio .NET 2003', LIB='l', p='p', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class VC8ExpectedEnvMixin: # used for VC8 and VC9Express def getExpectedEnv(self, installdir, x64=False, LIB=None, i=None, p=None): include = [ installdir + r'\VC\INCLUDE;', installdir + r'\VC\ATLMFC\include;', installdir + r'\VC\PlatformSDK\include;', ] lib = [ installdir + r'\VC\LIB;', installdir + r'\VC\ATLMFC\LIB;', installdir + r'\VC\PlatformSDK\lib;', installdir + r'\SDK\v2.0\lib;', ] path = [ installdir + r'\Common7\IDE;', installdir + r'\VC\BIN;', installdir + r'\Common7\Tools;', installdir + r'\Common7\Tools\bin;', installdir + r'\VC\PlatformSDK\bin;', installdir + r'\SDK\v2.0\bin;', installdir + r'\VC\VCPackages;', r'${PATH};', ] if x64: path.insert(1, installdir + r'\VC\BIN\x86_amd64;') lib = [lb[:-1] + r'\amd64;' for lb in lib] if LIB: lib.insert(0, '%s;' % LIB) if p: path.insert(0, '%s;' % p) if i: include.insert(0, '%s;' % i) return dict( INCLUDE=''.join(include), LIB=''.join(lib), PATH=''.join(path), ) class TestVC8(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_args(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', arch='arch')) 
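        # Sketch note (not part of the original test): for arch='x64' the
        # VC8ExpectedEnvMixin.getExpectedEnv() above rewrites every LIB entry
        # by dropping the trailing ';' and appending r'\amd64;'.  The
        # installdir below is a hypothetical example value.
        _libs = [r'C:\VS8\VC\LIB;', r'C:\VS8\VC\ATLMFC\LIB;']
        assert ([lb[:-1] + r'\amd64;' for lb in _libs] ==
                [r'C:\VS8\VC\LIB\amd64;', r'C:\VS8\VC\ATLMFC\LIB\amd64;'])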
self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_args_x64(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', arch='x64')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8', x64=True)) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() @defer.inlineCallbacks def test_rendering(self): self.setupStep(vstudio.VC8(projectfile='pf', config='cfg', arch=Property('a'))) self.properties.setProperty('a', 'x64', 'Test') self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8', x64=True)) # property has expected effect + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") yield self.runStep() self.assertEqual(self.step.arch, 'x64') class TestVCExpress9(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_args(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( # note: still uses version 8 (?!) r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_clean(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj', mode='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Clean', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( # note: still uses version 8 (?!) 
r'C:\Program Files\Microsoft Visual Studio 8')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() def test_mode_build_env(self): self.setupStep(vstudio.VCExpress9(projectfile='pf', config='cfg', project='pj', mode='build', INCLUDE=['i'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['vcexpress', 'pf', '/Build', 'cfg', '/UseEnv', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 8', i='i')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC9(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC9(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 9.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC10(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC10(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 10.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestVC11(VC8ExpectedEnvMixin, steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_installdir(self): self.setupStep(vstudio.VC11(projectfile='pf', config='cfg', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command=['devenv.com', 'pf', '/Rebuild', 'cfg', '/Project', 'pj'], env=self.getExpectedEnv( r'C:\Program Files\Microsoft Visual Studio 11.0')) + 0 ) self.expectOutcome(result=SUCCESS, state_string="compile 0 projects 0 files") return self.runStep() class TestMsBuild(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_build_project(self): self.setupStep(vstudio.MsBuild( projectfile='pf', config='cfg', platform='Win32', project='pj')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="Win32" /maxcpucount /t:"pj"', env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built pj for cfg|Win32") return self.runStep() def test_build_solution(self): self.setupStep( vstudio.MsBuild(projectfile='pf', config='cfg', platform='x64')) self.expectCommands( ExpectShell(workdir='wkdir', command='"%VCENV_BAT%" x86 && msbuild "pf" /p:Configuration="cfg" /p:Platform="x64" /maxcpucount /t:Rebuild', env={'VCENV_BAT': r'${VS110COMNTOOLS}..\..\VC\vcvarsall.bat'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="built solution for cfg|x64") return self.runStep() 
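# Illustrative sketch (not part of the upstream test module): the tests above
# check the command lines and environments that the Visual Studio steps
# generate. For context, a hypothetical master.cfg fragment wiring the MsBuild
# step into a build factory could look roughly like the helper below; the
# solution file, configuration and project names are placeholders, and
# BuildFactory is assumed from the usual buildbot.process.factory module.
def _example_msbuild_factory():
    from buildbot.process.factory import BuildFactory
    from buildbot.steps.vstudio import MsBuild

    f = BuildFactory()
    f.addStep(MsBuild(
        projectfile='project.sln',  # the 'pf' argument exercised above
        config='Release',           # rendered as /p:Configuration="Release"
        platform='Win32',           # rendered as /p:Platform="Win32"
        project='MyProject'))       # omit to rebuild the whole solution (/t:Rebuild)
    return f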
class Aliases(unittest.TestCase): def test_vs2003(self): self.assertIdentical(vstudio.VS2003, vstudio.VC7) def test_vs2005(self): self.assertIdentical(vstudio.VS2005, vstudio.VC8) def test_vs2008(self): self.assertIdentical(vstudio.VS2008, vstudio.VC9) def test_vs2010(self): self.assertIdentical(vstudio.VS2010, vstudio.VC10) def test_vs2012(self): self.assertIdentical(vstudio.VS2012, vstudio.VC11) buildbot-2.6.0/master/buildbot/test/unit/test_steps_worker.py000066400000000000000000000422261361162603000245240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import stat from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import WorkerTooOldError from buildbot.process import buildstep from buildbot.process import properties from buildbot.process import remotetransfer from buildbot.process.results import EXCEPTION from buildbot.process.results import FAILURE from buildbot.process.results import SUCCESS from buildbot.steps import worker from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.util import steps from buildbot.test.util.misc import TestReactorMixin def uploadString(string): def behavior(command): writer = command.args['writer'] writer.remote_write(string) writer.remote_close() return behavior class TestSetPropertiesFromEnv(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_simple(self): self.setupStep(worker.SetPropertiesFromEnv( variables=["one", "two", "three", "five", "six"], source="me")) self.worker.worker_environ = { "one": "1", "two": None, "six": "6", "FIVE": "555"} self.worker.worker_system = 'linux' self.properties.setProperty("four", 4, "them") self.properties.setProperty("five", 5, "them") self.properties.setProperty("six", 99, "them") self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty('one', "1", source='me') self.expectNoProperty('two') self.expectNoProperty('three') self.expectProperty('four', 4, source='them') self.expectProperty('five', 5, source='them') self.expectProperty('six', '6', source='me') self.expectLogfile("properties", "one = '1'\nsix = '6'") return self.runStep() def test_case_folding(self): self.setupStep(worker.SetPropertiesFromEnv( variables=["eNv"], source="me")) self.worker.worker_environ = {"ENV": 'EE'} self.worker.worker_system = 'win32' self.expectOutcome(result=SUCCESS, state_string="Set") self.expectProperty('eNv', 'EE', source='me') self.expectLogfile("properties", "eNv = 'EE'") return self.runStep() class TestFileExists(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return 
self.tearDownBuildStep() def test_found(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + Expect.update('stat', [stat.S_IFREG, 99, 99]) + 0 ) self.expectOutcome(result=SUCCESS, state_string="File found.") return self.runStep() def test_not_found(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + Expect.update('stat', [0, 99, 99]) + 0 ) self.expectOutcome(result=FAILURE, state_string="Not a file. (failure)") return self.runStep() def test_failure(self): self.setupStep(worker.FileExists(file="x")) self.expectCommands( Expect('stat', {'file': 'x'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="File not found. (failure)") return self.runStep() def test_render(self): self.setupStep(worker.FileExists(file=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('stat', {'file': 'XXX'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="File not found. (failure)") return self.runStep() @defer.inlineCallbacks def test_old_version(self): self.setupStep(worker.FileExists(file="x"), worker_version=dict()) self.expectOutcome(result=EXCEPTION, state_string="finished (exception)") yield self.runStep() self.flushLoggedErrors(WorkerTooOldError) class TestCopyDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.CopyDirectory(src="s", dest="d")) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_timeout(self): self.setupStep(worker.CopyDirectory(src="s", dest="d", timeout=300)) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'timeout': 300}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_maxTime(self): self.setupStep(worker.CopyDirectory(src="s", dest="d", maxTime=10)) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd', 'maxTime': 10}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied s to d") return self.runStep() def test_failure(self): self.setupStep(worker.CopyDirectory(src="s", dest="d")) self.expectCommands( Expect('cpdir', {'fromdir': 's', 'todir': 'd'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Copying s to d failed.") return self.runStep() def test_render(self): self.setupStep(worker.CopyDirectory( src=properties.Property("x"), dest=properties.Property("y"))) self.properties.setProperty('x', 'XXX', 'here') self.properties.setProperty('y', 'YYY', 'here') self.expectCommands( Expect('cpdir', {'fromdir': 'XXX', 'todir': 'YYY'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Copied XXX to YYY") return self.runStep() class TestRemoveDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.RemoveDirectory(dir="d")) self.expectCommands( Expect('rmdir', {'dir': 'd'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Deleted") return self.runStep() def test_failure(self): self.setupStep(worker.RemoveDirectory(dir="d")) self.expectCommands( Expect('rmdir', {'dir': 'd'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Deleted 
(failure)") return self.runStep() def test_render(self): self.setupStep(worker.RemoveDirectory(dir=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('rmdir', {'dir': 'XXX'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Deleted") return self.runStep() class TestMakeDirectory(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_success(self): self.setupStep(worker.MakeDirectory(dir="d")) self.expectCommands( Expect('mkdir', {'dir': 'd'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Created") return self.runStep() def test_failure(self): self.setupStep(worker.MakeDirectory(dir="d")) self.expectCommands( Expect('mkdir', {'dir': 'd'}) + 1 ) self.expectOutcome(result=FAILURE, state_string="Created (failure)") return self.runStep() def test_render(self): self.setupStep(worker.MakeDirectory(dir=properties.Property("x"))) self.properties.setProperty('x', 'XXX', 'here') self.expectCommands( Expect('mkdir', {'dir': 'XXX'}) + 0 ) self.expectOutcome(result=SUCCESS, state_string="Created") return self.runStep() class CompositeUser(buildstep.LoggingBuildStep, worker.CompositeStepMixin): def __init__(self, payload): self.payload = payload self.logEnviron = False super().__init__() @defer.inlineCallbacks def start(self): self.addLogForRemoteCommands('stdio') try: res = yield self.payload(self) self.payloadComplete(res) except Exception as e: self.failed(e) def payloadComplete(self, res): self.finished(FAILURE if res else SUCCESS) class TestCompositeStepMixin(steps.BuildStepMixin, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() return self.setUpBuildStep() def tearDown(self): return self.tearDownBuildStep() def test_runRemoteCommand(self): cmd_args = ('foo', {'bar': False}) def testFunc(x): x.runRemoteCommand(*cmd_args) self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 0) self.expectOutcome(result=SUCCESS) def test_runRemoteCommandFail(self): cmd_args = ('foo', {'bar': False}) @defer.inlineCallbacks def testFunc(x): yield x.runRemoteCommand(*cmd_args) self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 1) self.expectOutcome(result=FAILURE) return self.runStep() @defer.inlineCallbacks def test_runRemoteCommandFailNoAbandon(self): cmd_args = ('foo', {'bar': False}) @defer.inlineCallbacks def testFunc(x): yield x.runRemoteCommand(*cmd_args, **dict(abandonOnFailure=False)) testFunc.ran = True self.setupStep(CompositeUser(testFunc)) self.expectCommands(Expect(*cmd_args) + 1) self.expectOutcome(result=SUCCESS) yield self.runStep() self.assertTrue(testFunc.ran) def test_rmfile(self): self.setupStep(CompositeUser(lambda x: x.runRmFile("d"))) self.expectCommands( Expect('rmfile', {'path': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mkdir(self): self.setupStep(CompositeUser(lambda x: x.runMkdir("d"))) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_rmdir(self): self.setupStep(CompositeUser(lambda x: x.runRmdir("d"))) self.expectCommands( Expect('rmdir', {'dir': 'd', 'logEnviron': False}) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_mkdir_fail(self): self.setupStep(CompositeUser(lambda x: x.runMkdir("d"))) self.expectCommands( Expect('mkdir', 
{'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_glob(self): @defer.inlineCallbacks def testFunc(x): res = yield x.runGlob("*.pyc") self.assertEqual(res, ["one.pyc", "two.pyc"]) self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + Expect.update('files', ["one.pyc", "two.pyc"]) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_glob_fail(self): self.setupStep(CompositeUser(lambda x: x.runGlob("*.pyc"))) self.expectCommands( Expect('glob', {'path': '*.pyc', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_abandonOnFailure(self): @defer.inlineCallbacks def testFunc(x): yield x.runMkdir("d") yield x.runMkdir("d") self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=FAILURE) return self.runStep() def test_notAbandonOnFailure(self): @defer.inlineCallbacks def testFunc(x): yield x.runMkdir("d", abandonOnFailure=False) yield x.runMkdir("d", abandonOnFailure=False) self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1, Expect('mkdir', {'dir': 'd', 'logEnviron': False}) + 1 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_getFileContentFromWorker(self): @defer.inlineCallbacks def testFunc(x): res = yield x.getFileContentFromWorker("file.txt") self.assertEqual(res, "Hello world!") self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('uploadFile', dict( workersrc="file.txt", workdir='wkdir', blocksize=32 * 1024, maxsize=None, writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_getFileContentFromWorker2_16(self): @defer.inlineCallbacks def testFunc(x): res = yield x.getFileContentFromWorker("file.txt") self.assertEqual(res, "Hello world!") self.setupStep( CompositeUser(testFunc), worker_version={'*': '2.16'}) self.expectCommands( Expect('uploadFile', dict( slavesrc="file.txt", workdir='wkdir', blocksize=32 * 1024, maxsize=None, writer=ExpectRemoteRef(remotetransfer.StringFileWriter))) + Expect.behavior(uploadString("Hello world!")) + 0 ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_downloadFileContentToWorker(self): @defer.inlineCallbacks def testFunc(x): res = yield x.downloadFileContentToWorker("/path/dest1", "file text") self.assertEqual(res, None) exp_args = {'maxsize': None, 'workdir': 'wkdir', 'mode': None, 'reader': ExpectRemoteRef(remotetransfer.FileReader), 'blocksize': 32768, 'workerdest': '/path/dest1'} self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('downloadFile', exp_args) ) self.expectOutcome(result=SUCCESS) return self.runStep() def test_downloadFileContentToWorkerWithFilePermissions(self): @defer.inlineCallbacks def testFunc(x): res = yield x.downloadFileContentToWorker("/path/dest1", "file text", mode=stat.S_IRUSR) self.assertEqual(res, None) exp_args = {'maxsize': None, 'workdir': 'wkdir', 'mode': stat.S_IRUSR, 'reader': ExpectRemoteRef(remotetransfer.FileReader), 'blocksize': 32768, 'workerdest': '/path/dest1'} self.setupStep(CompositeUser(testFunc)) self.expectCommands( Expect('downloadFile', exp_args) ) self.expectOutcome(result=SUCCESS) return self.runStep() 
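# Illustrative sketch (not part of the upstream test module): the
# TestCompositeStepMixin cases above drive the mixin's helpers through the
# fake CompositeUser step. A custom step calling the same helpers directly
# might look roughly like the class below; the step name, the 'scratch'
# directory and the marker file are hypothetical placeholders.
from twisted.internet import defer

from buildbot.process import buildstep
from buildbot.process.results import SUCCESS
from buildbot.steps.worker import CompositeStepMixin


class _ExampleCleanAndSeed(buildstep.BuildStep, CompositeStepMixin):
    """Remove a scratch directory on the worker, recreate it, drop a marker."""

    @defer.inlineCallbacks
    def run(self):
        self.addLogForRemoteCommands('stdio')  # collect remote command output
        # tolerate a missing directory rather than abandoning the step
        yield self.runRmdir('scratch', abandonOnFailure=False)
        yield self.runMkdir('scratch')
        yield self.downloadFileContentToWorker('scratch/.seeded', 'seeded\n')
        return SUCCESS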
buildbot-2.6.0/master/buildbot/test/unit/test_templates_dir/
buildbot-2.6.0/master/buildbot/test/unit/test_templates_dir/builds.html
buildbot-2.6.0/master/buildbot/test/unit/test_templates_dir/plugin/000077500000000000000000000000001361162603000255475ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/unit/test_templates_dir/plugin/plugin.jade000066400000000000000000000000531361162603000276700ustar00rootroot00000000000000.myclass pre | this is customized buildbot-2.6.0/master/buildbot/test/unit/test_test_util_gpo.py000066400000000000000000000335251361162603000246600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import twisted from twisted.internet import defer from twisted.internet import utils from twisted.trial import reporter from twisted.trial import unittest from buildbot.test.util.gpo import Expect from buildbot.test.util.gpo import GetProcessOutputMixin class TestGPOMixin(unittest.TestCase): # these tests use self.patch, but the SkipTest exception gets eaten, so # explicitly skip things here. if twisted.version.major <= 9 and sys.version_info[:2] == (2, 7): skip = "unittest.TestCase.patch is not available" def runTestMethod(self, method): class TestCase(GetProcessOutputMixin, unittest.TestCase): def setUp(self): self.setUpGetProcessOutput() def runTest(self): return method(self) self.testcase = TestCase() result = reporter.TestResult() self.testcase.run(result) # This blocks return result def assertTestFailure(self, result, expectedFailure): self.assertEqual(result.errors, []) self.assertEqual(len(result.failures), 1) self.assertTrue(result.failures[0][1].check(unittest.FailTest)) if expectedFailure: self.assertSubstring( expectedFailure, result.failures[0][1].getErrorMessage()) def assertSuccessful(self, result): if not result.wasSuccessful(): output = 'expected success' if result.failures: output += ('\ntest failed: %s' % result.failures[0][1].getErrorMessage()) if result.errors: output += ('\nerrors: %s' % [error[1].value for error in result.errors]) raise self.failureException(output) self.assertTrue(result.wasSuccessful()) def test_patch(self): original_getProcessOutput = utils.getProcessOutput original_getProcessOutputAndValue = utils.getProcessOutputAndValue def method(testcase): testcase.expectCommands() self.assertEqual(utils.getProcessOutput, testcase.patched_getProcessOutput) self.assertEqual(utils.getProcessOutputAndValue, testcase.patched_getProcessOutputAndValue) result = self.runTestMethod(method) self.assertSuccessful(result) self.assertEqual(utils.getProcessOutput, original_getProcessOutput) self.assertEqual(utils.getProcessOutputAndValue, original_getProcessOutputAndValue) def test_methodChaining(self): expect = Expect('command') self.assertEqual(expect, expect.exit(0)) self.assertEqual(expect, expect.stdout(b"output")) self.assertEqual(expect, expect.stderr(b"error")) def test_gpo_oneCommand(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command")) res = yield 
utils.getProcessOutput("command", ()) self.assertEqual(res, b'') testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_expectTwo_runOne(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command")) testcase.expectCommands(Expect("command2")) res = yield utils.getProcessOutput("command", ()) self.assertEqual(res, b'') testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "assert all expected commands were run") def test_gpo_wrongCommand(self): def method(testcase): testcase.expectCommands(Expect("command2")) d = utils.getProcessOutput("command", ()) return d result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") # assert we have a meaningful message self.assertTestFailure(result, "command2") def test_gpo_wrongArgs(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg")) yield utils.getProcessOutput("command", ("otherarg",)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpo_missingPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg").path("/home")) yield utils.getProcessOutput("command", ("otherarg",)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpo_wrongPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg").path("/home")) yield utils.getProcessOutput("command", ("otherarg",), path="/work") testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpo_notCurrentPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg")) yield utils.getProcessOutput("command", ("otherarg",), path="/work") testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpo_errorOutput(self): def method(testcase): testcase.expectCommands(Expect("command").stderr(b"some test")) d = testcase.assertFailure( utils.getProcessOutput("command", ()), [IOError]) return d result = self.runTestMethod(method) self.assertTestFailure(result, "got stderr: " + repr(b'some test')) def test_gpo_errorOutput_errtoo(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").stderr(b"some test")) res = yield utils.getProcessOutput("command", (), errortoo=True) testcase.assertEqual(res, b"some test") result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_exitIgnored(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").exit(1)) res = yield utils.getProcessOutput("command", ()) self.assertEqual(res, b'') result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_output(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").stdout(b"stdout")) res = yield utils.getProcessOutput("command", ()) testcase.assertEqual(res, b"stdout") result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_outputAndError(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands( Expect("command").stdout(b"stdout").stderr(b"stderr")) res = yield utils.getProcessOutput("command", (), 
errortoo=True) testcase.assertSubstring(b"stdout", res) testcase.assertSubstring(b"stderr", res) result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_environ_success(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command")) testcase.addGetProcessOutputExpectEnv({'key': 'value'}) res = yield utils.getProcessOutput("command", (), env={'key': 'value'}) self.assertEqual(res, b'') testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpo_environ_wrongValue(self): def method(testcase): testcase.expectCommands(Expect("command")) testcase.addGetProcessOutputExpectEnv({'key': 'value'}) d = utils.getProcessOutput( "command", (), env={'key': 'wrongvalue'}) return d result = self.runTestMethod(method) self.assertTestFailure( result, "Expected environment to have key = 'value'") def test_gpo_environ_missing(self): def method(testcase): testcase.expectCommands(Expect("command")) testcase.addGetProcessOutputExpectEnv({'key': 'value'}) d = utils.getProcessOutput("command", ()) return d result = self.runTestMethod(method) self.assertTestFailure( result, "Expected environment to have key = 'value'") def test_gpoav_oneCommand(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command")) res = yield utils.getProcessOutputAndValue("command", ()) self.assertEqual(res, (b'', b'', 0)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpoav_expectTwo_runOne(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command")) testcase.expectCommands(Expect("command2")) res = yield utils.getProcessOutputAndValue("command", ()) self.assertEqual(res, (b'', b'', 0)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "assert all expected commands were run") def test_gpoav_wrongCommand(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command2")) yield utils.getProcessOutputAndValue("command", ()) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpoav_wrongArgs(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg")) yield utils.getProcessOutputAndValue("command", ("otherarg",)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpoav_missingPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg").path("/home")) yield utils.getProcessOutputAndValue("command", ("otherarg",)) testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpoav_wrongPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg").path("/home")) yield utils.getProcessOutputAndValue( "command", ("otherarg",), path="/work") testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command run") def test_gpoav_notCurrentPath(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command", "arg")) yield utils.getProcessOutputAndValue( "command", ("otherarg",), path="/work") testcase.assertAllCommandsRan() result = self.runTestMethod(method) self.assertTestFailure(result, "unexpected command 
run") def test_gpoav_errorOutput(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").stderr(b"some test")) res = yield utils.getProcessOutputAndValue("command", ()) self.assertEqual(res, (b'', b'some test', 0)) result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpoav_exit(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").exit(1)) res = yield utils.getProcessOutputAndValue("command", ()) self.assertEqual(res, (b'', b'', 1)) result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpoav_output(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands(Expect("command").stdout(b"stdout")) res = yield utils.getProcessOutputAndValue("command", ()) testcase.assertEqual(res, (b"stdout", b'', 0)) result = self.runTestMethod(method) self.assertSuccessful(result) def test_gpoav_outputAndError(self): @defer.inlineCallbacks def method(testcase): testcase.expectCommands( Expect("command").stdout(b"stdout").stderr(b"stderr")) res = yield utils.getProcessOutputAndValue("command", ()) testcase.assertEqual(res, (b"stdout", b'stderr', 0)) result = self.runTestMethod(method) self.assertSuccessful(result) buildbot-2.6.0/master/buildbot/test/unit/test_test_util_validation.py000066400000000000000000000173521361162603000262250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import locale from twisted.python import log from twisted.trial import unittest from buildbot.test.util import validation from buildbot.util import UTC class VerifyDict(unittest.TestCase): def doValidationTest(self, validator, good, bad): for g in good: log.msg('expect %r to be good' % (g,)) msgs = list(validator.validate('g', g)) self.assertEqual(msgs, [], 'messages for %r' % (g,)) for b in bad: log.msg('expect %r to be bad' % (b,)) msgs = list(validator.validate('b', b)) self.assertNotEqual(msgs, [], 'no messages for %r' % (b,)) log.msg('..got messages:') for msg in msgs: log.msg(" " + msg) def test_IntValidator(self): self.doValidationTest(validation.IntValidator(), good=[ 1, 10 ** 100 ], bad=[ 1.0, "one", "1", None ]) def test_BooleanValidator(self): self.doValidationTest(validation.BooleanValidator(), good=[ True, False ], bad=[ "yes", "no", 1, 0, None ]) def test_StringValidator(self): self.doValidationTest(validation.StringValidator(), good=[ "unicode only" ], bad=[ None, b"bytestring" ]) def test_BinaryValidator(self): self.doValidationTest(validation.BinaryValidator(), good=[ b"bytestring" ], bad=[ None, "no unicode" ]) def test_DateTimeValidator(self): self.doValidationTest(validation.DateTimeValidator(), good=[ datetime.datetime( 1980, 6, 15, 12, 31, 15, tzinfo=UTC), ], bad=[ None, 198847493, # no timezone datetime.datetime(1980, 6, 15, 12, 31, 15), ]) def test_IdentifierValidator(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. raise(unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding))) self.doValidationTest(validation.IdentifierValidator(50), good=[ "linux", "Linux", "abc123", "a" * 50, '\N{SNOWMAN}' ], bad=[ None, '', b'linux', 'a/b', "a.b.c.d", "a-b_c.d9", 'spaces not allowed', "a" * 51, "123 no initial digits", ]) def test_NoneOk(self): self.doValidationTest( validation.NoneOk(validation.BooleanValidator()), good=[ True, False, None ], bad=[ 1, "yes" ]) def test_DictValidator(self): self.doValidationTest(validation.DictValidator( a=validation.BooleanValidator(), b=validation.StringValidator(), optionalNames=['b']), good=[ {'a': True}, {'a': True, 'b': 'xyz'}, ], bad=[ None, 1, "hi", {}, {'a': 1}, {'a': 1, 'b': 'xyz'}, {'a': True, 'b': 999}, {'a': True, 'b': 'xyz', 'c': 'extra'}, ]) def test_DictValidator_names(self): v = validation.DictValidator( a=validation.BooleanValidator()) self.assertEqual(list(v.validate('v', {'a': 1})), [ "v['a'] (1) is not a boolean" ]) def test_ListValidator(self): self.doValidationTest( validation.ListValidator(validation.BooleanValidator()), good=[ [], [True], [False, True], ], bad=[ None, ['a'], [True, 'a'], 1, "hi" ]) def test_ListValidator_names(self): v = validation.ListValidator(validation.BooleanValidator()) self.assertEqual(list(v.validate('v', ['a'])), [ "v[0] ('a') is not a boolean" ]) def test_SourcedPropertiesValidator(self): self.doValidationTest(validation.SourcedPropertiesValidator(), good=[ {'pname': ('{"a":"b"}', 'test')}, ], bad=[ None, 1, b"hi", {'pname': {b'a': b'b'}}, # no source # name not unicode {'pname': ({b'a': b'b'}, 'test')}, # source not unicode {'pname': ({b'a': b'b'}, 'test')}, # self is not json-able {'pname': (self, 'test')}, ]) def test_MessageValidator(self): self.doValidationTest(validation.MessageValidator( events=[b'started', b'stopped'], 
messageValidator=validation.DictValidator( a=validation.BooleanValidator(), xid=validation.IntValidator(), yid=validation.IntValidator())), good=[ (('thing', '1', '2', 'started'), {'xid': 1, 'yid': 2, 'a': True}), ], bad=[ # routingKey is not a tuple ('thing', {}), # routingKey has wrong event (('thing', '1', '2', 'exploded'), {'xid': 1, 'yid': 2, 'a': True}), # routingKey element has wrong type (('thing', 1, 2, 'started'), {'xid': 1, 'yid': 2, 'a': True}), # routingKey element isn't in message (('thing', '1', '2', 'started'), {'xid': 1, 'a': True}), # message doesn't validate (('thing', '1', '2', 'started'), {'xid': 1, 'yid': 2, 'a': 'x'}), ]) def test_Selector(self): sel = validation.Selector() sel.add(lambda x: x == 'int', validation.IntValidator()) sel.add(lambda x: x == 'str', validation.StringValidator()) self.doValidationTest(sel, good=[ ('int', 1), ('str', 'hi'), ], bad=[ ('int', 'hi'), ('str', 1), ('float', 1.0), ]) buildbot-2.6.0/master/buildbot/test/unit/test_test_util_warnings.py000066400000000000000000000131031361162603000257110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import warnings from twisted.trial import unittest from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.test.util.warnings import assertProducesWarning from buildbot.test.util.warnings import assertProducesWarnings from buildbot.test.util.warnings import ignoreWarning class SomeWarning(Warning): pass class OtherWarning(Warning): pass class TestWarningsFilter(unittest.TestCase): def test_warnigs_caught(self): # Assertion is correct. with assertProducesWarning(SomeWarning): warnings.warn("test", SomeWarning) def test_warnigs_caught_num_check(self): # Assertion is correct. with assertProducesWarnings(SomeWarning, num_warnings=3): warnings.warn("1", SomeWarning) warnings.warn("2", SomeWarning) warnings.warn("3", SomeWarning) def test_warnigs_caught_num_check_fail(self): def f1(): with assertProducesWarnings(SomeWarning, num_warnings=2): pass with self.assertRaises(AssertionError): f1() def f2(): with assertProducesWarnings(SomeWarning, num_warnings=2): warnings.warn("1", SomeWarning) with self.assertRaises(AssertionError): f2() def f3(): with assertProducesWarnings(SomeWarning, num_warnings=2): warnings.warn("1", SomeWarning) warnings.warn("2", SomeWarning) warnings.warn("3", SomeWarning) with self.assertRaises(AssertionError): f3() def test_warnigs_caught_pattern_check(self): # Assertion is correct. with assertProducesWarning(SomeWarning, message_pattern=r"t.st"): warnings.warn("The test", SomeWarning) def test_warnigs_caught_pattern_check_fail(self): def f(): # Assertion fails. with assertProducesWarning(SomeWarning, message_pattern=r"other"): warnings.warn("The test", SomeWarning) with self.assertRaises(AssertionError): f() def test_warnigs_caught_patterns_check(self): # Assertion is correct. 
with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2", "3"]): warnings.warn("log 1 message", SomeWarning) warnings.warn("log 2 message", SomeWarning) warnings.warn("log 3 message", SomeWarning) def test_warnigs_caught_patterns_check_fails(self): def f1(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 1", SomeWarning) with self.assertRaises(AssertionError): f1() def f2(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 2", SomeWarning) warnings.warn("msg 1", SomeWarning) with self.assertRaises(AssertionError): f2() def f3(): # Assertion fails. with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2"]): warnings.warn("msg 1", SomeWarning) warnings.warn("msg 2", SomeWarning) warnings.warn("msg 3", SomeWarning) with self.assertRaises(AssertionError): f3() def test_no_warnigs_check(self): with assertNotProducesWarnings(SomeWarning): pass with ignoreWarning(OtherWarning): with assertNotProducesWarnings(SomeWarning): warnings.warn("msg 3", OtherWarning) def test_warnigs_filter(self): with ignoreWarning(OtherWarning): with assertProducesWarnings(SomeWarning, messages_patterns=["1", "2", "3"]): warnings.warn("other", OtherWarning) warnings.warn("log 1 message", SomeWarning) warnings.warn("other", OtherWarning) warnings.warn("log 2 message", SomeWarning) warnings.warn("other", OtherWarning) warnings.warn("log 3 message", SomeWarning) warnings.warn("other", OtherWarning) def test_nested_filters(self): with assertProducesWarnings(SomeWarning, messages_patterns=["some 1"]): with assertProducesWarnings(OtherWarning, messages_patterns=["other 1"]): warnings.warn("other 1", OtherWarning) warnings.warn("some 1", SomeWarning) def test_ignore_warnings(self): with assertNotProducesWarnings(SomeWarning): with ignoreWarning(SomeWarning): warnings.warn("some 1", SomeWarning) buildbot-2.6.0/master/buildbot/test/unit/test_util.py000066400000000000000000000417121361162603000227510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import datetime import locale import os import mock from twisted.internet import reactor from twisted.internet import task from twisted.trial import unittest from buildbot import util class formatInterval(unittest.TestCase): def test_zero(self): self.assertEqual(util.formatInterval(0), "0 secs") def test_seconds_singular(self): self.assertEqual(util.formatInterval(1), "1 secs") def test_seconds(self): self.assertEqual(util.formatInterval(7), "7 secs") def test_minutes_one(self): self.assertEqual(util.formatInterval(60), "60 secs") def test_minutes_over_one(self): self.assertEqual(util.formatInterval(61), "1 mins, 1 secs") def test_minutes(self): self.assertEqual(util.formatInterval(300), "5 mins, 0 secs") def test_hours_one(self): self.assertEqual(util.formatInterval(3600), "60 mins, 0 secs") def test_hours_over_one_sec(self): self.assertEqual(util.formatInterval(3601), "1 hrs, 1 secs") def test_hours_over_one_min(self): self.assertEqual(util.formatInterval(3660), "1 hrs, 60 secs") def test_hours(self): self.assertEqual(util.formatInterval(7200), "2 hrs, 0 secs") def test_mixed(self): self.assertEqual(util.formatInterval(7392), "2 hrs, 3 mins, 12 secs") class TestHumanReadableDelta(unittest.TestCase): def test_timeDeltaToHumanReadable(self): """ It will return a human readable time difference. """ try: datetime.datetime.fromtimestamp(1) except OSError: raise unittest.SkipTest( "Python 3.6 bug on Windows: " "https://bugs.python.org/issue29097") result = util.human_readable_delta(1, 1) self.assertEqual('super fast', result) result = util.human_readable_delta(1, 2) self.assertEqual('1 seconds', result) result = util.human_readable_delta(1, 61) self.assertEqual('1 minutes', result) result = util.human_readable_delta(1, 62) self.assertEqual('1 minutes, 1 seconds', result) result = util.human_readable_delta(1, 60 * 60 + 1) self.assertEqual('1 hours', result) result = util.human_readable_delta(1, 60 * 60 + 61) self.assertEqual('1 hours, 1 minutes', result) result = util.human_readable_delta(1, 60 * 60 + 62) self.assertEqual('1 hours, 1 minutes, 1 seconds', result) result = util.human_readable_delta(1, 24 * 60 * 60 + 1) self.assertEqual('1 days', result) result = util.human_readable_delta(1, 24 * 60 * 60 + 2) self.assertEqual('1 days, 1 seconds', result) class TestFuzzyInterval(unittest.TestCase): def test_moment(self): self.assertEqual(util.fuzzyInterval(1), "a moment") def test_seconds(self): self.assertEqual(util.fuzzyInterval(17), "17 seconds") def test_seconds_rounded(self): self.assertEqual(util.fuzzyInterval(48), "50 seconds") def test_minute(self): self.assertEqual(util.fuzzyInterval(58), "a minute") def test_minutes(self): self.assertEqual(util.fuzzyInterval(3 * 60 + 24), "3 minutes") def test_minutes_rounded(self): self.assertEqual(util.fuzzyInterval(32 * 60 + 24), "30 minutes") def test_hour(self): self.assertEqual(util.fuzzyInterval(3600 + 1200), "an hour") def test_hours(self): self.assertEqual(util.fuzzyInterval(9 * 3600 - 720), "9 hours") def test_day(self): self.assertEqual(util.fuzzyInterval(32 * 3600 + 124), "a day") def test_days(self): self.assertEqual(util.fuzzyInterval((19 + 24) * 3600 + 124), "2 days") def test_month(self): self.assertEqual(util.fuzzyInterval(36 * 24 * 3600 + 124), "a month") def test_months(self): self.assertEqual(util.fuzzyInterval(86 * 24 * 3600 + 124), "3 months") def test_year(self): self.assertEqual(util.fuzzyInterval(370 * 24 * 3600), "a year") def test_years(self): self.assertEqual(util.fuzzyInterval((2 * 365 + 96) 
* 24 * 3600), "2 years") class safeTranslate(unittest.TestCase): def test_str_good(self): self.assertEqual(util.safeTranslate(str("full")), b"full") def test_str_bad(self): self.assertEqual(util.safeTranslate(str("speed=slow;quality=high")), b"speed_slow_quality_high") def test_str_pathological(self): # if you needed proof this wasn't for use with sensitive data self.assertEqual(util.safeTranslate(str("p\ath\x01ogy")), b"p\ath\x01ogy") # bad chars still here! def test_unicode_good(self): self.assertEqual(util.safeTranslate("full"), b"full") def test_unicode_bad(self): self.assertEqual(util.safeTranslate(str("speed=slow;quality=high")), b"speed_slow_quality_high") def test_unicode_pathological(self): self.assertEqual(util.safeTranslate("\u0109"), b"\xc4\x89") # yuck! class naturalSort(unittest.TestCase): def test_alpha(self): self.assertEqual( util.naturalSort(['x', 'aa', 'ab']), ['aa', 'ab', 'x']) def test_numeric(self): self.assertEqual( util.naturalSort(['1', '10', '11', '2', '20']), ['1', '2', '10', '11', '20']) def test_alphanum(self): l1 = 'aa10ab aa1ab aa10aa f a aa3 aa30 aa3a aa30a'.split() l2 = 'a aa1ab aa3 aa3a aa10aa aa10ab aa30 aa30a f'.split() self.assertEqual(util.naturalSort(l1), l2) class none_or_str(unittest.TestCase): def test_none(self): self.assertEqual(util.none_or_str(None), None) def test_str(self): self.assertEqual(util.none_or_str("hi"), "hi") def test_int(self): self.assertEqual(util.none_or_str(199), "199") class TimeFunctions(unittest.TestCase): def test_UTC(self): self.assertEqual(util.UTC.utcoffset(datetime.datetime.now()), datetime.timedelta(0)) self.assertEqual(util.UTC.dst(datetime.datetime.now()), datetime.timedelta(0)) self.assertEqual(util.UTC.tzname(datetime.datetime.utcnow()), "UTC") def test_epoch2datetime(self): self.assertEqual(util.epoch2datetime(0), datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=util.UTC)) self.assertEqual(util.epoch2datetime(1300000000), datetime.datetime(2011, 3, 13, 7, 6, 40, tzinfo=util.UTC)) def test_datetime2epoch(self): dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=util.UTC) self.assertEqual(util.datetime2epoch(dt), 0) dt = datetime.datetime(2011, 3, 13, 7, 6, 40, tzinfo=util.UTC) self.assertEqual(util.datetime2epoch(dt), 1300000000) class DiffSets(unittest.TestCase): def test_empty(self): removed, added = util.diffSets(set([]), set([])) self.assertEqual((removed, added), (set([]), set([]))) def test_no_lists(self): removed, added = util.diffSets([1, 2], [2, 3]) self.assertEqual((removed, added), (set([1]), set([3]))) def test_no_overlap(self): removed, added = util.diffSets(set([1, 2]), set([3, 4])) self.assertEqual((removed, added), (set([1, 2]), set([3, 4]))) def test_no_change(self): removed, added = util.diffSets(set([1, 2]), set([1, 2])) self.assertEqual((removed, added), (set([]), set([]))) def test_added(self): removed, added = util.diffSets(set([1, 2]), set([1, 2, 3])) self.assertEqual((removed, added), (set([]), set([3]))) def test_removed(self): removed, added = util.diffSets(set([1, 2]), set([1])) self.assertEqual((removed, added), (set([2]), set([]))) class MakeList(unittest.TestCase): def test_empty_string(self): self.assertEqual(util.makeList(''), ['']) def test_None(self): self.assertEqual(util.makeList(None), []) def test_string(self): self.assertEqual(util.makeList('hello'), ['hello']) def test_unicode(self): self.assertEqual(util.makeList('\N{SNOWMAN}'), ['\N{SNOWMAN}']) def test_list(self): self.assertEqual(util.makeList(['a', 'b']), ['a', 'b']) def test_tuple(self): 
self.assertEqual(util.makeList(('a', 'b')), ['a', 'b']) def test_copy(self): input = ['a', 'b'] output = util.makeList(input) input.append('c') self.assertEqual(output, ['a', 'b']) class Flatten(unittest.TestCase): def test_simple(self): self.assertEqual(util.flatten([1, 2, 3]), [1, 2, 3]) def test_deep(self): self.assertEqual(util.flatten([[1, 2], 3, [[4]]]), [1, 2, 3, 4]) # def test_deeply_nested(self): # self.assertEqual(util.flatten([5, [6, (7, 8)]]), # [5, 6, 7, 8]) # def test_tuples(self): # self.assertEqual(util.flatten([(1, 2), 3]), [1, 2, 3]) def test_dict(self): d = {'a': [5, 6, 7], 'b': [7, 8, 9]} self.assertEqual(util.flatten(d), d) def test_string(self): self.assertEqual(util.flatten("abc"), "abc") class Ascii2Unicode(unittest.TestCase): def test_unicode(self): rv = util.bytes2unicode('\N{SNOWMAN}', encoding='ascii') self.assertEqual((rv, type(rv)), ('\N{SNOWMAN}', str)) def test_ascii(self): rv = util.bytes2unicode('abcd', encoding='ascii') self.assertEqual((rv, type(rv)), ('abcd', str)) def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.bytes2unicode(b'a\x85', encoding='ascii') def test_None(self): self.assertEqual(util.bytes2unicode(None, encoding='ascii'), None) def test_bytes2unicode(self): rv1 = util.bytes2unicode(b'abcd') rv2 = util.bytes2unicode('efgh') self.assertEqual(type(rv1), str) self.assertEqual(type(rv2), str) class StringToBoolean(unittest.TestCase): def test_it(self): stringValues = [ (b'on', True), (b'true', True), (b'yes', True), (b'1', True), (b'off', False), (b'false', False), (b'no', False), (b'0', False), (b'ON', True), (b'TRUE', True), (b'YES', True), (b'OFF', False), (b'FALSE', False), (b'NO', False), ] for s, b in stringValues: self.assertEqual(util.string2boolean(s), b, repr(s)) def test_ascii(self): rv = util.bytes2unicode(b'abcd', encoding='ascii') self.assertEqual((rv, type(rv)), ('abcd', str)) def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.bytes2unicode(b'a\x85', encoding='ascii') def test_None(self): self.assertEqual(util.bytes2unicode(None, encoding='ascii'), None) class AsyncSleep(unittest.TestCase): def test_sleep(self): clock = task.Clock() self.patch(reactor, 'callLater', clock.callLater) d = util.asyncSleep(2) self.assertFalse(d.called) clock.advance(1) self.assertFalse(d.called) clock.advance(1) self.assertTrue(d.called) class FunctionalEnvironment(unittest.TestCase): def test_working_locale(self): environ = {'LANG': 'en_GB.UTF-8'} self.patch(os, 'environ', environ) config = mock.Mock() util.check_functional_environment(config) self.assertEqual(config.error.called, False) def test_broken_locale(self): def err(): raise KeyError self.patch(locale, 'getdefaultlocale', err) config = mock.Mock() util.check_functional_environment(config) config.error.assert_called_with(mock.ANY) class StripUrlPassword(unittest.TestCase): def test_simple_url(self): self.assertEqual(util.stripUrlPassword('http://foo.com/bar'), 'http://foo.com/bar') def test_username(self): self.assertEqual(util.stripUrlPassword('http://d@foo.com/bar'), 'http://d@foo.com/bar') def test_username_with_at(self): self.assertEqual(util.stripUrlPassword('http://d@bb.net@foo.com/bar'), 'http://d@bb.net@foo.com/bar') def test_username_pass(self): self.assertEqual(util.stripUrlPassword('http://d:secret@foo.com/bar'), 'http://d:xxxx@foo.com/bar') def test_username_pass_with_at(self): self.assertEqual( util.stripUrlPassword('http://d@bb.net:scrt@foo.com/bar'), 'http://d@bb.net:xxxx@foo.com/bar') class JoinList(unittest.TestCase): def 
test_list(self): self.assertEqual(util.join_list(['aa', 'bb']), 'aa bb') def test_tuple(self): self.assertEqual(util.join_list(('aa', 'bb')), 'aa bb') def test_string(self): self.assertEqual(util.join_list('abc'), 'abc') def test_unicode(self): self.assertEqual(util.join_list('abc'), 'abc') def test_nonascii(self): with self.assertRaises(UnicodeDecodeError): util.join_list([b'\xff']) class CommandToString(unittest.TestCase): def test_short_string(self): self.assertEqual(util.command_to_string("ab cd"), "'ab cd'") def test_long_string(self): self.assertEqual(util.command_to_string("ab cd ef"), "'ab cd ...'") def test_list(self): self.assertEqual(util.command_to_string(['ab', 'cd', 'ef']), "'ab cd ...'") def test_nested_list(self): self.assertEqual(util.command_to_string(['ab', ['cd', ['ef']]]), "'ab cd ...'") def test_object(self): # this looks like a renderable self.assertEqual(util.command_to_string(object()), None) def test_list_with_objects(self): # the object looks like a renderable, and is skipped self.assertEqual(util.command_to_string(['ab', object(), 'cd']), "'ab cd'") def test_invalid_ascii(self): self.assertEqual(util.command_to_string(b'a\xffc'), "'a\ufffdc'") class TestRewrap(unittest.TestCase): def test_main(self): tests = [ ("", "", None), ("\n", "\n", None), ("\n ", "\n", None), (" \n", "\n", None), (" \n ", "\n", None), (""" multiline with indent """, "\nmultiline with indent", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent """, "multiline with indent\n", None), ("""\ multiline with indent and formatting """, "multiline with indent\n and\n formatting\n", None), ("""\ multiline with indent and wrapping and formatting """, "multiline with\nindent and\nwrapping\n and\n formatting\n", 15), ] for text, expected, width in tests: self.assertEqual(util.rewrap(text, width=width), expected) class TestMerge(unittest.TestCase): def test_merge(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1} }, { 'a': {'c': 2} }), { 'a': {'b': 1, 'c': 2} }) def test_overwrite(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1} }, { 'a': 1 }), { 'a': 1 }) def test_overwrite2(self): self.assertEqual( util.dictionary_merge( { 'a': {'b': 1, 'c': 2} }, { 'a': {'b': [1, 2, 3]} }), { 'a': {'b': [1, 2, 3], 'c': 2} }) buildbot-2.6.0/master/buildbot/test/unit/test_util_ComparableMixin.py000066400000000000000000000063131361162603000261010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import util class ComparableMixin(unittest.TestCase): class Foo(util.ComparableMixin): compare_attrs = ("a", "b") def __init__(self, a, b, c): self.a, self.b, self.c = a, b, c class Bar(Foo, util.ComparableMixin): compare_attrs = ("b", "c") def setUp(self): self.f123 = self.Foo(1, 2, 3) self.f124 = self.Foo(1, 2, 4) self.f134 = self.Foo(1, 3, 4) self.b123 = self.Bar(1, 2, 3) self.b223 = self.Bar(2, 2, 3) self.b213 = self.Bar(2, 1, 3) def test_equality_identity(self): self.assertEqual(self.f123, self.f123) def test_equality_same(self): another_f123 = self.Foo(1, 2, 3) self.assertEqual(self.f123, another_f123) def test_equality_unimportantDifferences(self): self.assertEqual(self.f123, self.f124) def test_inequality_unimportantDifferences_subclass(self): # verify that the parent class's compare_attrs does # affect the subclass self.assertNotEqual(self.b123, self.b223) def test_inequality_importantDifferences(self): self.assertNotEqual(self.f123, self.f134) def test_inequality_importantDifferences_subclass(self): self.assertNotEqual(self.b123, self.b213) def test_inequality_differentClasses(self): self.assertNotEqual(self.f123, self.b123) def test_instance_attribute_not_used(self): # setting compare_attrs as an instance method doesn't # affect the outcome of the comparison another_f123 = self.Foo(1, 2, 3) another_f123.compare_attrs = ("b", "a") self.assertEqual(self.f123, another_f123) def test_ne_importantDifferences(self): self.assertNotEqual(self.f123, self.f134) def test_ne_differentClasses(self): self.assertNotEqual(self.f123, self.b123) def test_compare(self): self.assertEqual(self.f123, self.f123) self.assertNotEqual(self.b223, self.b213) self.assertGreater(self.b223, self.b213) # Different classes self.assertFalse(self.b223 > self.f123) self.assertGreaterEqual(self.b223, self.b213) self.assertGreaterEqual(self.b223, self.b223) # Different classes self.assertFalse(self.f123 >= self.b123) self.assertLess(self.b213, self.b223) self.assertLessEqual(self.b213, self.b223) self.assertLessEqual(self.b213, self.b213) # Different classes self.assertFalse(self.f123 <= self.b123) buildbot-2.6.0/master/buildbot/test/unit/test_util_bbcollections.py000066400000000000000000000045301361162603000256500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import bbcollections class KeyedSets(unittest.TestCase): def setUp(self): self.ks = bbcollections.KeyedSets() def test_getitem_default(self): self.assertEqual(self.ks['x'], set()) # remaining tests effectively cover __getitem__ def test_add(self): self.ks.add('y', 2) self.assertEqual(self.ks['y'], set([2])) def test_add_twice(self): self.ks.add('z', 2) self.ks.add('z', 4) self.assertEqual(self.ks['z'], set([2, 4])) def test_discard_noError(self): self.ks.add('full', 12) self.ks.discard('empty', 13) # should not fail self.ks.discard('full', 13) # nor this self.assertEqual(self.ks['full'], set([12])) def test_discard_existing(self): self.ks.add('yarn', 'red') self.ks.discard('yarn', 'red') self.assertEqual(self.ks['yarn'], set([])) def test_contains_true(self): self.ks.add('yarn', 'red') self.assertTrue('yarn' in self.ks) def test_contains_false(self): self.assertFalse('yarn' in self.ks) def test_contains_setNamesNotContents(self): self.ks.add('yarn', 'red') self.assertFalse('red' in self.ks) def test_pop_exists(self): self.ks.add('names', 'pop') self.ks.add('names', 'coke') self.ks.add('names', 'soda') popped = self.ks.pop('names') remaining = self.ks['names'] self.assertEqual((popped, remaining), (set(['pop', 'coke', 'soda']), set())) def test_pop_missing(self): self.assertEqual(self.ks.pop('flavors'), set()) buildbot-2.6.0/master/buildbot/test/unit/test_util_codebase.py000066400000000000000000000113721361162603000245750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
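# Illustrative sketch: bbcollections.KeyedSets behaves like a dict whose values
# are sets, with the conveniences shown by the tests above.  Assumes buildbot is
# importable; the key and value names are arbitrary.
from buildbot.util import bbcollections

pending = bbcollections.KeyedSets()
pending.add('builder-a', 12)            # add 12 to the set stored under 'builder-a'
pending.add('builder-a', 13)
assert pending['builder-a'] == {12, 13}
assert pending['builder-b'] == set()    # missing keys read as an empty set

pending.discard('builder-b', 99)        # discarding from a missing key is a no-op
assert 'builder-a' in pending           # membership checks key names, not contents
assert pending.pop('builder-a') == {12, 13}
assert pending['builder-a'] == set()    # pop empties the set for that key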
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util import scheduler from buildbot.test.util.misc import TestReactorMixin from buildbot.util import codebase from buildbot.util import state class FakeObject(codebase.AbsoluteSourceStampsMixin, state.StateMixin): name = 'fake-name' def __init__(self, master, codebases): self.master = master self.codebases = codebases class TestAbsoluteSourceStampsMixin(unittest.TestCase, scheduler.SchedulerMixin, TestReactorMixin): codebases = {'a': {'repository': '', 'branch': 'master'}, 'b': {'repository': '', 'branch': 'master'}} def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.db = self.master.db self.object = FakeObject(self.master, self.codebases) def mkch(self, **kwargs): ch = self.makeFakeChange(**kwargs) self.master.db.changes.fakeAddChangeInstance(ch) return ch @defer.inlineCallbacks def test_getCodebaseDict(self): cbd = yield self.object.getCodebaseDict('a') self.assertEqual(cbd, {'repository': '', 'branch': 'master'}) @defer.inlineCallbacks def test_getCodebaseDict_not_found(self): d = self.object.getCodebaseDict('c') yield self.assertFailure(d, KeyError) @defer.inlineCallbacks def test_getCodebaseDict_existing(self): self.db.state.fakeState('fake-name', 'FakeObject', lastCodebases={'a': { 'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}}) cbd = yield self.object.getCodebaseDict('a') self.assertEqual(cbd, {'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}) cbd = yield self.object.getCodebaseDict('b') self.assertEqual(cbd, {'repository': '', 'branch': 'master'}) @defer.inlineCallbacks def test_recordChange(self): yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='1234:abc', branch='master', number=10)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}}) @defer.inlineCallbacks def test_recordChange_older(self): self.db.state.fakeState('fake-name', 'FakeObject', lastCodebases={'a': { 'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}) yield self.object.getCodebaseDict('a') yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='1234:abc', branch='master', number=10)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}) @defer.inlineCallbacks def test_recordChange_newer(self): self.db.state.fakeState('fake-name', 'FakeObject', lastCodebases={'a': { 'repository': 'A', 'revision': '1234:abc', 'branch': 'master', 'lastChange': 10}}) yield self.object.getCodebaseDict('a') yield self.object.recordChange(self.mkch(codebase='a', repository='A', revision='2345:bcd', branch='master', number=20)) self.db.state.assertStateByClass('fake-name', 'FakeObject', lastCodebases={ 'a': {'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}) buildbot-2.6.0/master/buildbot/test/unit/test_util_debounce.py000066400000000000000000000203001361162603000246030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
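# Illustrative sketch: a simplified, standalone model of the rule the
# AbsoluteSourceStampsMixin tests above verify -- recordChange() only updates the
# stored codebase entry when the incoming change number is newer than the one
# already recorded.  This is plain Python for clarity, not the buildbot API itself.
def record_change(last_codebases, codebase, repository, revision, branch, number):
    current = last_codebases.get(codebase)
    if current is None or number > current['lastChange']:
        last_codebases[codebase] = {'repository': repository, 'revision': revision,
                                    'branch': branch, 'lastChange': number}
    return last_codebases

state = {'a': {'repository': 'A', 'revision': '2345:bcd', 'branch': 'master', 'lastChange': 20}}
record_change(state, 'a', 'A', '1234:abc', 'master', number=10)   # older change: ignored
assert state['a']['revision'] == '2345:bcd'
record_change(state, 'a', 'A', '3456:cde', 'master', number=30)   # newer change: recorded
assert state['a']['lastChange'] == 30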
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from twisted.python import failure from twisted.python import log from twisted.trial import unittest from buildbot.util import debounce class DebouncedClass: def __init__(self, reactor): self.callDeferred = None self.calls = 0 self.expCalls = 0 self.stopDeferreds = [] self.reactor = reactor @debounce.method(wait=4.0, get_reactor=lambda self: self.reactor) def maybe(self): assert not self.callDeferred self.calls += 1 log.msg('debounced function called') self.callDeferred = defer.Deferred() @self.callDeferred.addBoth def unset(x): log.msg('debounced function complete') self.callDeferred = None return x return self.callDeferred class DebounceTest(unittest.TestCase): def setUp(self): self.clock = task.Clock() def scenario(self, events): dbs = dict((k, DebouncedClass(self.clock)) for k in {n for n, _, _ in events}) while events: n, t, e = events.pop(0) db = dbs[n] log.msg('time=%f, event=%s' % (t, e)) if t > self.clock.seconds(): self.clock.advance(t - self.clock.seconds()) if e == 'maybe': db.maybe() elif e == 'called': db.expCalls += 1 elif e == 'complete': db.callDeferred.callback(None) elif e == 'fail': db.callDeferred.errback(failure.Failure(RuntimeError())) elif e == 'failure_logged': self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) elif e == 'check': pass # just check the expCalls elif e == 'start': db.maybe.start() elif e in ('stop', 'stop-and-called'): db.stopDeferreds.append(db.maybe.stop()) if e == 'stop-and-called': db.expCalls += 1 elif e == 'stopNotComplete': self.assertFalse(db.stopDeferreds[-1].called) elif e == 'stopComplete': self.assertTrue(db.stopDeferreds[-1].called) db.stopDeferreds.pop() else: self.fail("unknown scenario event %s" % e) for db in dbs.values(): self.assertEqual(db.calls, db.expCalls) def test_called_once(self): """The debounced method is called only after 4 seconds""" self.scenario([ (1, 0.0, 'maybe'), (1, 2.0, 'check'), (1, 4.0, 'called'), (1, 5.0, 'check'), (1, 6.0, 'complete'), (1, 7.0, 'check') ]) def test_coalesce_calls(self): """Multiple calls are coalesced during 4 seconds, but the function runs 4 seconds after the first call.""" self.scenario([ (1, 0.0, 'maybe'), (1, 1.0, 'maybe'), (1, 2.0, 'maybe'), (1, 3.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'check'), (1, 6.0, 'complete'), (1, 7.0, 'check'), ]) def test_second_call_during_first(self): """If the debounced method is called after an execution has begun, then a second execution will take place 4 seconds after the execution finishes, with intervening calls coalesced.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'maybe'), (1, 6.0, 'complete'), (1, 7.0, 'maybe'), (1, 9.0, 'maybe'), (1, 10.0, 'called'), (1, 11.0, 'check'), ]) def test_failure_logged(self): """If the debounced method fails, the error is logged, but otherwise it behaves as if it had succeeded.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'maybe'), (1, 6.0, 'fail'), (1, 6.0, 'failure_logged'), (1, 10.0, 'called'), (1, 11.0, 
'check'), ]) def test_instance_independence(self): """The timers for two instances are independent.""" self.scenario([ (1, 0.0, 'maybe'), (2, 2.0, 'maybe'), (1, 4.0, 'called'), (2, 6.0, 'called'), (1, 6.0, 'complete'), (2, 6.0, 'complete'), (1, 7.0, 'check'), ]) def test_start_when_started(self): """Calling meth.start when already started has no effect""" self.scenario([ (1, 0.0, 'start'), (1, 1.0, 'start'), ]) def test_stop_while_idle(self): """If the debounced method is stopped while idle, subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'stop'), (1, 0.0, 'stopComplete'), (1, 1.0, 'maybe'), (1, 6.0, 'check'), # not called ]) def test_stop_while_waiting(self): """If the debounced method is stopped while waiting, the waiting call occurs immediately, stop returns immediately, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 2.0, 'stop-and-called'), (1, 2.1, 'complete'), (1, 2.1, 'stopComplete'), (1, 3.0, 'maybe'), (1, 8.0, 'check'), # not called ]) def test_stop_while_running(self): """If the debounced method is stopped while running, the running call completes, stop returns only after the call completes, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_multiple_stops(self): """Multiple stop calls will return individually when the method completes.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 5.0, 'stop'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_stop_while_running_queued(self): """If the debounced method is stopped while running with another call queued, the running call completes, stop returns only after the call completes, the queued call never occurs, and subsequent calls do nothing.""" self.scenario([ (1, 0.0, 'maybe'), (1, 4.0, 'called'), (1, 4.5, 'maybe'), (1, 5.0, 'stop'), (1, 5.0, 'stopNotComplete'), (1, 6.0, 'complete'), (1, 6.0, 'stopComplete'), (1, 6.0, 'maybe'), (1, 10.0, 'check'), # not called ]) def test_start_after_stop(self): """After a stop and subsequent start, a call to the debounced method causes an invocation 4 seconds later.""" self.scenario([ (1, 0.0, 'stop'), (1, 1.0, 'maybe'), (1, 2.0, 'start'), (1, 2.0, 'maybe'), (1, 5.0, 'check'), # not called (1, 6.0, 'called'), ]) buildbot-2.6.0/master/buildbot/test/unit/test_util_deferwaiter.py000066400000000000000000000144311361162603000253300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
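# Illustrative sketch: using buildbot.util.debounce.method outside of the scenario
# harness above.  Repeated calls inside the wait window collapse into a single
# invocation that fires `wait` seconds after the first call.  The class and
# attribute names here are made up for the example; the decorator arguments match
# those used in the tests.
from twisted.internet import task
from buildbot.util import debounce

class StatusUpdater:
    def __init__(self, reactor):
        self.reactor = reactor
        self.pushes = 0

    @debounce.method(wait=4.0, get_reactor=lambda self: self.reactor)
    def push(self):
        self.pushes += 1

clock = task.Clock()
updater = StatusUpdater(clock)
updater.push()
updater.push()          # coalesced with the first call
clock.advance(4.0)      # the debounced method runs once, 4 seconds after the first call
assert updater.pushes == 1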
# # Copyright Buildbot Team Members from parameterized import parameterized from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.util import asyncSleep from buildbot.util.deferwaiter import DeferWaiter from buildbot.util.deferwaiter import RepeatedActionHandler class TestException(Exception): pass class WaiterTests(unittest.TestCase): def test_add_deferred_called(self): w = DeferWaiter() w.add(defer.succeed(None)) d = w.wait() self.assertTrue(d.called) def test_add_non_deferred(self): w = DeferWaiter() w.add(2) d = w.wait() self.assertTrue(d.called) def test_add_deferred_not_called_and_call_later(self): w = DeferWaiter() d1 = defer.Deferred() w.add(d1) d = w.wait() self.assertFalse(d.called) d1.callback(None) self.assertTrue(d.called) class RepeatedActionHandlerTests(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_does_not_add_action_on_start(self): w = DeferWaiter() times = [] def action(): times.append(self.reactor.seconds()) h = RepeatedActionHandler(self.reactor, w, 1, action) self.reactor.advance(2) h.stop() self.assertEqual(len(times), 0) d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_runs_action_with_timer(self, name, timer_after_action): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_runs_action_after_exception_with_timer(self, name, timer_after_action): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) if len(times) == 2: raise TestException() h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() d = w.wait() self.assertTrue(d.called) self.flushLoggedErrors(TestException) yield d @defer.inlineCallbacks def test_ignores_duplicate_start_or_stop(self): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action) h.start() h.start() self.reactor.pump([0.1] * 35) self.assertEqual(times, [1.1, 2.1, 3.1]) h.stop() h.stop() d = w.wait() self.assertTrue(d.called) yield d @defer.inlineCallbacks def test_can_update_interval(self): w = DeferWaiter() times = [] def action(): times.append(round(self.reactor.seconds(), 1)) h = RepeatedActionHandler(self.reactor, w, 1, action) h.start() self.reactor.pump([0.1] * 15) h.setInterval(2) self.reactor.pump([0.1] * 50) self.assertEqual(times, [1.1, 2.1, 4.1, 6.2]) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True, [1.1, 2.6, 4.1]), ('before_action', False, [1.1, 2.1, 3.1, 4.1]), ]) @defer.inlineCallbacks def test_runs_action_with_timer_delay(self, name, timer_after_action, expected_times): w = DeferWaiter() times = [] @defer.inlineCallbacks def action(): times.append(round(self.reactor.seconds(), 1)) yield asyncSleep(0.5, reactor=self.reactor) h = RepeatedActionHandler(self.reactor, w, 1, action, 
start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 47) self.assertEqual(times, expected_times) h.stop() d = w.wait() self.assertTrue(d.called) yield d @parameterized.expand([ ('after_action', True), ('before_action', False), ]) @defer.inlineCallbacks def test_waiter_waits_for_action_timer_starts(self, name, timer_after_action): w = DeferWaiter() times = [] @defer.inlineCallbacks def action(): times.append(round(self.reactor.seconds(), 1)) yield asyncSleep(0.5, reactor=self.reactor) h = RepeatedActionHandler(self.reactor, w, 1, action, start_timer_after_action_completes=timer_after_action) h.start() self.reactor.pump([0.1] * 12) self.assertEqual(times, [1.1]) d = w.wait() self.assertFalse(d.called) h.stop() self.assertFalse(d.called) self.reactor.pump([0.1] * 5) # action started on 1.1, will end at 1.6 self.assertTrue(d.called) yield d buildbot-2.6.0/master/buildbot/test/unit/test_util_eventual.py000066400000000000000000000070671361162603000246610ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from twisted.trial import unittest from buildbot.util import eventual class Eventually(unittest.TestCase): def setUp(self): # reset the queue to its base state eventual._theSimpleQueue = eventual._SimpleCallQueue() self.old_log_err = log.err self.results = [] def tearDown(self): log.err = self.old_log_err return eventual.flushEventualQueue() # utility callback def cb(self, *args, **kwargs): r = args if kwargs: r = r + (kwargs,) self.results.append(r) # flush the queue and assert results @defer.inlineCallbacks def assertResults(self, exp): yield eventual.flushEventualQueue() self.assertEqual(self.results, exp) # tests def test_eventually_calls(self): eventual.eventually(self.cb) return self.assertResults([()]) def test_eventually_args(self): eventual.eventually(self.cb, 1, 2, a='a') return self.assertResults([(1, 2, dict(a='a'))]) def test_eventually_err(self): # monkey-patch log.err; this is restored by tearDown log.err = lambda: self.results.append("err") def cb_fails(): raise RuntimeError("should not cause test failure") eventual.eventually(cb_fails) return self.assertResults(['err']) def test_eventually_butNotNow(self): eventual.eventually(self.cb, 1) self.assertFalse(self.results != []) return self.assertResults([(1,)]) def test_eventually_order(self): eventual.eventually(self.cb, 1) eventual.eventually(self.cb, 2) eventual.eventually(self.cb, 3) return self.assertResults([(1,), (2,), (3,)]) def test_flush_waitForChainedEventuallies(self): def chain(n): self.results.append(n) if n <= 0: return eventual.eventually(chain, n - 1) chain(3) # (the flush this tests is implicit in assertResults) return self.assertResults([3, 2, 1, 0]) def test_flush_waitForTreeEventuallies(self): # a more complex set of eventualities def tree(n): 
self.results.append(n) if n <= 0: return eventual.eventually(tree, n - 1) eventual.eventually(tree, n - 1) tree(2) # (the flush this tests is implicit in assertResults) return self.assertResults([2, 1, 1, 0, 0, 0, 0]) def test_flush_duringTurn(self): testd = defer.Deferred() def cb(): d = eventual.flushEventualQueue() d.addCallback(testd.callback) eventual.eventually(cb) return testd def test_fireEventually_call(self): d = eventual.fireEventually(13) d.addCallback(self.cb) return self.assertResults([(13,)]) buildbot-2.6.0/master/buildbot/test/unit/test_util_git.py000066400000000000000000000124261361162603000236140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from parameterized import parameterized from twisted.trial import unittest from buildbot.test.util import config from buildbot.util.git import GitMixin from buildbot.util.git import escapeShellArgIfNeeded from buildbot.util.git import getSshKnownHostsContents class TestEscapeShellArgIfNeeded(unittest.TestCase): def assert_escapes(self, arg): escaped = '"{}"'.format(arg) self.assertEqual(escapeShellArgIfNeeded(arg), escaped) def assert_does_not_escape(self, arg): self.assertEqual(escapeShellArgIfNeeded(arg), arg) def test_empty(self): self.assert_escapes('') def test_spaces(self): self.assert_escapes(' ') self.assert_escapes('a ') self.assert_escapes(' a') self.assert_escapes('a b') def test_special(self): self.assert_escapes('a=b') self.assert_escapes('a%b') self.assert_escapes('a(b') self.assert_escapes('a[b') def test_no_escape(self): self.assert_does_not_escape('abc') self.assert_does_not_escape('a_b') self.assert_does_not_escape('-opt') self.assert_does_not_escape('--opt') class TestSetUpGit(GitMixin, unittest.TestCase, config.ConfigErrorsMixin): @parameterized.expand([ ('no_keys', None, None, None, None), ('only_private_key', 'key', None, None, None), ('private_key_host_key', 'key', 'host', None, None), ('private_key_known_hosts', 'key', None, 'hosts', None), ('no_private_key_host_key', None, 'host', None, 'sshPrivateKey must be provided in order use sshHostKey'), ('no_private_key_known_hosts', None, None, 'hosts', 'sshPrivateKey must be provided in order use sshKnownHosts'), ('both_host_key_known_hosts', 'key', 'host', 'hosts', 'only one of sshPrivateKey and sshHostKey can be provided'), ]) def test_config(self, name, private_key, host_key, known_hosts, config_error): self.sshPrivateKey = private_key self.sshHostKey = host_key self.sshKnownHosts = known_hosts if config_error is None: self.setupGit() else: with self.assertRaisesConfigError(config_error): self.setupGit() class TestParseGitFeatures(GitMixin, unittest.TestCase): def setUp(self): self.sshPrivateKey = None self.sshHostKey = None self.sshKnownHosts = None self.setupGit() def test_no_output(self): self.parseGitFeatures('') self.assertFalse(self.gitInstalled) self.assertFalse(self.supportsBranch) 
self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_noversion(self): self.parseGitFeatures('git') self.assertFalse(self.gitInstalled) self.assertFalse(self.supportsBranch) self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_zero_version(self): self.parseGitFeatures('git version 0.0.0') self.assertTrue(self.gitInstalled) self.assertFalse(self.supportsBranch) self.assertFalse(self.supportsSubmoduleForce) self.assertFalse(self.supportsSubmoduleCheckout) self.assertFalse(self.supportsSshPrivateKeyAsEnvOption) self.assertFalse(self.supportsSshPrivateKeyAsConfigOption) def test_git_2_10_0(self): self.parseGitFeatures('git version 2.10.0') self.assertTrue(self.gitInstalled) self.assertTrue(self.supportsBranch) self.assertTrue(self.supportsSubmoduleForce) self.assertTrue(self.supportsSubmoduleCheckout) self.assertTrue(self.supportsSshPrivateKeyAsEnvOption) self.assertTrue(self.supportsSshPrivateKeyAsConfigOption) class TestAdjustCommandParamsForSshPrivateKey(GitMixin, unittest.TestCase): def test_throws_when_wrapper_not_given(self): self.gitInstalled = True command = [] env = {} with self.assertRaises(Exception): self.adjustCommandParamsForSshPrivateKey(command, env, 'path/to/key') class TestGetSshKnownHostsContents(unittest.TestCase): def test(self): key = 'ssh-rsa AAAA<...>WsHQ==' expected = '* ssh-rsa AAAA<...>WsHQ==' self.assertEqual(expected, getSshKnownHostsContents(key)) buildbot-2.6.0/master/buildbot/test/unit/test_util_giturlparse.py000066400000000000000000000113741361162603000253730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
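# Illustrative sketch: how a helper class can use GitMixin to discover which git
# features are available, mirroring the tests above.  The class name is invented
# for the example; only GitMixin attributes shown in the tests are used, and the
# version string is an example of `git --version` output.
from buildbot.util.git import GitMixin, getSshKnownHostsContents

class GitCapabilities(GitMixin):
    def __init__(self):
        self.sshPrivateKey = None       # no ssh key material configured
        self.sshHostKey = None
        self.sshKnownHosts = None
        self.setupGit()                 # validates the ssh-related settings

probe = GitCapabilities()
probe.parseGitFeatures('git version 2.10.0')
assert probe.gitInstalled
assert probe.supportsBranch and probe.supportsSubmoduleCheckout

# a known_hosts file body can be generated from a bare host key:
assert getSshKnownHostsContents('ssh-rsa AAAA...') == '* ssh-rsa AAAA...'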
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import giturlparse class Tests(unittest.TestCase): def test_github(self): for u in [ "https://github.com/buildbot/buildbot", "https://github.com/buildbot/buildbot.git", "ssh://git@github.com:buildbot/buildbot.git", "git://github.com/buildbot/buildbot.git"]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "github.com") self.assertEqual(u.owner, "buildbot") self.assertEqual(u.repo, "buildbot") self.assertIsNone(u.port) def test_gitlab(self): for u in [ "ssh://git@mygitlab.com/group/subgrouptest/testproject.git", "https://mygitlab.com/group/subgrouptest/testproject.git", "git@mygitlab.com:group/subgrouptest/testproject.git", "git://mygitlab.com/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertIsNone(u.port) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_gitlab_subsubgroup(self): for u in [ "ssh://git@mygitlab.com/group/subgrouptest/subsubgroup/testproject.git", "https://mygitlab.com/group/subgrouptest/subsubgroup/testproject.git", "git://mygitlab.com/group/subgrouptest/subsubgroup/testproject.git"]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertIsNone(u.port) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.owner, "group/subgrouptest/subsubgroup") self.assertEqual(u.repo, "testproject") def test_gitlab_user(self): for u in [ "ssh://buildbot@mygitlab.com:group/subgrouptest/testproject.git", "https://buildbot@mygitlab.com/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertEqual(u.domain, "mygitlab.com") self.assertIsNone(u.port) self.assertEqual(u.user, "buildbot") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_gitlab_port(self): for u in [ "ssh://buildbot@mygitlab.com:1234/group/subgrouptest/testproject.git"]: u = giturlparse(u) self.assertEqual(u.domain, "mygitlab.com") self.assertEqual(u.port, 1234) self.assertEqual(u.user, "buildbot") self.assertEqual(u.owner, "group/subgrouptest") self.assertEqual(u.repo, "testproject") def test_bitbucket(self): for u in [ "https://bitbucket.org/org/repo.git", "ssh://git@bitbucket.org:org/repo.git", "git@bitbucket.org:org/repo.git", ]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "bitbucket.org") self.assertEqual(u.owner, "org") self.assertEqual(u.repo, "repo") def test_no_owner(self): for u in [ "https://example.org/repo.git", "ssh://example.org:repo.git", "ssh://git@example.org:repo.git", "git@example.org:repo.git", ]: u = giturlparse(u) self.assertIn(u.user, (None, "git")) self.assertEqual(u.domain, "example.org") self.assertIsNone(u.owner) self.assertEqual(u.repo, "repo") def test_protos(self): self.assertEqual(giturlparse("https://bitbucket.org/org/repo.git").proto, "https") self.assertEqual(giturlparse("git://bitbucket.org/org/repo.git").proto, "git") self.assertEqual(giturlparse("ssh://git@bitbucket.org:org/repo.git").proto, "ssh") self.assertEqual(giturlparse("git@bitbucket.org:org/repo.git").proto, "ssh") buildbot-2.6.0/master/buildbot/test/unit/test_util_httpclientservice.py000066400000000000000000000402141361162603000265640ustar00rootroot00000000000000# This file is part of Buildbot. 
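# Illustrative sketch: giturlparse() splits a git URL into named parts, as the
# tests above demonstrate.  The URL below is just an example value; the user field
# may be None or "git" depending on the URL form, so the check mirrors the tests.
from buildbot.util import giturlparse

url = giturlparse("ssh://git@github.com:buildbot/buildbot.git")
assert url.proto == "ssh"
assert url.user in (None, "git")
assert url.domain == "github.com"
assert url.port is None
assert url.owner == "buildbot"
assert url.repo == "buildbot"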
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import datetime import json import os import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python import components from twisted.python.compat import intToBytes from twisted.trial import unittest from twisted.web import resource from twisted.web import server from buildbot import interfaces from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import unicode2bytes try: from requests.auth import HTTPDigestAuth except ImportError: pass # There is no way to unregister an adapter, so we have no other option # than registering it as a module side effect :-( components.registerAdapter( lambda m: m, mock.Mock, interfaces.IHttpResponse) class HTTPClientServiceTestBase(unittest.SynchronousTestCase): def setUp(self): if httpclientservice.txrequests is None or httpclientservice.treq is None: raise unittest.SkipTest('this test requires txrequests and treq') self.patch(httpclientservice, 'txrequests', mock.Mock()) self.patch(httpclientservice, 'treq', mock.Mock()) self.parent = service.MasterService() self.parent.reactor = reactor self.base_headers = {} self.successResultOf(self.parent.startService()) class HTTPClientServiceTestTxRequest(HTTPClientServiceTestBase): def setUp(self): super().setUp() self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', headers=self.base_headers)) def test_get(self): self._http.get('/bar') self._http._session.request.assert_called_once_with('get', 'http://foo/bar', headers={}, background_callback=mock.ANY) def test_put(self): self._http.put('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('put', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers={'Content-Type': 'application/json'}) def test_post(self): self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers={'Content-Type': 'application/json'}) def test_delete(self): self._http.delete('/bar') self._http._session.request.assert_called_once_with('delete', 'http://foo/bar', background_callback=mock.ANY, headers={}) def test_post_headers(self): self.base_headers.update({'X-TOKEN': 'XXXYYY'}) self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, headers={ 'X-TOKEN': 'XXXYYY', 'Content-Type': 'application/json'}) def 
test_post_auth(self): self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=('user', 'pa$$'))) self._http.post('/bar', json={'foo': 'bar'}) jsonStr = json.dumps(dict(foo='bar')) jsonBytes = unicode2bytes(jsonStr) self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=jsonBytes, auth=( 'user', 'pa$$'), headers={ 'Content-Type': 'application/json' }) class HTTPClientServiceTestTReq(HTTPClientServiceTestBase): def setUp(self): super().setUp() self.patch(httpclientservice.HTTPClientService, 'PREFER_TREQ', True) self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', headers=self.base_headers)) def test_get(self): self._http.get('/bar') httpclientservice.treq.get.assert_called_once_with('http://foo/bar', agent=mock.ANY, headers={}) def test_put(self): self._http.put('/bar', json={'foo': 'bar'}) httpclientservice.treq.put.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers={'Content-Type': ['application/json']}) def test_post(self): self._http.post('/bar', json={'foo': 'bar'}) httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers={'Content-Type': ['application/json']}) def test_delete(self): self._http.delete('/bar') httpclientservice.treq.delete.assert_called_once_with('http://foo/bar', agent=mock.ANY, headers={}) def test_post_headers(self): self.base_headers.update({'X-TOKEN': 'XXXYYY'}) self._http.post('/bar', json={'foo': 'bar'}) httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', headers={ 'Content-Type': ['application/json'], 'X-TOKEN': ['XXXYYY']}) def test_post_auth(self): self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=('user', 'pa$$'))) self._http.post('/bar', json={'foo': 'bar'}) httpclientservice.treq.post.assert_called_once_with('http://foo/bar', agent=mock.ANY, data=b'{"foo": "bar"}', auth=( 'user', 'pa$$'), headers={ 'Content-Type': ['application/json'], }) def test_post_auth_digest(self): auth = HTTPDigestAuth('user', 'pa$$') self._http = self.successResultOf( httpclientservice.HTTPClientService.getService(self.parent, 'http://foo', auth=auth)) self._http.post('/bar', data={'foo': 'bar'}) # if digest auth, we don't use treq! 
we use txrequests self._http._session.request.assert_called_once_with('post', 'http://foo/bar', background_callback=mock.ANY, data=dict( foo='bar'), auth=auth, headers={ }) class MyResource(resource.Resource): isLeaf = True def render_GET(self, request): def decode(x): if isinstance(x, bytes): return bytes2unicode(x) elif isinstance(x, (list, tuple)): return [bytes2unicode(y) for y in x] elif isinstance(x, dict): newArgs = {} for a, b in x.items(): newArgs[decode(a)] = decode(b) return newArgs return x args = decode(request.args) content_type = request.getHeader(b'content-type') if content_type == b"application/json": jsonBytes = request.content.read() jsonStr = bytes2unicode(jsonBytes) args['json_received'] = json.loads(jsonStr) data = json.dumps(args) data = unicode2bytes(data) request.setHeader(b'content-type', b'application/json') request.setHeader(b'content-length', intToBytes(len(data))) if request.method == b'HEAD': return b'' return data render_HEAD = render_GET render_POST = render_GET class HTTPClientServiceTestTxRequestE2E(unittest.TestCase): """The e2e tests must be the same for txrequests and treq We just force treq in the other TestCase """ def httpFactory(self, parent): return httpclientservice.HTTPClientService.getService( parent, 'http://127.0.0.1:{}'.format(self.port)) def expect(self, *arg, **kwargs): pass @defer.inlineCallbacks def setUp(self): if httpclientservice.txrequests is None or httpclientservice.treq is None: raise unittest.SkipTest('this test requires txrequests and treq') site = server.Site(MyResource()) self.listenport = reactor.listenTCP(0, site) self.port = self.listenport.getHost().port self.parent = parent = service.MasterService() self.parent.reactor = reactor yield parent.startService() self._http = yield self.httpFactory(parent) @defer.inlineCallbacks def tearDown(self): self.listenport.stopListening() yield self.parent.stopService() @defer.inlineCallbacks def test_content(self): self.expect('get', '/', content_json={}) res = yield self._http.get('/') content = yield res.content() self.assertEqual(content, b'{}') @defer.inlineCallbacks def test_content_with_params(self): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.get('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_post_content_with_params(self): self.expect('post', '/', params=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.post('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_put_content_with_data(self): self.expect('post', '/', data=dict(a='b'), content_json=dict(a=['b'])) res = yield self._http.post('/', data=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_put_content_with_json(self): exp_content_json = dict(json_received=dict(a='b')) self.expect('post', '/', json=dict(a='b'), content_json=exp_content_json) res = yield self._http.post('/', json=dict(a='b')) content = yield res.content() content = bytes2unicode(content) content = json.loads(content) self.assertEqual(content, exp_content_json) @defer.inlineCallbacks def test_put_content_with_json_datetime(self): exp_content_json = dict(json_received=dict(a='b', ts=12)) dt = datetime.datetime.utcfromtimestamp(12) self.expect('post', '/', json=dict(a='b', ts=dt), content_json=exp_content_json) res = yield self._http.post('/', json=dict(a='b', ts=dt)) 
content = yield res.content() content = bytes2unicode(content) content = json.loads(content) self.assertEqual(content, exp_content_json) @defer.inlineCallbacks def test_json(self): self.expect('get', '/', content_json={}) res = yield self._http.get('/') content = yield res.json() self.assertEqual(content, {}) self.assertEqual(res.code, 200) # note that freebsd workers will not like when there are too many parallel connections # we can change this test via environment variable NUM_PARALLEL = os.environ.get("BBTEST_NUM_PARALLEL", 5) @defer.inlineCallbacks def test_lots(self): for i in range(self.NUM_PARALLEL): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) # use for benchmarking (txrequests: 3ms per request treq: 1ms per # request) for i in range(self.NUM_PARALLEL): res = yield self._http.get('/', params=dict(a='b')) content = yield res.content() self.assertEqual(content, b'{"a": ["b"]}') @defer.inlineCallbacks def test_lots_parallel(self): for i in range(self.NUM_PARALLEL): self.expect('get', '/', params=dict(a='b'), content_json=dict(a=['b'])) # use for benchmarking (txrequests: 3ms per request treq: 11ms per # request (!?)) def oneReq(): d = self._http.get('/', params=dict(a='b')) @d.addCallback def content(res): return res.content() return d dl = [oneReq() for i in range(self.NUM_PARALLEL)] yield defer.gatherResults(dl) class HTTPClientServiceTestTReqE2E(HTTPClientServiceTestTxRequestE2E): def setUp(self): self.patch(httpclientservice.HTTPClientService, 'PREFER_TREQ', True) return super().setUp() class HTTPClientServiceTestFakeE2E(HTTPClientServiceTestTxRequestE2E): def httpFactory(self, parent): return fakehttpclientservice.HTTPClientService.getService( parent, 'http://127.0.0.1:{}'.format(self.port)) def expect(self, *arg, **kwargs): self._http.expect(*arg, **kwargs) buildbot-2.6.0/master/buildbot/test/unit/test_util_identifiers.py000066400000000000000000000100471361162603000253330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import locale from twisted.python import log from twisted.trial import unittest from buildbot.util import identifiers class Tests(unittest.TestCase): def test_isIdentifier(self): os_encoding = locale.getpreferredencoding() try: '\N{SNOWMAN}'.encode(os_encoding) except UnicodeEncodeError: # Default encoding of Windows console is 'cp1252' # which cannot encode the snowman. 
raise(unittest.SkipTest("Cannot encode weird unicode " "on this platform with {}".format(os_encoding))) good = [ "linux", "Linux", "abc123", "a" * 50, '\N{SNOWMAN}' ] for g in good: log.msg('expect %r to be good' % (g,)) self.assertTrue(identifiers.isIdentifier(50, g)) bad = [ None, '', b'linux', 'a/b', "a.b.c.d", "a-b_c.d9", 'spaces not allowed', "a" * 51, "123 no initial digits", '\N{SNOWMAN}.\N{SNOWMAN}', ] for b in bad: log.msg('expect %r to be bad' % (b,)) self.assertFalse(identifiers.isIdentifier(50, b)) def assertEqualUnicode(self, got, exp): self.assertTrue(isinstance(exp, str)) self.assertEqual(got, exp) def test_forceIdentifier_already_is(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abc'), 'abc') def test_forceIdentifier_ascii(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abc'), 'abc') def test_forceIdentifier_too_long(self): self.assertEqualUnicode( identifiers.forceIdentifier(10, 'abcdefghijKL'), 'abcdefghij') def test_forceIdentifier_invalid_chars(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, 'my log.html'), 'my_log_html') def test_forceIdentifier_leading_digit(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, '9 pictures of cats.html'), '__pictures_of_cats_html') def test_forceIdentifier_digits(self): self.assertEqualUnicode( identifiers.forceIdentifier(100, 'warnings(2000)'), 'warnings_2000_') def test_incrementIdentifier_simple(self): self.assertEqualUnicode( identifiers.incrementIdentifier(100, 'aaa'), 'aaa_2') def test_incrementIdentifier_simple_way_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(3, 'aaa'), 'a_2') def test_incrementIdentifier_simple_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(4, 'aaa'), 'aa_2') def test_incrementIdentifier_single_digit(self): self.assertEqualUnicode( identifiers.incrementIdentifier(100, 'aaa_2'), 'aaa_3') def test_incrementIdentifier_add_digits(self): self.assertEqualUnicode( identifiers.incrementIdentifier(100, 'aaa_99'), 'aaa_100') def test_incrementIdentifier_add_digits_too_long(self): self.assertEqualUnicode( identifiers.incrementIdentifier(6, 'aaa_99'), 'aa_100') def test_incrementIdentifier_add_digits_out_of_space(self): with self.assertRaises(ValueError): identifiers.incrementIdentifier(6, '_99999') buildbot-2.6.0/master/buildbot/test/unit/test_util_interfaces.py000066400000000000000000000056411361162603000251550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
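# Illustrative sketch: the identifiers helpers validated above turn arbitrary
# strings into safe, length-limited identifiers and generate "next" names.  The
# sample strings are arbitrary; the expected values follow the tests' behaviour.
from buildbot.util import identifiers

assert identifiers.isIdentifier(50, 'linux')
assert not identifiers.isIdentifier(50, 'spaces not allowed')

# forceIdentifier replaces invalid characters and truncates to the given length
assert identifiers.forceIdentifier(100, 'my log.html') == 'my_log_html'
assert identifiers.forceIdentifier(10, 'abcdefghijKL') == 'abcdefghij'

# incrementIdentifier appends or bumps a numeric suffix, keeping within the limit
assert identifiers.incrementIdentifier(100, 'warnings') == 'warnings_2'
assert identifiers.incrementIdentifier(100, 'warnings_99') == 'warnings_100'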
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util import interfaces class TestAssertArgSpecMatches(interfaces.InterfaceTests, unittest.TestCase): def test_simple_decorator(self): def myfunc(x, y=2, *args): pass @self.assertArgSpecMatches(myfunc) def myfunc2(x, y=2, *args): pass try: @self.assertArgSpecMatches(myfunc) def myfunc3(x, y=3, *args): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual( error.args, ('Expected: (x, y=3, *args); got: (x, y=2, *args)',)) def test_double_decorator(self): def myfunc(x, y): pass def myfunc2(x, y): pass def myfunc3(x, yy): pass @self.assertArgSpecMatches(myfunc, myfunc2) def myfunc4(x, y): pass try: @self.assertArgSpecMatches(myfunc, myfunc3) def myfunc5(x, y): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual(error.args, ('Expected: (x, y); got: (x, yy)',)) try: @self.assertArgSpecMatches(myfunc, myfunc3) def myfunc6(xx, yy): pass except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual(error.args, ('Expected: (x, y); got: (x, yy)',)) def test_function_style(self): def myfunc(x, y=2, *args): pass def myfunc2(x, y=2, *args): pass def myfunc3(x, y=3, *args): pass self.assertArgSpecMatches(myfunc, myfunc2) try: self.assertArgSpecMatches(myfunc, myfunc3) except Exception as e: error = e else: error = None self.assertIdentical(type(error), unittest.FailTest) self.assertEqual( error.args, ('Expected: (x, y=2, *args); got: (x, y=3, *args)',)) buildbot-2.6.0/master/buildbot/test/unit/test_util_kubeclientservice.py000066400000000000000000000352521361162603000265410ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
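# Illustrative sketch: assertArgSpecMatches (from buildbot.test.util.interfaces)
# is used in buildbot's own tests to keep fake implementations in sync with the
# real ones -- the test fails when the two signatures drift apart.  The functions
# and class below are invented examples.
from twisted.trial import unittest
from buildbot.test.util import interfaces

def real_getBuild(buildid, include_steps=False):
    pass

class FakeStaysInSync(interfaces.InterfaceTests, unittest.TestCase):
    def test_signature_getBuild(self):
        # passes because the decorated fake has the same argument spec
        @self.assertArgSpecMatches(real_getBuild)
        def getBuild(buildid, include_steps=False):
            pass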
# # Copyright Buildbot Team Members import base64 import copy import os import sys import textwrap from io import StringIO from unittest.case import SkipTest import yaml import mock from twisted.internet import defer from twisted.python import runtime from twisted.trial import unittest from buildbot.process.properties import Interpolate from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttp from buildbot.test.fake import kube as fakekube from buildbot.test.util import config from buildbot.test.util.misc import TestReactorMixin from buildbot.util import kubeclientservice class MockFileBase: file_mock_config = {} def setUp(self): self.patcher = mock.patch('buildbot.util.kubeclientservice.open', self.mock_open) self.patcher.start() def tearDown(self): self.patcher.stop() def mock_open(self, filename, mode=None, encoding='UTF-8'): filename_type = os.path.basename(filename) file_value = self.file_mock_config[filename_type] mock_open = mock.Mock( __enter__=mock.Mock(return_value=StringIO(file_value)), __exit__=mock.Mock()) return mock_open class KubeClientServiceTestClusterConfig( MockFileBase, config.ConfigErrorsMixin, unittest.SynchronousTestCase): file_mock_config = { 'token': 'BASE64_TOKEN', 'namespace': 'buildbot_namespace' } def setUp(self): super().setUp() self.patch(kubeclientservice.os, 'environ', {'KUBERNETES_PORT': 'tcp://foo'}) def patchExist(self, val): self.patch(kubeclientservice.os.path, 'exists', lambda x: val) def test_not_exists(self): self.patchExist(False) with self.assertRaisesConfigError('kube_dir not found:'): kubeclientservice.KubeInClusterConfigLoader() def test_basic(self): self.patchExist(True) config = kubeclientservice.KubeInClusterConfigLoader() self.successResultOf(config.startService()) self.assertEqual( config.getConfig(), { 'headers': { 'Authorization': 'Bearer BASE64_TOKEN' }, 'master_url': 'https://foo', 'namespace': 'buildbot_namespace', 'verify': '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt' }) KUBE_CTL_PROXY_FAKE = """ import time import sys print("Starting to serve on 127.0.0.1:" + sys.argv[2]) sys.stdout.flush() time.sleep(1000) """ KUBE_CTL_PROXY_FAKE_ERROR = """ import time import sys print("Issue with the config!", file=sys.stderr) sys.stderr.flush() sys.exit(1) """ class KubeClientServiceTestKubeHardcodedConfig(config.ConfigErrorsMixin, unittest.TestCase): def test_basic(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default" ) self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8001', 'namespace': 'default', 'headers': {} }) @defer.inlineCallbacks def test_verify_is_forwarded_to_keywords(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem" ) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) self.assertEqual('/path/to/pem', kwargs['verify']) @defer.inlineCallbacks def test_verify_headers_are_passed_to_the_query(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", headers={'Test': '10'} ) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) self.assertEqual({'Test': '10'}, kwargs['headers']) def test_the_configuration_parent_is_set_to_the_service(self): # This is needed to allow secret expansion 
self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001") service = kubeclientservice.KubeClientService(config) self.assertEqual(service, self.config.parent) def test_cannot_pass_both_bearer_and_basic_auth(self): with self.assertRaises(Exception): kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", basicAuth="Bla", bearerToken="Bla") @defer.inlineCallbacks def test_verify_bearerToken_is_expanded(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", bearerToken=Interpolate("%(kw:test)s", test=10)) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) self.assertEqual("Bearer 10", kwargs['headers']['Authorization']) @defer.inlineCallbacks def test_verify_basicAuth_is_expanded(self): self.config = config = kubeclientservice.KubeHardcodedConfig( master_url="http://localhost:8001", namespace="default", verify="/path/to/pem", basicAuth={'user': 'name', 'password': Interpolate("%(kw:test)s", test=10)}) service = kubeclientservice.KubeClientService(config) url, kwargs = yield service._prepareRequest("/test", {}) expected = "Basic {0}".format(base64.b64encode("name:10".encode('utf-8'))) self.assertEqual(expected, kwargs['headers']['Authorization']) class KubeClientServiceTestKubeCtlProxyConfig(config.ConfigErrorsMixin, unittest.TestCase): def patchProxyCmd(self, cmd): if runtime.platformType != 'posix': self.config = None raise SkipTest('only posix platform is supported by this test') self.patch(kubeclientservice.KubeCtlProxyConfigLoader, 'kube_ctl_proxy_cmd', [sys.executable, "-c", cmd]) def tearDown(self): if self.config is not None: return self.config.stopService() @defer.inlineCallbacks def test_basic(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader() yield config.startService() self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8001', 'namespace': 'default' }) @defer.inlineCallbacks def test_config_args(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader( proxy_port=8002, namespace="system") yield config.startService() self.assertEqual(config.kube_proxy_output, b'Starting to serve on 127.0.0.1:8002') self.assertEqual(config.getConfig(), { 'master_url': 'http://localhost:8002', 'namespace': 'system' }) yield config.stopService() @defer.inlineCallbacks def test_config_with_error(self): self.patchProxyCmd(KUBE_CTL_PROXY_FAKE_ERROR) self.config = config = kubeclientservice.KubeCtlProxyConfigLoader() with self.assertRaises(RuntimeError): yield config.startService() # integration tests for KubeClientService class RealKubeClientServiceTest(TestReactorMixin, unittest.TestCase): timeout = 200 POD_SPEC = yaml.safe_load( textwrap.dedent(""" apiVersion: v1 kind: Pod metadata: name: pod-example spec: containers: - name: alpine image: alpine command: ["sleep"] args: ["100"] """)) def createKube(self): if "TEST_KUBERNETES" not in os.environ: raise SkipTest( "kubernetes integration tests only run when environment " "variable TEST_KUBERNETES is set") self.kube = kubeclientservice.KubeClientService( kubeclientservice.KubeCtlProxyConfigLoader()) def expect(self, *args, **kwargs): pass @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.createKube() yield 
self.kube.setServiceParent(self.master) yield self.master.startService() def tearDown(self): return self.master.stopService() kube = None @defer.inlineCallbacks def test_create_and_delete_pod(self): content = {'kind': 'Pod', 'metadata': {'name': 'pod-example'}} self.expect( method='post', ep='/api/v1/namespaces/default/pods', params=None, data=None, json={ 'apiVersion': 'v1', 'kind': 'Pod', 'metadata': { 'name': 'pod-example' }, 'spec': { 'containers': [{ 'name': 'alpine', 'image': 'alpine', 'command': ['sleep'], 'args': ['100'] }] } }, content_json=content) res = yield self.kube.createPod(self.kube.namespace, self.POD_SPEC) self.assertEqual(res['kind'], 'Pod') self.assertEqual(res['metadata']['name'], 'pod-example') self.assertNotIn('deletionTimestamp', res['metadata']) content['metadata']['deletionTimestamp'] = 'now' self.expect( method='delete', ep='/api/v1/namespaces/default/pods/pod-example', params={'graceperiod': 0}, data=None, json=None, code=200, content_json=content) res = yield self.kube.deletePod(self.kube.namespace, 'pod-example') self.assertEqual(res['kind'], 'Pod') self.assertIn('deletionTimestamp', res['metadata']) # first time present self.expect( method='get', ep='/api/v1/namespaces/default/pods/pod-example/status', params=None, data=None, json=None, code=200, content_json=content) # second time deleted content = {'kind': 'Status', 'reason': 'NotFound'} self.expect( method='get', ep='/api/v1/namespaces/default/pods/pod-example/status', params=None, data=None, json=None, code=404, content_json=content) res = yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=200) self.assertEqual(res['kind'], 'Status') self.assertEqual(res['reason'], 'NotFound') @defer.inlineCallbacks def test_create_bad_spec(self): spec = copy.deepcopy(self.POD_SPEC) del spec['metadata'] content = { 'kind': 'Status', 'reason': 'MissingName', 'message': 'need name' } self.expect( method='post', ep='/api/v1/namespaces/default/pods', params=None, data=None, json={ 'apiVersion': 'v1', 'kind': 'Pod', 'spec': { 'containers': [{ 'name': 'alpine', 'image': 'alpine', 'command': ['sleep'], 'args': ['100'] }] } }, code=400, content_json=content) with self.assertRaises(kubeclientservice.KubeError): yield self.kube.createPod(self.kube.namespace, spec) @defer.inlineCallbacks def test_delete_not_existing(self): content = { 'kind': 'Status', 'reason': 'NotFound', 'message': 'no container by that name' } self.expect( method='delete', ep='/api/v1/namespaces/default/pods/pod-example', params={'graceperiod': 0}, data=None, json=None, code=404, content_json=content) with self.assertRaises(kubeclientservice.KubeError): yield self.kube.deletePod(self.kube.namespace, 'pod-example') @defer.inlineCallbacks def test_wait_for_delete_not_deleting(self): yield self.kube.createPod(self.kube.namespace, self.POD_SPEC) with self.assertRaises(TimeoutError): yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=2) res = yield self.kube.deletePod(self.kube.namespace, 'pod-example') self.assertEqual(res['kind'], 'Pod') self.assertIn('deletionTimestamp', res['metadata']) yield self.kube.waitForPodDeletion( self.kube.namespace, 'pod-example', timeout=100) class FakeKubeClientServiceTest(RealKubeClientServiceTest): def createKube(self): self.kube = fakekube.KubeClientService( kubeclientservice.KubeHardcodedConfig(master_url='http://m')) class PatchedKubeClientServiceTest(RealKubeClientServiceTest): def createKube(self): self.kube = kubeclientservice.KubeClientService( 
kubeclientservice.KubeHardcodedConfig(master_url='http://m')) self.http = fakehttp.HTTPClientService('http://m') self.kube.get = self.http.get self.kube.post = self.http.post self.kube.put = self.http.put self.kube.delete = self.http.delete def expect(self, *args, **kwargs): return self.http.expect(*args, **kwargs) def test_wait_for_delete_not_deleting(self): # no need to describe the expect flow for that case pass buildbot-2.6.0/master/buildbot/test/unit/test_util_lineboundaries.py000066400000000000000000000127421361162603000260350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.trial import unittest from buildbot.util import lineboundaries class LBF(unittest.TestCase): def setUp(self): self.callbacks = [] self.lbf = lineboundaries.LineBoundaryFinder(self._callback) def _callback(self, wholeLines): self.assertEqual(wholeLines[-1], '\n', 'got %r' % (wholeLines)) self.callbacks.append(wholeLines) d = defer.Deferred() reactor.callLater(0, d.callback, None) return d def assertCallbacks(self, callbacks): self.assertEqual(self.callbacks, callbacks) self.callbacks = [] # tests @defer.inlineCallbacks def test_already_terminated(self): yield self.lbf.append('abcd\ndefg\n') self.assertCallbacks(['abcd\ndefg\n']) yield self.lbf.append('xyz\n') self.assertCallbacks(['xyz\n']) yield self.lbf.flush() self.assertCallbacks([]) @defer.inlineCallbacks def test_partial_line(self): for c in "hello\nworld": yield self.lbf.append(c) self.assertCallbacks(['hello\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_empty_appends(self): yield self.lbf.append('hello ') yield self.lbf.append('') yield self.lbf.append('world\n') yield self.lbf.append('') self.assertCallbacks(['hello world\n']) @defer.inlineCallbacks def test_embedded_newlines(self): yield self.lbf.append('hello, ') self.assertCallbacks([]) yield self.lbf.append('cruel\nworld') self.assertCallbacks(['hello, cruel\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_windows_newlines_folded(self): r"Windows' \r\n is treated as and converted to a newline" yield self.lbf.append('hello, ') self.assertCallbacks([]) yield self.lbf.append('cruel\r\n\r\nworld') self.assertCallbacks(['hello, cruel\n\n']) yield self.lbf.flush() self.assertCallbacks(['world\n']) @defer.inlineCallbacks def test_bare_cr_folded(self): r"a bare \r is treated as and converted to a newline" yield self.lbf.append('1%\r5%\r15%\r100%\nfinished') yield self.lbf.flush() self.assertCallbacks(['1%\n5%\n15%\n100%\n', 'finished\n']) @defer.inlineCallbacks def test_backspace_folded(self): r"a lot of \b is treated as and converted to a newline" yield self.lbf.append('1%\b\b5%\b\b15%\b\b\b100%\nfinished') yield self.lbf.flush() 
self.assertCallbacks(['1%\n5%\n15%\n100%\n', 'finished\n']) @defer.inlineCallbacks def test_mixed_consecutive_newlines(self): r"mixing newline styles back-to-back doesn't collapse them" yield self.lbf.append('1\r\n\n\r') self.assertCallbacks(['1\n\n']) # last \r is delayed until flush yield self.lbf.append('2\n\r\n') self.assertCallbacks(['\n2\n\n']) @defer.inlineCallbacks def test_split_newlines(self): r"multi-character newlines, split across chunks, are converted" input = 'a\nb\r\nc\rd\n\re' for splitpoint in range(1, len(input) - 1): a, b = input[:splitpoint], input[splitpoint:] yield self.lbf.append(a) yield self.lbf.append(b) yield self.lbf.flush() res = ''.join(self.callbacks) log.msg('feeding %r, %r gives %r' % (a, b, res)) self.assertEqual(res, 'a\nb\nc\nd\n\ne\n') self.callbacks = [] @defer.inlineCallbacks def test_split_terminal_control(self): """terminal control characters are converted""" yield self.lbf.append('1234\033[u4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) yield self.lbf.append('1234\033[1;2H4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) yield self.lbf.append('1234\033[1;2f4321') yield self.lbf.flush() self.assertCallbacks(['1234\n', '4321\n']) @defer.inlineCallbacks def test_long_lines(self): """long lines are split""" for i in range(4): yield self.lbf.append('12' * 1000) # a split at 4096 + the remaining chars self.assertCallbacks(['12' * 2048 + '\n' + '12' * 952 + '\n']) @defer.inlineCallbacks def test_huge_lines(self): """huge lines are split""" yield self.lbf.append('12' * 32768) yield self.lbf.flush() self.assertCallbacks([('12' * 2048 + '\n') * 16]) @defer.inlineCallbacks def test_empty_flush(self): yield self.lbf.flush() self.assertEqual(self.callbacks, []) buildbot-2.6.0/master/buildbot/test/unit/test_util_lru.py000066400000000000000000000431351361162603000236340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
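# --- Illustrative sketch, not part of the original test module: a minimal use
# --- of the buildbot.util.lru caches exercised by the tests below.  The miss
# --- function name `fetch_value` is hypothetical; LRUCache, get(), put(),
# --- set_max_size() and the hits/misses counters are the API the tests rely
# --- on.  Cached values must be weakref-able, which is why the tests (and
# --- this sketch) wrap them in sets.
from buildbot.util import lru

def fetch_value(key):
    # hypothetical miss function: called only when `key` is not in the cache
    return set([key.upper()])

cache = lru.LRUCache(fetch_value, 3)     # keep at most 3 entries
first = cache.get('a')                   # miss: calls fetch_value('a')
again = cache.get('a')                   # hit: served from the cache
cache.put('a', set(['REPLACED']))        # overwrite the cached value for a key
cache.set_max_size(1)                    # shrink the cache; old entries may be expelled
stats = (cache.hits, cache.misses)       # counters asserted throughout the tests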
# # Copyright Buildbot Team Members import gc import random import string from twisted.internet import defer from twisted.internet import reactor from twisted.python import failure from twisted.trial import unittest from buildbot.util import lru # construct weakref-able objects for particular keys def short(k): return set([k.upper() * 3]) def long(k): return set([k.upper() * 6]) class LRUCacheTest(unittest.TestCase): def setUp(self): lru.inv_failed = False self.lru = lru.LRUCache(short, 3) def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") def check_result(self, r, exp, exp_hits=None, exp_misses=None, exp_refhits=None): self.assertEqual(r, exp) if exp_hits is not None: self.assertEqual(self.lru.hits, exp_hits) if exp_misses is not None: self.assertEqual(self.lru.misses, exp_misses) if exp_refhits is not None: self.assertEqual(self.lru.refhits, exp_refhits) def test_single_key(self): # just get an item val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) # second time, it should be cached.. self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, short('a'), 1, 1) def test_simple_lru_expulsion(self): val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = self.lru.get('b') self.check_result(val, short('b'), 0, 2) val = self.lru.get('c') self.check_result(val, short('c'), 0, 3) val = self.lru.get('d') self.check_result(val, short('d'), 0, 4) del(val) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, long('a'), 0, 5) # ..and that expelled B, but C is still in the cache val = self.lru.get('c') self.check_result(val, short('c'), 1, 5) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1(self): self.lru = lru.LRUCache(short, 1) val = yield self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = yield self.lru.get('a') self.check_result(val, short('a'), 1, 1) val = yield self.lru.get('b') self.check_result(val, short('b'), 1, 2) del(val) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = long val = yield self.lru.get('a') self.check_result(val, long('a'), 1, 3) del(val) gc.collect() # ..and that expelled B val = yield self.lru.get('b') self.check_result(val, long('b'), 1, 4) def test_simple_lru_expulsion_maxsize_1_null_result(self): # a regression test for #2011 def miss_fn(k): if k == 'b': return None return short(k) self.lru = lru.LRUCache(miss_fn, 1) val = self.lru.get('a') self.check_result(val, short('a'), 0, 1) val = self.lru.get('b') self.check_result(val, None, 0, 2) del(val) # 'a' was not expelled since 'b' was None self.lru.miss_fn = long val = self.lru.get('a') self.check_result(val, short('a'), 1, 2) def test_queue_collapsing(self): # just to check that we're practicing with the right queue size (so # QUEUE_SIZE_FACTOR is 10) self.assertEqual(self.lru.max_queue, 30) for c in 'a' + 'x' * 27 + 'ab': res = self.lru.get(c) self.check_result(res, short('b'), 27, 3) # at this point, we should have 'x', 'a', and 'b' in the cache, and # 'axx..xxab' in the queue. 
self.assertEqual(len(self.lru.queue), 30) # This 'get' operation for an existing key should cause compaction res = self.lru.get('b') self.check_result(res, short('b'), 28, 3) self.assertEqual(len(self.lru.queue), 3) # expect a cached short('a') self.lru.miss_fn = long res = self.lru.get('a') self.check_result(res, short('a'), 29, 3) def test_all_misses(self): for i, c in enumerate(string.ascii_lowercase + string.ascii_uppercase): res = self.lru.get(c) self.check_result(res, short(c), 0, i + 1) def test_get_exception(self): def fail_miss_fn(k): raise RuntimeError("oh noes") self.lru.miss_fn = fail_miss_fn got_exc = False try: self.lru.get('abc') except RuntimeError: got_exc = True self.assertEqual(got_exc, True) def test_all_hits(self): res = self.lru.get('a') self.check_result(res, short('a'), 0, 1) self.lru.miss_fn = long for i in range(100): res = self.lru.get('a') self.check_result(res, short('a'), i + 1, 1) def test_weakrefs(self): res_a = self.lru.get('a') self.check_result(res_a, short('a')) # note that res_a keeps a reference to this value res_b = self.lru.get('b') self.check_result(res_b, short('b')) del res_b # discard reference to b # blow out the cache and the queue self.lru.miss_fn = long for c in (string.ascii_lowercase[2:] * 5): self.lru.get(c) # and fetch a again, expecting the cached value res = self.lru.get('a') self.check_result(res, res_a, exp_refhits=1) # but 'b' should give us a new value res = self.lru.get('b') self.check_result(res, long('b'), exp_refhits=1) def test_fuzz(self): chars = list(string.ascii_lowercase * 40) random.shuffle(chars) for i, c in enumerate(chars): res = self.lru.get(c) self.check_result(res, short(c)) def test_set_max_size(self): # load up the cache with three items for c in 'abc': res = self.lru.get(c) self.check_result(res, short(c)) del(res) # reset the size to 1 self.lru.set_max_size(1) gc.collect() # and then expect that 'b' is no longer in the cache self.lru.miss_fn = long res = self.lru.get('b') self.check_result(res, long('b')) def test_miss_fn_kwargs(self): def keep_kwargs_miss_fn(k, **kwargs): return set(kwargs.keys()) self.lru.miss_fn = keep_kwargs_miss_fn val = self.lru.get('a', a=1, b=2) self.check_result(val, set(['a', 'b']), 0, 1) def test_miss_fn_returns_none(self): calls = [] def none_miss_fn(k): calls.append(k) return None self.lru.miss_fn = none_miss_fn for i in range(2): self.assertEqual(self.lru.get('a'), None) # check that the miss_fn was called twice self.assertEqual(calls, ['a', 'a']) def test_put(self): self.assertEqual(self.lru.get('p'), short('p')) self.lru.put('p', set(['P2P2'])) self.assertEqual(self.lru.get('p'), set(['P2P2'])) def test_put_nonexistent_key(self): self.assertEqual(self.lru.get('p'), short('p')) self.lru.put('q', set(['new-q'])) self.assertEqual(self.lru.get('p'), set(['PPP'])) self.assertEqual(self.lru.get('q'), set(['new-q'])) # updated class AsyncLRUCacheTest(unittest.TestCase): def setUp(self): lru.inv_failed = False self.lru = lru.AsyncLRUCache(self.short_miss_fn, 3) def tearDown(self): self.assertFalse(lru.inv_failed, "invariant failed; see logs") def short_miss_fn(self, key): return defer.succeed(short(key)) def long_miss_fn(self, key): return defer.succeed(long(key)) def failure_miss_fn(self, key): return defer.succeed(None) def check_result(self, r, exp, exp_hits=None, exp_misses=None, exp_refhits=None): self.assertEqual(r, exp) if exp_hits is not None: self.assertEqual(self.lru.hits, exp_hits) if exp_misses is not None: self.assertEqual(self.lru.misses, exp_misses) if exp_refhits is not 
None: self.assertEqual(self.lru.refhits, exp_refhits) # tests @defer.inlineCallbacks def test_single_key(self): # just get an item res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) # second time, it should be cached.. self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 1) @defer.inlineCallbacks def test_simple_lru_expulsion(self): res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('b') self.check_result(res, short('b'), 0, 2) res = yield self.lru.get('c') self.check_result(res, short('c'), 0, 3) res = yield self.lru.get('d') self.check_result(res, short('d'), 0, 4) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, long('a'), 0, 5) # ..and that expelled B, but C is still in the cache res = yield self.lru.get('c') self.check_result(res, short('c'), 1, 5) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1(self): self.lru = lru.AsyncLRUCache(self.short_miss_fn, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 1) res = yield self.lru.get('b') self.check_result(res, short('b'), 1, 2) gc.collect() # now try 'a' again - it should be a miss self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, long('a'), 1, 3) gc.collect() # ..and that expelled B res = yield self.lru.get('b') self.check_result(res, long('b'), 1, 4) @defer.inlineCallbacks def test_simple_lru_expulsion_maxsize_1_null_result(self): # a regression test for #2011 def miss_fn(k): if k == 'b': return defer.succeed(None) return defer.succeed(short(k)) self.lru = lru.AsyncLRUCache(miss_fn, 1) res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) res = yield self.lru.get('b') self.check_result(res, None, 0, 2) # 'a' was not expelled since 'b' was None self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 1, 2) @defer.inlineCallbacks def test_queue_collapsing(self): # just to check that we're practicing with the right queue size (so # QUEUE_SIZE_FACTOR is 10) self.assertEqual(self.lru.max_queue, 30) for c in 'a' + 'x' * 27 + 'ab': res = yield self.lru.get(c) self.check_result(res, short('b'), 27, 3) # at this point, we should have 'x', 'a', and 'b' in the cache, and # 'axx..xxab' in the queue. 
self.assertEqual(len(self.lru.queue), 30) # This 'get' operation for an existing key should cause compaction res = yield self.lru.get('b') self.check_result(res, short('b'), 28, 3) self.assertEqual(len(self.lru.queue), 3) # expect a cached short('a') self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('a') self.check_result(res, short('a'), 29, 3) @defer.inlineCallbacks def test_all_misses(self): for i, c in enumerate(string.ascii_lowercase + string.ascii_uppercase): res = yield self.lru.get(c) self.check_result(res, short(c), 0, i + 1) @defer.inlineCallbacks def test_get_exception(self): def fail_miss_fn(k): return defer.fail(RuntimeError("oh noes")) self.lru.miss_fn = fail_miss_fn got_exc = False try: yield self.lru.get('abc') except RuntimeError: got_exc = True self.assertEqual(got_exc, True) @defer.inlineCallbacks def test_all_hits(self): res = yield self.lru.get('a') self.check_result(res, short('a'), 0, 1) self.lru.miss_fn = self.long_miss_fn for i in range(100): res = yield self.lru.get('a') self.check_result(res, short('a'), i + 1, 1) @defer.inlineCallbacks def test_weakrefs(self): res_a = yield self.lru.get('a') self.check_result(res_a, short('a')) # note that res_a keeps a reference to this value res_b = yield self.lru.get('b') self.check_result(res_b, short('b')) del res_b # discard reference to b # blow out the cache and the queue self.lru.miss_fn = self.long_miss_fn for c in (string.ascii_lowercase[2:] * 5): yield self.lru.get(c) # and fetch a again, expecting the cached value res = yield self.lru.get('a') self.check_result(res, res_a, exp_refhits=1) # but 'b' should give us a new value res = yield self.lru.get('b') self.check_result(res, long('b'), exp_refhits=1) @defer.inlineCallbacks def test_fuzz(self): chars = list(string.ascii_lowercase * 40) random.shuffle(chars) for i, c in enumerate(chars): res = yield self.lru.get(c) self.check_result(res, short(c)) @defer.inlineCallbacks def test_massively_parallel(self): chars = list(string.ascii_lowercase * 5) misses = [0] def slow_short_miss_fn(key): d = defer.Deferred() misses[0] += 1 reactor.callLater(0, lambda: d.callback(short(key))) return d self.lru.miss_fn = slow_short_miss_fn def check(c, d): d.addCallback(self.check_result, short(c)) return d yield defer.gatherResults([ check(c, self.lru.get(c)) for c in chars]) self.assertEqual(misses[0], 26) self.assertEqual(self.lru.misses, 26) self.assertEqual(self.lru.hits, 4 * 26) @defer.inlineCallbacks def test_slow_fetch(self): def slower_miss_fn(k): d = defer.Deferred() reactor.callLater(0.05, lambda: d.callback(short(k))) return d self.lru.miss_fn = slower_miss_fn def do_get(test_d, k): d = self.lru.get(k) d.addCallback(self.check_result, short(k)) d.addCallbacks(test_d.callback, test_d.errback) ds = [] for i in range(8): d = defer.Deferred() reactor.callLater(0.02 * i, do_get, d, 'x') ds.append(d) yield defer.gatherResults(ds) self.assertEqual((self.lru.hits, self.lru.misses), (7, 1)) def test_slow_failure(self): def slow_fail_miss_fn(k): d = defer.Deferred() reactor.callLater(0.05, lambda: d.errback(failure.Failure(RuntimeError("oh noes")))) return d self.lru.miss_fn = slow_fail_miss_fn @defer.inlineCallbacks def do_get(test_d, k): d = self.lru.get(k) yield self.assertFailure(d, RuntimeError) d.addCallbacks(test_d.callback, test_d.errback) ds = [] for i in range(8): d = defer.Deferred() reactor.callLater(0.02 * i, do_get, d, 'x') ds.append(d) d = defer.gatherResults(ds) return d @defer.inlineCallbacks def test_set_max_size(self): # load up the cache with three items 
for c in 'abc': res = yield self.lru.get(c) self.check_result(res, short(c)) # reset the size to 1 self.lru.set_max_size(1) gc.collect() # and then expect that 'b' is no longer in the cache self.lru.miss_fn = self.long_miss_fn res = yield self.lru.get('b') self.check_result(res, long('b')) @defer.inlineCallbacks def test_miss_fn_kwargs(self): def keep_kwargs_miss_fn(k, **kwargs): return defer.succeed(set(kwargs.keys())) self.lru.miss_fn = keep_kwargs_miss_fn res = yield self.lru.get('a', a=1, b=2) self.check_result(res, set(['a', 'b']), 0, 1) @defer.inlineCallbacks def test_miss_fn_returns_none(self): calls = [] def none_miss_fn(k): calls.append(k) return defer.succeed(None) self.lru.miss_fn = none_miss_fn for i in range(2): self.assertEqual((yield self.lru.get('a')), None) # check that the miss_fn was called twice self.assertEqual(calls, ['a', 'a']) @defer.inlineCallbacks def test_put(self): self.assertEqual((yield self.lru.get('p')), short('p')) self.lru.put('p', set(['P2P2'])) self.assertEqual((yield self.lru.get('p')), set(['P2P2'])) buildbot-2.6.0/master/buildbot/test/unit/test_util_maildir.py000066400000000000000000000057501361162603000244540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import dirs from buildbot.util import maildir class TestMaildirService(dirs.DirsMixin, unittest.TestCase): def setUp(self): self.maildir = os.path.abspath("maildir") self.newdir = os.path.join(self.maildir, "new") self.curdir = os.path.join(self.maildir, "cur") self.tmpdir = os.path.join(self.maildir, "tmp") self.setUpDirs(self.maildir, self.newdir, self.curdir, self.tmpdir) self.svc = None def tearDown(self): if self.svc and self.svc.running: self.svc.stopService() self.tearDownDirs() # tests @defer.inlineCallbacks def test_start_stop_repeatedly(self): self.svc = maildir.MaildirService(self.maildir) self.svc.startService() yield self.svc.stopService() self.svc.startService() yield self.svc.stopService() self.assertEqual(len(list(self.svc)), 0) @defer.inlineCallbacks def test_messageReceived(self): self.svc = maildir.MaildirService(self.maildir) # add a fake messageReceived method messagesReceived = [] def messageReceived(filename): messagesReceived.append(filename) return defer.succeed(None) self.svc.messageReceived = messageReceived yield defer.maybeDeferred(self.svc.startService) self.assertEqual(messagesReceived, []) tmpfile = os.path.join(self.tmpdir, "newmsg") newfile = os.path.join(self.newdir, "newmsg") open(tmpfile, "w").close() os.rename(tmpfile, newfile) # TODO: can we wait for a dnotify somehow, if enabled? 
yield self.svc.poll() self.assertEqual(messagesReceived, ['newmsg']) def test_moveToCurDir(self): self.svc = maildir.MaildirService(self.maildir) tmpfile = os.path.join(self.tmpdir, "newmsg") newfile = os.path.join(self.newdir, "newmsg") open(tmpfile, "w").close() os.rename(tmpfile, newfile) f = self.svc.moveToCurDir("newmsg") f.close() self.assertEqual([os.path.exists(os.path.join(d, "newmsg")) for d in (self.newdir, self.curdir, self.tmpdir)], [False, True, False]) buildbot-2.6.0/master/buildbot/test/unit/test_util_misc.py000066400000000000000000000077461361162603000237750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.test.util.misc import TestReactorMixin from buildbot.util import misc class deferredLocked(unittest.TestCase): def test_name(self): self.assertEqual(util.deferredLocked, misc.deferredLocked) @defer.inlineCallbacks def test_fn(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def check_locked(arg1, arg2): self.assertEqual([lock.locked, arg1, arg2], [True, 1, 2]) return defer.succeed(None) yield check_locked(1, 2) self.assertFalse(lock.locked) @defer.inlineCallbacks def test_fn_fails(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def do_fail(): return defer.fail(RuntimeError("oh noes")) try: yield do_fail() self.fail("didn't errback") except Exception: self.assertFalse(lock.locked) @defer.inlineCallbacks def test_fn_exception(self): lock = defer.DeferredLock() @util.deferredLocked(lock) def do_fail(): raise RuntimeError("oh noes") # using decorators confuses pylint and gives a false positive below try: yield do_fail() # pylint: disable=assignment-from-no-return self.fail("didn't errback") except Exception: self.assertFalse(lock.locked) @defer.inlineCallbacks def test_method(self): testcase = self class C: @util.deferredLocked('aLock') def check_locked(self, arg1, arg2): testcase.assertEqual( [self.aLock.locked, arg1, arg2], [True, 1, 2]) return defer.succeed(None) obj = C() obj.aLock = defer.DeferredLock() yield obj.check_locked(1, 2) self.assertFalse(obj.aLock.locked) class TestCancelAfter(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.d = defer.Deferred() def test_succeeds(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertIdentical(d, self.d) @d.addCallback def check(r): self.assertEqual(r, "result") self.assertFalse(d.called) self.d.callback("result") self.assertTrue(d.called) @defer.inlineCallbacks def test_fails(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertFalse(d.called) self.d.errback(RuntimeError("oh noes")) self.assertTrue(d.called) yield self.assertFailure(d, RuntimeError) @defer.inlineCallbacks def test_timeout_succeeds(self): d = misc.cancelAfter(10, self.d, self.reactor) 
self.assertFalse(d.called) self.reactor.advance(11) d.callback("result") # ignored self.assertTrue(d.called) yield self.assertFailure(d, defer.CancelledError) @defer.inlineCallbacks def test_timeout_fails(self): d = misc.cancelAfter(10, self.d, self.reactor) self.assertFalse(d.called) self.reactor.advance(11) self.d.errback(RuntimeError("oh noes")) # ignored self.assertTrue(d.called) yield self.assertFailure(d, defer.CancelledError) buildbot-2.6.0/master/buildbot/test/unit/test_util_netstrings.py000066400000000000000000000032541361162603000252300ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.protocols import basic from twisted.trial import unittest from buildbot.util import netstrings class NetstringParser(unittest.TestCase): def test_valid_netstrings(self): p = netstrings.NetstringParser() p.feed("5:hello,5:world,") self.assertEqual(p.strings, [b'hello', b'world']) def test_valid_netstrings_byte_by_byte(self): # (this is really testing twisted's support, but oh well) p = netstrings.NetstringParser() [p.feed(c) for c in "5:hello,5:world,"] self.assertEqual(p.strings, [b'hello', b'world']) def test_invalid_netstring(self): p = netstrings.NetstringParser() with self.assertRaises(basic.NetstringParseError): p.feed("5-hello!") def test_incomplete_netstring(self): p = netstrings.NetstringParser() p.feed("11:hello world,6:foob") # note that the incomplete 'foobar' does not appear here self.assertEqual(p.strings, [b'hello world']) buildbot-2.6.0/master/buildbot/test/unit/test_util_notifier.py000066400000000000000000000067701361162603000246550ustar00rootroot00000000000000# Copyright Buildbot Team Members # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
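# --- Illustrative sketch, not part of the original test module: the
# --- buildbot.util.Notifier behaviour exercised by the tests below, in a
# --- minimal synchronous form.
from buildbot.util import Notifier

notifier = Notifier()
d1 = notifier.wait()        # a Deferred that has not fired yet
d2 = notifier.wait()        # any number of waiters may be outstanding
assert bool(notifier)       # truthy while there are waiters
notifier.notify('result')   # fires every outstanding waiter with the same value
assert not bool(notifier)   # the waiters have been cleared

results = []
d1.addCallback(results.append)
d2.addCallback(results.append)
# results == ['result', 'result']: every waiter saw the same value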
from twisted.python.failure import Failure from twisted.trial.unittest import SynchronousTestCase from buildbot.util import Notifier class TestException(Exception): """ An exception thrown in tests. """ class Tests(SynchronousTestCase): def test_wait(self): """ Calling `Notifier.wait` returns a deferred that hasn't fired. """ n = Notifier() self.assertNoResult(n.wait()) def test_notify_no_waiters(self): """ Calling `Notifier.notify` when there are no waiters does not raise. """ n = Notifier() n.notify(object()) # Does not raise. def test_notify_multiple_waiters(self): """ If there all multiple waiters, `Notifier.notify` fires all the deferreds with the same value. """ value = object() n = Notifier() ds = [n.wait(), n.wait()] n.notify(value) self.assertEqual( [self.successResultOf(d) for d in ds], [value] * 2, ) def test_new_waiters_not_notified(self): """ If a new waiter is added while notifying, it won't be notified until the next notification. """ value = object() n = Notifier() box = [] def add_new_waiter(_): box.append(n.wait()) n.wait().addCallback(add_new_waiter) n.notify(object()) self.assertNoResult(box[0]) n.notify(value) self.assertEqual( self.successResultOf(box[0]), value, ) def test_notify_failure(self): """ If a failure is passed to `Notifier.notify` then the waiters are errback'd. """ n = Notifier() d = n.wait() n.notify(Failure(TestException())) self.failureResultOf(d, TestException) def test_nonzero_waiters(self): """ If there are waiters, ``Notifier`` evaluates as `True`. """ n = Notifier() n.wait() self.assertTrue(n) def test_nonzero_no_waiters(self): """ If there no waiters, ``Notifier`` evaluates as `False`. """ n = Notifier() self.assertFalse(n) def test_nonzero_cleared_waiters(self): """ After notifying waiters, ``Notifier`` evaluates as `False`. """ n = Notifier() n.wait() n.notify(object()) self.assertFalse(n) buildbot-2.6.0/master/buildbot/test/unit/test_util_patch_delay.py000066400000000000000000000060711361162603000253050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
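# --- Illustrative sketch, not part of the original test module: how
# --- patchForDelay (exercised by the tests below) holds back a patched
# --- callable until the caller releases it.  `do_work` is a hypothetical
# --- module-level function standing in for the code under test.
from twisted.internet import defer
from buildbot.test.util.patch_delay import patchForDelay

def do_work(*args, **kwargs):
    # hypothetical function that normally completes immediately
    return defer.succeed((args, kwargs))

def demo_patch_for_delay():
    with patchForDelay(__name__ + '.do_work') as delay:
        d = do_work('arg', kw='kwarg')   # now returns an unfired Deferred
        assert len(delay) == 1           # exactly one delayed call is pending
        assert not d.called
        delay.fire()                     # let the original call complete
        assert d.called                  # fires with (('arg',), {'kw': 'kwarg'})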
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial.unittest import SynchronousTestCase from buildbot.test.util.patch_delay import patchForDelay class TestException(Exception): pass def fun_to_patch(*args, **kwargs): return defer.succeed((args, kwargs)) def fun_to_patch_exception(): raise TestException() non_callable = 1 class Tests(SynchronousTestCase): def test_raises_not_found(self): with self.assertRaises(Exception): with patchForDelay(__name__ + '.notfound'): pass def test_raises_not_callable(self): with self.assertRaises(Exception): with patchForDelay(__name__ + '.non_callable'): pass def test_patches_within_context(self): d = fun_to_patch() self.assertTrue(d.called) with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) delay.fire() self.assertEqual(len(delay), 0) self.assertTrue(d.called) d = fun_to_patch() self.assertTrue(d.called) def test_auto_fires_unfired_delay(self): with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) self.assertTrue(d.called) def test_auto_fires_unfired_delay_exception(self): try: with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch() self.assertEqual(len(delay), 1) self.assertFalse(d.called) raise TestException() except TestException: pass self.assertTrue(d.called) def test_passes_arguments(self): with patchForDelay(__name__ + '.fun_to_patch') as delay: d = fun_to_patch('arg', kw='kwarg') self.assertEqual(len(delay), 1) delay.fire() args = self.successResultOf(d) self.assertEqual(args, (('arg',), {'kw': 'kwarg'})) def test_passes_exception(self): with patchForDelay(__name__ + '.fun_to_patch_exception') as delay: d = fun_to_patch_exception() self.assertEqual(len(delay), 1) delay.fire() f = self.failureResultOf(d) f.check(TestException) buildbot-2.6.0/master/buildbot/test/unit/test_util_pathmatch.py000066400000000000000000000065141361162603000250030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
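# --- Illustrative sketch, not part of the original test module: the
# --- buildbot.util.pathmatch.Matcher lookups exercised by the tests below.
# --- The pattern names (`builderid`, `name`) are arbitrary examples.
from buildbot.util import pathmatch

matcher = pathmatch.Matcher()
matcher[('builders', 'n:builderid')] = 'by-id'    # 'n:' captures a number
matcher[('builders', 'i:name')] = 'by-name'       # 'i:' captures an identifier

value, kwargs = matcher[('builders', '12')]       # ('by-id', {'builderid': 12})
value, kwargs = matcher[('builders', 'slow')]     # ('by-name', {'name': 'slow'})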
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import pathmatch class Matcher(unittest.TestCase): def setUp(self): self.m = pathmatch.Matcher() def test_dupe_path(self): def set(): self.m[('abc,')] = 1 set() with self.assertRaises(AssertionError): set() def test_empty(self): with self.assertRaises(KeyError): self.m[('abc',)] def test_diff_length(self): self.m[('abc', 'def')] = 2 self.m[('ab', 'cd', 'ef')] = 3 self.assertEqual(self.m[('abc', 'def')], (2, {})) def test_same_length(self): self.m[('abc', 'def')] = 2 self.m[('abc', 'efg')] = 3 self.assertEqual(self.m[('abc', 'efg')], (3, {})) def test_pattern_variables(self): self.m[('A', ':a', 'B', ':b')] = 'AB' self.assertEqual(self.m[('A', 'a', 'B', 'b')], ('AB', dict(a='a', b='b'))) def test_pattern_variables_underscore(self): self.m[('A', ':a_a_a')] = 'AB' self.assertEqual(self.m[('A', 'a')], ('AB', dict(a_a_a='a'))) def test_pattern_variables_num(self): self.m[('A', 'n:a', 'B', 'n:b')] = 'AB' self.assertEqual(self.m[('A', '10', 'B', '-20')], ('AB', dict(a=10, b=-20))) def test_pattern_variables_ident(self): self.m[('A', 'i:a', 'B', 'i:b')] = 'AB' self.assertEqual(self.m[('A', 'abc', 'B', 'x-z-B')], ('AB', dict(a='abc', b='x-z-B'))) def test_pattern_variables_num_invalid(self): self.m[('A', 'n:a')] = 'AB' with self.assertRaises(KeyError): self.m[('A', '1x0')] def test_pattern_variables_ident_invalid(self): self.m[('A', 'i:a')] = 'AB' with self.assertRaises(KeyError): self.m[('A', '10')] def test_pattern_variables_ident_num_distinguised(self): self.m[('A', 'n:a')] = 'num' self.m[('A', 'i:a')] = 'ident' self.assertEqual(self.m[('A', '123')], ('num', dict(a=123))) self.assertEqual(self.m[('A', 'abc')], ('ident', dict(a='abc'))) def test_prefix_matching(self): self.m[('A', ':a')] = 'A' self.m[('A', ':a', 'B', ':b')] = 'AB' self.assertEqual( (self.m[('A', 'a1', 'B', 'b')], self.m['A', 'a2']), (('AB', dict(a='a1', b='b')), ('A', dict(a='a2')))) def test_dirty_again(self): self.m[('abc', 'def')] = 2 self.assertEqual(self.m[('abc', 'def')], (2, {})) self.m[('abc', 'efg')] = 3 self.assertEqual(self.m[('abc', 'def')], (2, {})) self.assertEqual(self.m[('abc', 'efg')], (3, {})) buildbot-2.6.0/master/buildbot/test/unit/test_util_poll.py000066400000000000000000000242341361162603000237770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
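# --- Illustrative sketch, not part of the original test module: the
# --- @poll.method behaviour exercised by the tests below.  The owning object
# --- is assumed to provide `self.master.reactor`, as the test fixtures do;
# --- the Watcher class itself is hypothetical.
from twisted.internet import defer
from buildbot.util import poll

class Watcher:
    def __init__(self, master):
        self.master = master    # must expose a `.reactor` attribute

    @poll.method
    def poll(self):
        # periodic work; may return a Deferred.  Runs do not overlap: a call
        # made while a run is in progress triggers another run once it ends.
        return defer.succeed(None)

# Typical driving pattern, mirroring the tests:
#   watcher.poll.start(interval=10, now=True)   # poll every 10 seconds, once immediately
#   watcher.poll()                              # force an extra run while started
#   d = watcher.poll.stop()                     # Deferred fires after any in-flight run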
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util.misc import TestReactorMixin from buildbot.util import poll class TestPollerSync(TestReactorMixin, unittest.TestCase): @poll.method def poll(self): self.calls += 1 if self.fail: raise RuntimeError('oh noes') def setUp(self): self.setUpTestReactor() self.master = mock.Mock() self.master.reactor = self.reactor poll.track_poll_methods() self.calls = 0 self.fail = False def tearDown(self): poll.reset_poll_methods() self.assertEqual(self.reactor.getDelayedCalls(), []) def test_not_started(self): """If the poll method isn't started, nothing happens""" self.reactor.advance(100) self.assertEqual(self.calls, 0) def test_call_when_stopped(self): """Calling the poll method does nothing when stopped.""" self.poll() self.assertEqual(self.calls, 0) def test_call_when_started(self): """Calling the poll method when started forces a run.""" self.poll.start(interval=100, now=False) self.poll() self.reactor.advance(0) self.assertEqual(self.calls, 1) return self.poll.stop() def test_run_now(self): """If NOW is true, the poll runs immediately""" self.poll.start(interval=10, now=True) self.assertEqual(self.calls, 1) return self.poll.stop() def test_no_run_now(self): """If NOW is false, the poll does not run immediately""" self.poll.start(interval=10, now=False) self.assertEqual(self.calls, 0) return self.poll.stop() def test_stop_twice(self): """Calling stop on a stopped poller does nothing""" self.poll.start(interval=1) d = self.poll.stop() self.assertTrue(d.called) d = self.poll.stop() self.assertTrue(d.called) def test_start_twice(self): """Calling start on an already-started loop is an error.""" self.poll.start(interval=1) with self.assertRaises(Exception): self.poll.start(interval=2) return self.poll.stop() def test_repeats_and_stops(self): """Polling repeats until stopped, and stop returns a Deferred""" self.poll.start(interval=10, now=True) while self.reactor.seconds() <= 200: self.assertEqual(self.calls, (self.reactor.seconds() // 10) + 1) self.reactor.advance(1) d = self.poll.stop() self.assertTrue(d.called) self.assertEqual(self.calls, 21) self.reactor.advance(10) self.assertEqual(self.calls, 21) def test_fails(self): """If the poll function fails, it is still called again, but the exception is logged each time.""" self.fail = True self.poll.start(interval=1, now=True) self.assertEqual(self.calls, 1) self.reactor.advance(1) self.assertEqual(self.calls, 2) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) return self.poll.stop() class TestPollerAsync(TestReactorMixin, unittest.TestCase): @poll.method def poll(self): assert not self.running, "overlapping call" self.running = True d = defer.Deferred() self.reactor.callLater(self.duration, d.callback, None) @d.addCallback def inc(_): self.calls += 1 self.running = False @d.addCallback def maybeFail(_): if self.fail: raise RuntimeError('oh noes') return d def setUp(self): self.setUpTestReactor() self.master = mock.Mock() self.master.reactor = self.reactor poll.track_poll_methods() self.calls = 0 self.running = False self.duration = 1 self.fail = False def tearDown(self): poll.reset_poll_methods() def test_run_now(self): """If NOW is true, the poll begins immediately""" self.poll.start(interval=10, now=True) self.assertEqual(self.calls, 0) self.assertTrue(self.running) self.reactor.advance(self.duration) self.assertEqual(self.calls, 1) self.assertFalse(self.running) def test_no_run_now(self): """If NOW is 
false, the poll begins after the interval""" self.poll.start(interval=10, now=False) self.assertEqual(self.calls, 0) self.assertFalse(self.running) self.reactor.advance(10) self.assertEqual(self.calls, 0) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 1) self.assertFalse(self.running) def test_repeats_and_stops(self): """ Polling repeats until stopped, and stop returns a Deferred. The duration of the function's execution does not affect the execution interval: executions occur every 10 seconds. """ self.poll.start(interval=10, now=True) while self.reactor.seconds() <= 200: self.assertEqual(self.calls, (self.reactor.seconds() + 9) // 10) self.assertEqual(self.running, self.reactor.seconds() % 10 == 0) self.reactor.advance(1) d = self.poll.stop() self.assertTrue(d.called) self.assertEqual(self.calls, 21) self.reactor.advance(10) self.assertEqual(self.calls, 21) def test_fails(self): """If the poll function fails, it is still called again, but the exception is logged each time.""" self.fail = True self.poll.start(interval=10, now=True) self.reactor.advance(1) self.assertEqual(self.calls, 1) self.reactor.advance(10) self.assertTrue(self.running) self.reactor.advance(1) self.assertEqual(self.calls, 2) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 2) def test_stop_while_running(self): """If stop is called while the poll function is running, then stop's Deferred does not fire until the run is complete.""" self.duration = 2 self.poll.start(interval=10) self.reactor.advance(10) self.assertTrue(self.running) d = self.poll.stop() self.assertFalse(d.called) # not stopped yet self.reactor.advance(1) self.assertFalse(d.called) self.reactor.advance(1) self.assertTrue(d.called) def test_call_while_running(self): """Calling the poll method while the decorated method is running causes a second call as soon as the first is done.""" self.duration = 5 self.poll.start(interval=10, now=True) self.reactor.advance(3) self.poll() self.reactor.advance(2) self.assertEqual(self.calls, 1) self.reactor.advance(5) self.assertEqual(self.calls, 2) def test_call_while_running_then_stop(self): """Calling the poll method while the decorated method is running, then calling stop will wait for both invocations to complete.""" self.duration = 5 self.poll.start(interval=10, now=True) self.reactor.advance(3) self.poll() d = self.poll.stop() self.reactor.advance(2) self.assertEqual(self.calls, 1) self.reactor.advance(4) self.assertEqual(self.calls, 1) self.assertFalse(d.called) self.reactor.advance(1) self.assertEqual(self.calls, 2) self.assertTrue(d.called) def test_stop_twice_while_running(self): """If stop is called *twice* while the poll function is running, then neither Deferred fires until the run is complete.""" self.duration = 2 self.poll.start(interval=10) self.reactor.advance(10) self.assertTrue(self.running) d1 = self.poll.stop() self.assertFalse(d1.called) # not stopped yet self.reactor.advance(1) d2 = self.poll.stop() self.assertFalse(d2.called) self.reactor.advance(1) self.assertTrue(d1.called) self.assertTrue(d2.called) def test_stop_and_restart(self): """If the method is immediately restarted from a callback on a stop Deferred, the polling continues with the new start time.""" self.duration = 6 self.poll.start(interval=10) self.reactor.advance(10) self.assertTrue(self.running) d = self.poll.stop() d.addCallback(lambda _: self.poll.start(interval=10)) self.assertFalse(d.called) # not stopped yet self.reactor.advance(6) self.assertFalse(self.running) 
self.assertTrue(d.called) self.reactor.advance(10) self.assertEqual(self.reactor.seconds(), 26) self.assertTrue(self.running) def test_long_method(self): """If the method takes more than INTERVAL seconds to execute, then it is re-invoked at the next multiple of INTERVAL seconds""" self.duration = 4 self.poll.start(interval=3, now=True) exp = [ (0, True, 0), (1, True, 0), (2, True, 0), (3, True, 0), (4, False, 1), (5, False, 1), (6, True, 1), # next multiple of 3 (10, False, 2), (12, True, 2), (16, False, 3), ] for secs, running, calls in exp: while self.reactor.seconds() < secs: self.reactor.advance(1) self.assertEqual(self.running, running) self.assertEqual(self.calls, calls) buildbot-2.6.0/master/buildbot/test/unit/test_util_private_tempdir.py000066400000000000000000000035451361162603000262310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import shutil import tempfile from twisted.trial import unittest from buildbot.test.util.decorators import skipUnlessPlatformIs from buildbot.util.private_tempdir import PrivateTemporaryDirectory class TestTemporaryDirectory(unittest.TestCase): # In this test we want to also check potential platform differences, so # we don't mock the filesystem access def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def test_simple(self): with PrivateTemporaryDirectory(dir=self.tempdir) as dir: self.assertTrue(os.path.isdir(dir)) self.assertFalse(os.path.isdir(dir)) @skipUnlessPlatformIs('posix') def test_mode(self): with PrivateTemporaryDirectory(dir=self.tempdir, mode=0o700) as dir: self.assertEqual(0o40700, os.stat(dir).st_mode) def test_cleanup(self): ctx = PrivateTemporaryDirectory(dir=self.tempdir) self.assertTrue(os.path.isdir(ctx.name)) ctx.cleanup() self.assertFalse(os.path.isdir(ctx.name)) ctx.cleanup() # also check whether multiple calls don't throw ctx.cleanup() buildbot-2.6.0/master/buildbot/test/unit/test_util_raml.py000066400000000000000000000063171361162603000237660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
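# --- Illustrative sketch, not part of the original test module: the RamlSpec
# --- accessors exercised by the tests below, applied to Buildbot's bundled
# --- RAML description of its REST API.
from buildbot.util import raml

spec = raml.RamlSpec()                           # parses the bundled API spec
paths = list(spec.endpoints.keys())              # e.g. '/masters/{masterid}/...'
build_type = spec.types['build']                 # type definitions, with an 'example'
worker_eps = spec.endpoints_by_type['worker']    # endpoints grouped by result type
build_ep = spec.endpoints_by_type['build']['/builds/{buildid}']
actions = dict(spec.iter_actions(build_ep))      # e.g. {'rebuild': ..., 'stop': ...}
pretty = spec.format_json(build_type['example'], 0)   # indented JSON rendering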
# # Copyright Buildbot Team Members import textwrap from twisted.trial import unittest from buildbot.util import raml class TestRaml(unittest.TestCase): def setUp(self): self.api = raml.RamlSpec() def test_api(self): self.assertTrue(self.api.api is not None) def test_endpoints(self): self.assertIn( "/masters/{masterid}/builders/{builderid}/workers/{workerid}", self.api.endpoints.keys()) def test_endpoints_uri_parameters(self): # comparison of OrderedDict do not take in account order :( # this is why we compare str repr, to make sure the endpoints are in # the right order self.assertEqual(str(self.api.endpoints[ "/masters/{masterid}/builders/{builderid}/workers/{workerid}"]['uriParameters']), str(raml.OrderedDict([ ('masterid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the master')])), ('builderid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the builder')])), ('workerid', raml.OrderedDict([ ('type', 'number'), ('description', 'the id of the worker')]))])) ) def test_types(self): self.assertIn( "log", self.api.types.keys()) def test_json_example(self): self.assertEqual( textwrap.dedent( self.api.format_json(self.api.types["build"]['example'], 0)), textwrap.dedent(""" { "builderid": 10, "buildid": 100, "buildrequestid": 13, "workerid": 20, "complete": false, "complete_at": null, "masterid": 824, "number": 1, "results": null, "started_at": 1451001600, "state_string": "created", "properties": {} }""").strip()) def test_endpoints_by_type(self): self.assertIn( "/masters/{masterid}/builders/{builderid}/workers/{workerid}", self.api.endpoints_by_type['worker'].keys()) def test_iter_actions(self): build = self.api.endpoints_by_type['build'] actions = dict(self.api.iter_actions(build['/builds/{buildid}'])) self.assertEqual(sorted(actions.keys()), sorted(['rebuild', 'stop'])) def test_rawendpoints(self): self.assertIn( "/steps/{stepid}/logs/{log_slug}/raw", self.api.rawendpoints.keys()) buildbot-2.6.0/master/buildbot/test/unit/test_util_sautils.py000066400000000000000000000016111361162603000245070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.util import sautils class SAVersion(unittest.TestCase): def test_sa_version(self): self.assertTrue(sautils.sa_version() > (0, 5, 0)) buildbot-2.6.0/master/buildbot/test/unit/test_util_service.py000066400000000000000000000664451361162603000245030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.trial import unittest from buildbot import config from buildbot.process.properties import Interpolate from buildbot.util import service class DeferredStartStop(service.AsyncService): def startService(self): self.d = defer.Deferred() return self.d def stopService(self): self.d = defer.Deferred() return self.d class AsyncMultiService(unittest.TestCase): def setUp(self): self.svc = service.AsyncMultiService() @defer.inlineCallbacks def test_empty(self): yield self.svc.startService() yield self.svc.stopService() @defer.inlineCallbacks def test_waits_for_child_services(self): child = DeferredStartStop() yield child.setServiceParent(self.svc) d = self.svc.startService() self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) d = self.svc.stopService() self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) @defer.inlineCallbacks def test_child_fails(self): child = DeferredStartStop() yield child.setServiceParent(self.svc) d = self.svc.startService() self.assertFalse(d.called) child.d.errback(RuntimeError('oh noes')) self.assertTrue(d.called) @d.addErrback def check(f): f.check(RuntimeError) d = self.svc.stopService() self.assertFalse(d.called) child.d.errback(RuntimeError('oh noes')) self.assertTrue(d.called) @d.addErrback def check_again(f): f.check(RuntimeError) def test_child_starts_on_sSP(self): d = self.svc.startService() self.assertTrue(d.called) child = DeferredStartStop() d = child.setServiceParent(self.svc) self.assertFalse(d.called) child.d.callback(None) self.assertTrue(d.called) class ClusteredBuildbotService(unittest.TestCase): SVC_NAME = 'myName' SVC_ID = 20 class DummyService(service.ClusteredBuildbotService): pass def setUp(self): self.svc = self.makeService() def tearDown(self): pass def makeService(self, name=SVC_NAME, serviceid=SVC_ID): svc = self.DummyService(name=name) svc.clock = task.Clock() self.setServiceClaimable(svc, defer.succeed(False)) self.setActivateToReturn(svc, defer.succeed(None)) self.setDeactivateToReturn(svc, defer.succeed(None)) self.setGetServiceIdToReturn(svc, defer.succeed(serviceid)) self.setUnclaimToReturn(svc, defer.succeed(None)) return svc def makeMock(self, value): mockObj = mock.Mock() if isinstance(value, Exception): mockObj.side_effect = value else: mockObj.return_value = value return mockObj def setServiceClaimable(self, svc, claimable): svc._claimService = self.makeMock(claimable) def setGetServiceIdToReturn(self, svc, serviceid): svc._getServiceId = self.makeMock(serviceid) def setUnclaimToReturn(self, svc, unclaim): svc._unclaimService = self.makeMock(unclaim) def setActivateToReturn(self, svc, activate): svc.activate = self.makeMock(activate) def setDeactivateToReturn(self, svc, deactivate): svc.deactivate = self.makeMock(deactivate) def test_name_PreservesUnicodePromotion(self): svc = self.makeService(name='n') self.assertIsInstance(svc.name, str) self.assertEqual(svc.name, 'n') def test_name_GetsUnicodePromotion(self): svc = self.makeService(name='n') self.assertIsInstance(svc.name, str) self.assertEqual(svc.name, 'n') def test_compare(self): a = self.makeService(name='a', serviceid=20) b1 = 
self.makeService(name='b', serviceid=21) b2 = self.makeService(name='b', serviceid=21) # same args as 'b1' b3 = self.makeService(name='b', serviceid=20) # same id as 'a' self.assertTrue(a == a) self.assertTrue(a != b1) self.assertTrue(a != b2) self.assertTrue(a != b3) self.assertTrue(b1 != a) self.assertTrue(b1 == b1) self.assertTrue(b1 == b2) self.assertTrue(b1 == b3) def test_create_NothingCalled(self): # None of the member functions get called until startService happens self.assertFalse(self.svc.activate.called) self.assertFalse(self.svc.deactivate.called) self.assertFalse(self.svc._getServiceId.called) self.assertFalse(self.svc._claimService.called) self.assertFalse(self.svc._unclaimService.called) def test_create_IsInactive(self): # starts in inactive state self.assertFalse(self.svc.isActive()) def test_create_HasNoServiceIdYet(self): # has no service id at first self.assertIdentical(self.svc.serviceid, None) def test_start_UnclaimableSoNotActiveYet(self): self.svc.startService() self.assertFalse(self.svc.isActive()) def test_start_GetsServiceIdAssigned(self): self.svc.startService() self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(self.SVC_ID, self.svc.serviceid) def test_start_WontPollYet(self): self.svc.startService() # right before the poll interval, nothing has tried again yet self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 0.95) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertFalse(self.svc.isActive()) @defer.inlineCallbacks def test_start_PollButClaimFails(self): yield self.svc.startService() # at the POLL time, it gets called again, but we're still inactive... self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 1.05) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(2, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_start_PollsPeriodically(self): NUMBER_OF_POLLS = 15 self.svc.startService() for i in range(NUMBER_OF_POLLS): self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual( 1 + NUMBER_OF_POLLS, self.svc._claimService.call_count) def test_start_ClaimSucceeds(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) def test_start_PollingAfterClaimSucceedsDoesNothing(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # another epoch shouldn't do anything further... 
self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 2) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) def test_stopWhileStarting_NeverActive(self): self.svc.startService() # .. claim fails stopDeferred = self.svc.stopService() # a stop at this point unwinds things immediately self.successResultOf(stopDeferred) # advance the clock, and nothing should happen self.svc.clock.advance(self.svc.POLL_INTERVAL_SEC * 2) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertFalse(self.svc.isActive()) def test_stop_AfterActivated(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # now deactivate: stopDeferred = self.svc.stopService() # immediately stops self.successResultOf(stopDeferred) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_AfterActivated_NoDeferred(self): # set all the child-class functions to return non-deferreds, # just to check we can handle both: self.setServiceClaimable(self.svc, True) self.setActivateToReturn(self.svc, None) self.setDeactivateToReturn(self.svc, None) self.setGetServiceIdToReturn(self.svc, self.SVC_ID) self.setUnclaimToReturn(self.svc, None) self.svc.startService() # now deactivate: stopDeferred = self.svc.stopService() # immediately stops self.successResultOf(stopDeferred) self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_getServiceIdTakesForever(self): # create a deferred that will take a while... svcIdDeferred = defer.Deferred() self.setGetServiceIdToReturn(self.svc, svcIdDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it has the service id (the svcIdDeferred is stuck) stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(0, self.svc._claimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let service id part finish svcIdDeferred.callback(None) # ... which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_claimServiceTakesForever(self): # create a deferred that will take a while... 
claimDeferred = defer.Deferred() self.setServiceClaimable(self.svc, claimDeferred) self.svc.startService() # .. claim is still pending here # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(0, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let claim succeed, but we should see things unwind claimDeferred.callback(True) # ... which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stopWhileStarting_activateTakesForever(self): """If activate takes forever, things acquiesce nicely""" # create a deferreds that will take a while... activateDeferred = defer.Deferred() self.setActivateToReturn(self.svc, activateDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(0, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(True, self.svc.isActive()) # then let activate finish activateDeferred.callback(None) # ... which will cause the stop to also finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.activate.call_count) self.assertEqual(1, self.svc._getServiceId.call_count) self.assertEqual(1, self.svc._claimService.call_count) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_unclaimTakesForever(self): # create a deferred that will take a while... unclaimDeferred = defer.Deferred() self.setUnclaimToReturn(self.svc, unclaimDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) # .. no deactivates yet.... self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let unclaim part finish unclaimDeferred.callback(None) # ... which will cause the stop to finish self.successResultOf(stopDeferred) # and everything should unwind: self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_stop_deactivateTakesForever(self): # create a deferred that will take a while... 
deactivateDeferred = defer.Deferred() self.setDeactivateToReturn(self.svc, deactivateDeferred) self.setServiceClaimable(self.svc, defer.succeed(True)) self.svc.startService() # stop before it's done activating stopDeferred = self.svc.stopService() self.assertNoResult(stopDeferred) self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(0, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) # then let deactivate finish deactivateDeferred.callback(None) # ... which will cause the stop to finish self.successResultOf(stopDeferred) # and everything else should unwind too: self.assertEqual(1, self.svc.deactivate.call_count) self.assertEqual(1, self.svc._unclaimService.call_count) self.assertEqual(False, self.svc.isActive()) def test_claim_raises(self): self.setServiceClaimable(self.svc, RuntimeError()) self.svc.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) @defer.inlineCallbacks def test_activate_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setActivateToReturn(self.svc, RuntimeError()) yield self.svc.startService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) # half-active: we actually return True in this case: self.assertEqual(True, self.svc.isActive()) def test_deactivate_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setDeactivateToReturn(self.svc, RuntimeError()) self.svc.startService() self.svc.stopService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) def test_unclaim_raises(self): self.setServiceClaimable(self.svc, defer.succeed(True)) self.setUnclaimToReturn(self.svc, RuntimeError()) self.svc.startService() self.svc.stopService() self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) self.assertEqual(False, self.svc.isActive()) class MyService(service.BuildbotService): def checkConfig(self, foo, a=None): if a is None: config.error("a must be specified") return defer.succeed(True) def reconfigService(self, *argv, **kwargs): self.config = argv, kwargs return defer.succeed(None) class fakeConfig: pass class fakeMaster(service.MasterService, service.ReconfigurableServiceMixin): pass def makeFakeMaster(): m = fakeMaster() m.db = mock.Mock() return m class BuildbotService(unittest.TestCase): def setUp(self): self.master = makeFakeMaster() @defer.inlineCallbacks def prepareService(self): self.master.config = fakeConfig() serv = MyService(1, a=2, name="basic") yield serv.setServiceParent(self.master) yield self.master.startService() yield serv.reconfigServiceWithSibling(serv) return serv @defer.inlineCallbacks def testNominal(self): yield self.prepareService() self.assertEqual( self.master.namedServices["basic"].config, ((1,), dict(a=2))) @defer.inlineCallbacks def testConfigDict(self): serv = yield self.prepareService() self.assertEqual(serv.getConfigDict(), { 'args': (1,), 'class': 'buildbot.test.unit.test_util_service.MyService', 'kwargs': {'a': 2}, 'name': 'basic'}) def testNoName(self): with self.assertRaises(ValueError): MyService(1, a=2) def testChecksDone(self): with self.assertRaises(config.ConfigErrors): MyService(1, name="foo") class BuildbotServiceManager(unittest.TestCase): def setUp(self): self.master = makeFakeMaster() @defer.inlineCallbacks def prepareService(self): self.master.config = fakeConfig() serv = MyService(1, a=2, name="basic") self.master.config.services = {"basic": serv} self.manager = 
service.BuildbotServiceManager() yield self.manager.setServiceParent(self.master) yield self.master.startService() yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) return serv @defer.inlineCallbacks def testNominal(self): yield self.prepareService() self.assertEqual( self.manager.namedServices["basic"].config, ((1,), dict(a=2))) @defer.inlineCallbacks def testReconfigNoChange(self): serv = yield self.prepareService() serv.config = None # 'de-configure' the service # reconfigure with the same config serv2 = MyService(1, a=2, name="basic") self.master.config.services = {"basic": serv2} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is not used self.assertNotIdentical(self.manager.namedServices["basic"], serv2) # reconfigServiceWithConstructorArgs was not called self.assertEqual(serv.config, None) @defer.inlineCallbacks def testReconfigWithChanges(self): serv = yield self.prepareService() serv.config = None # 'de-configure' the service # reconfigure with the different config serv2 = MyService(1, a=4, name="basic") self.master.config.services = {"basic": serv2} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is not used self.assertNotIdentical(self.manager.namedServices["basic"], serv2) # reconfigServiceWithConstructorArgs was called with new config self.assertEqual(serv.config, ((1,), dict(a=4))) def testNoName(self): with self.assertRaises(ValueError): MyService(1, a=2) def testChecksDone(self): with self.assertRaises(config.ConfigErrors): MyService(1, name="foo") @defer.inlineCallbacks def testReconfigWithNew(self): serv = yield self.prepareService() # reconfigure with the new service serv2 = MyService(1, a=4, name="basic2") self.master.config.services['basic2'] = serv2 # the second service is not there yet self.assertIdentical(self.manager.namedServices.get("basic2"), None) # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices["basic"], serv) # the second service is created self.assertIdentical(self.manager.namedServices["basic2"], serv2) # reconfigServiceWithConstructorArgs was called with new config self.assertEqual(serv2.config, ((1,), dict(a=4))) @defer.inlineCallbacks def testReconfigWithDeleted(self): serv = yield self.prepareService() self.assertEqual(serv.running, True) # remove all self.master.config.services = {} # reconfigure the master yield self.master.reconfigServiceWithBuildbotConfig(self.master.config) # the first service is still used self.assertIdentical(self.manager.namedServices.get("basic"), None) self.assertEqual(serv.running, False) @defer.inlineCallbacks def testConfigDict(self): yield self.prepareService() self.assertEqual(self.manager.getConfigDict(), { 'childs': [{ 'args': (1,), 'class': 'buildbot.test.unit.test_util_service.MyService', 'kwargs': {'a': 2}, 'name': 'basic'}], 'name': 'services'}) @defer.inlineCallbacks def testRenderSecrets(self): yield self.prepareService() service = self.manager.namedServices['basic'] test = yield service.renderSecrets(Interpolate('test_string')) self.assertEqual(test, 'test_string') @defer.inlineCallbacks def testRenderSecrets2Args(self): yield 
self.prepareService() service = self.manager.namedServices['basic'] test, test2 = yield service.renderSecrets(Interpolate('test_string'), 'ok_for_non_renderable') self.assertEqual(test, 'test_string') self.assertEqual(test2, 'ok_for_non_renderable') @defer.inlineCallbacks def testRenderSecretsWithTuple(self): yield self.prepareService() service = self.manager.namedServices['basic'] test = yield service.renderSecrets(('user', Interpolate('test_string'))) self.assertEqual(test, ('user', 'test_string')) class UnderTestSharedService(service.SharedService): def __init__(self, arg1=None): super().__init__() class UnderTestDependentService(service.AsyncService): @defer.inlineCallbacks def startService(self): self.dependent = yield UnderTestSharedService.getService(self.parent) def stopService(self): assert self.dependent.running class SharedService(unittest.TestCase): @defer.inlineCallbacks def test_bad_constructor(self): parent = service.AsyncMultiService() with self.assertRaises(Exception): yield UnderTestSharedService.getService(parent, arg2="foo") @defer.inlineCallbacks def test_creation(self): parent = service.AsyncMultiService() r = yield UnderTestSharedService.getService(parent) r2 = yield UnderTestSharedService.getService(parent) r3 = yield UnderTestSharedService.getService(parent, "arg1") r4 = yield UnderTestSharedService.getService(parent, "arg1") self.assertIdentical(r, r2) self.assertNotIdentical(r, r3) self.assertIdentical(r3, r4) self.assertEqual(len(list(iter(parent))), 2) @defer.inlineCallbacks def test_startup(self): """the service starts when parent starts and stop""" parent = service.AsyncMultiService() r = yield UnderTestSharedService.getService(parent) self.assertEqual(r.running, 0) yield parent.startService() self.assertEqual(r.running, 1) yield parent.stopService() self.assertEqual(r.running, 0) @defer.inlineCallbacks def test_already_started(self): """the service starts during the getService if parent already started""" parent = service.AsyncMultiService() yield parent.startService() r = yield UnderTestSharedService.getService(parent) self.assertEqual(r.running, 1) # then we stop the parent, and the shared service stops yield parent.stopService() self.assertEqual(r.running, 0) @defer.inlineCallbacks def test_already_stopped_last(self): parent = service.AsyncMultiService() o = UnderTestDependentService() yield o.setServiceParent(parent) yield parent.startService() yield parent.stopService() buildbot-2.6.0/master/buildbot/test/unit/test_util_ssl.py000066400000000000000000000026641361162603000236350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
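# Illustrative sketch, not part of the archived sources: a minimal
# BuildbotService subclass showing the contract exercised by the MyService
# tests above.  checkConfig() validates the constructor arguments with
# config.error() (so bad values raise config.ConfigErrors at construction
# time), and reconfigService() receives those same arguments on startup and on
# every master reconfiguration.  "ExampleReporter", its arguments and the
# instance name are placeholders invented for this sketch.
from twisted.internet import defer

from buildbot import config
from buildbot.util import service


class ExampleReporter(service.BuildbotService):

    def checkConfig(self, serverUrl, token=None):
        # runs at construction time; problems are reported via config.error()
        if token is None:
            config.error("token must be specified")

    def reconfigService(self, serverUrl, token=None):
        # runs on start and again after each reconfig, with the same arguments
        self.serverUrl = serverUrl
        self.token = token
        return defer.succeed(None)


# every BuildbotService needs a name= keyword; omitting it raises ValueError
example_service = ExampleReporter('https://example.invalid', token='s3cret',
                                  name='example-reporter')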
# # Copyright Buildbot Team Members import mock from twisted.trial import unittest from buildbot import config from buildbot.util import ssl class Tests(unittest.TestCase): @ssl.skipUnless def test_ClientContextFactory(self): from twisted.internet.ssl import ClientContextFactory self.assertEqual(ssl.ClientContextFactory, ClientContextFactory) @ssl.skipUnless def test_ConfigError(self): ssl.ssl_import_error = "lib xxx do not exist" ssl.has_ssl = False self.patch(config, "_errors", mock.Mock()) ssl.ensureHasSSL("myplugin") config._errors.addError.assert_called_with( "TLS dependencies required for myplugin are not installed : " "lib xxx do not exist\n pip install 'buildbot[tls]'") buildbot-2.6.0/master/buildbot/test/unit/test_util_state.py000066400000000000000000000051571361162603000241540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import state class FakeObject(state.StateMixin): name = "fake-name" def __init__(self, master): self.master = master class TestStateMixin(TestReactorMixin, unittest.TestCase): OBJECTID = 19 def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True) self.object = FakeObject(self.master) @defer.inlineCallbacks def test_getState(self): self.master.db.state.fakeState('fake-name', 'FakeObject', fav_color=['red', 'purple']) res = yield self.object.getState('fav_color') self.assertEqual(res, ['red', 'purple']) @defer.inlineCallbacks def test_getState_default(self): res = yield self.object.getState('fav_color', 'black') self.assertEqual(res, 'black') def test_getState_KeyError(self): self.master.db.state.fakeState('fake-name', 'FakeObject', fav_color=['red', 'purple']) d = self.object.getState('fav_book') def cb(_): self.fail("should not succeed") def check_exc(f): f.trap(KeyError) d.addCallbacks(cb, check_exc) return d @defer.inlineCallbacks def test_setState(self): yield self.object.setState('y', 14) self.master.db.state.assertStateByClass('fake-name', 'FakeObject', y=14) @defer.inlineCallbacks def test_setState_existing(self): self.master.db.state.fakeState('fake-name', 'FakeObject', x=13) yield self.object.setState('x', 14) self.master.db.state.assertStateByClass('fake-name', 'FakeObject', x=14) buildbot-2.6.0/master/buildbot/test/unit/test_util_subscriptions.py000066400000000000000000000063661361162603000257460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
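# Illustrative sketch, not part of the archived sources: the StateMixin usage
# exercised by TestStateMixin above.  The mixin persists values keyed by the
# object's name and class and only needs .name and .master attributes.
# "ExampleScheduler" and the state key are placeholders for this sketch.
from twisted.internet import defer

from buildbot.util import state


class ExampleScheduler(state.StateMixin):
    name = "example-scheduler"      # state rows are looked up by name + class

    def __init__(self, master):
        self.master = master        # getState/setState go through master.db

    @defer.inlineCallbacks
    def bump_last_build(self, number):
        # a default value avoids the KeyError raised for unknown keys
        previous = yield self.getState('last_build', None)
        yield self.setState('last_build', number)
        return previous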
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.util import subscription class subscriptions(unittest.TestCase): def setUp(self): self.subpt = subscription.SubscriptionPoint('test_sub') def test_str(self): self.assertIn('test_sub', str(self.subpt)) def test_subscribe_unsubscribe(self): state = [] def cb(*args, **kwargs): state.append((args, kwargs)) # subscribe sub = self.subpt.subscribe(cb) self.assertTrue(isinstance(sub, subscription.Subscription)) self.assertEqual(state, []) # deliver self.subpt.deliver(1, 2, a=3, b=4) self.assertEqual(state, [((1, 2), dict(a=3, b=4))]) state.pop() # unsubscribe sub.unsubscribe() # don't receive events anymore self.subpt.deliver(3, 4) self.assertEqual(state, []) def test_exception(self): def cb(*args, **kwargs): raise RuntimeError('mah bucket!') # subscribe self.subpt.subscribe(cb) try: self.subpt.deliver() except RuntimeError: self.fail("should not have seen exception here!") # log.err will cause Trial to complain about this error anyway, unless # we clean it up self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError))) def test_deliveries_finished(self): state = [] def create_cb(d): def cb(*args): state.append(args) return d return cb d1 = defer.Deferred() d2 = defer.Deferred() self.subpt.subscribe(create_cb(d1)) self.subpt.subscribe(create_cb(d2)) self.assertEqual(state, []) self.subpt.deliver(1, 2) self.assertEqual(state, [(1, 2), (1, 2)]) d = self.subpt.waitForDeliveriesToFinish() self.assertFalse(d.called) d1.callback(None) self.assertFalse(d.called) d2.callback(None) self.assertTrue(d.called) # when there are no waiting deliveries, should call the callback immediately d = self.subpt.waitForDeliveriesToFinish() self.assertTrue(d.called) def test_deliveries_not_finished_within_callback(self): state = [] def cb(*args): state.append(args) d = self.subpt.waitForDeliveriesToFinish() self.assertFalse(d.called) self.subpt.subscribe(cb) self.assertEqual(state, []) self.subpt.deliver(1, 2) self.assertEqual(state, [(1, 2)]) buildbot-2.6.0/master/buildbot/test/unit/test_util_tuplematch.py000066400000000000000000000024031361162603000251710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
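# Illustrative sketch, not part of the archived sources: the SubscriptionPoint
# behaviour exercised above.  Topic name and callback are invented; exceptions
# raised by a callback are logged rather than propagated to deliver().
from buildbot.util import subscription

point = subscription.SubscriptionPoint('example-events')


def on_event(*args, **kwargs):
    print('delivered', args, kwargs)


sub = point.subscribe(on_event)      # returns a Subscription handle
point.deliver(1, 2, a=3, b=4)        # every current subscriber is called
sub.unsubscribe()                    # no further deliveries to this callback

# callbacks may return Deferreds; this Deferred fires once all have completed
d = point.waitForDeliveriesToFinish()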
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot.test.util import tuplematching from buildbot.util import tuplematch class MatchTuple(tuplematching.TupleMatchingMixin, unittest.TestCase): # called by the TupleMatchingMixin methods def do_test_match(self, routingKey, shouldMatch, filter): result = tuplematch.matchTuple(routingKey, filter) self.assertEqual(shouldMatch, result, '%r %s %r' % (routingKey, 'should match' if shouldMatch else "shouldn't match", filter)) buildbot-2.6.0/master/buildbot/test/unit/test_version.py000066400000000000000000000042661361162603000234640ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.trial import unittest class VersioningUtilsTests(unittest.SynchronousTestCase): # Version utils are copied in three packages. # this unit test is made to be able to test the three versions # with the same test module_under_test = "buildbot" def setUp(self): try: self.m = __import__(self.module_under_test) except ImportError: raise unittest.SkipTest(self.module_under_test + " package is not installed") def test_gitDescribeToPep440devVersion(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.8-20-gf0f45ca"), "0.9.9-dev20") def test_gitDescribeToPep440tag(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.8"), "0.9.8") def test_gitDescribeToPep440p1tag(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.9.post1"), "0.9.9.post1") def test_gitDescribeToPep440p1dev(self): self.assertEqual(self.m.gitDescribeToPep440("v0.9.9.post1-20-gf0f45ca"), "0.9.10-dev20") def test_getVersionFromArchiveIdNoTag(self): self.assertEqual(self.m.getVersionFromArchiveId("1514651968 (git-archive-version)"), "2017.12.30") def test_getVersionFromArchiveIdtag(self): self.assertEqual(self.m.getVersionFromArchiveId('1514808197 (HEAD -> master, tag: v1.0.0)'), "1.0.0") class VersioningUtilsTests_PKG(VersioningUtilsTests): module_under_test = "buildbot_pkg" class VersioningUtilsTests_WORKER(VersioningUtilsTests): module_under_test = "buildbot_worker" buildbot-2.6.0/master/buildbot/test/unit/test_wamp_connector.py000066400000000000000000000063661361162603000250200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
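# Illustrative sketch, not part of the archived sources: the version helpers
# checked by VersioningUtilsTests above, using the same inputs and outputs the
# tests assert.
import buildbot

# a plain tag maps directly to its PEP 440 version
assert buildbot.gitDescribeToPep440("v0.9.8") == "0.9.8"
# commits after a tag become a dev version of the next release
assert buildbot.gitDescribeToPep440("v0.9.8-20-gf0f45ca") == "0.9.9-dev20"
# git-archive exports carry either a tag or a timestamp-derived version
assert buildbot.getVersionFromArchiveId(
    '1514808197 (HEAD -> master, tag: v1.0.0)') == "1.0.0"
assert buildbot.getVersionFromArchiveId(
    "1514651968 (git-archive-version)") == "2017.12.30"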
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.util import service from buildbot.wamp import connector class FakeConfig: mq = dict(type='wamp', router_url="wss://foo", realm="bb") class FakeService(service.AsyncMultiService): name = "fakeWampService" # Fake wamp service # just call the maker on demand by the test def __init__(self, url, realm, make, extra=None, debug=False, debug_wamp=False, debug_app=False): super().__init__() self.make = make self.extra = extra def gotConnection(self): self.make(None) r = self.make(self) r.publish = mock.Mock(spec=r.publish) r.register = mock.Mock(spec=r.register) r.subscribe = mock.Mock(spec=r.subscribe) r.onJoin(None) class TestedWampConnector(connector.WampConnector): serviceClass = FakeService class WampConnector(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() master = fakemaster.make_master(self) self.connector = TestedWampConnector() yield self.connector.setServiceParent(master) yield master.startService() yield self.connector.reconfigServiceWithBuildbotConfig(FakeConfig()) @defer.inlineCallbacks def test_startup(self): d = self.connector.getService() self.connector.app.gotConnection() yield d # 824 is the hardcoded masterid of fakemaster self.connector.service.publish.assert_called_with( "org.buildbot.824.connected") @defer.inlineCallbacks def test_subscribe(self): d = self.connector.subscribe('callback', 'topic', 'options') self.connector.app.gotConnection() yield d self.connector.service.subscribe.assert_called_with( 'callback', 'topic', 'options') @defer.inlineCallbacks def test_publish(self): d = self.connector.publish('topic', 'data', 'options') self.connector.app.gotConnection() yield d self.connector.service.publish.assert_called_with( 'topic', 'data', options='options') @defer.inlineCallbacks def test_OnLeave(self): d = self.connector.getService() self.connector.app.gotConnection() yield d self.assertTrue(self.connector.master.running) self.connector.service.onLeave(None) self.assertFalse(self.connector.master.running) buildbot-2.6.0/master/buildbot/test/unit/test_worker_base.py000066400000000000000000000623211361162603000242760ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
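# Rough sketch, not part of the archived sources: how the WampConnector calls
# exercised above fit together.  It assumes subscribe()/publish() accept the
# (callback, topic) and (topic, data) arguments used in the tests, and that the
# master's mq settings look like FakeConfig above; topic names are invented.
from twisted.internet import defer

from buildbot.wamp import connector


@defer.inlineCallbacks
def wamp_example(master):
    wamp = connector.WampConnector()
    yield wamp.setServiceParent(master)
    yield master.startService()
    # reads master.config.mq, e.g.
    #   {'type': 'wamp', 'router_url': 'wss://foo', 'realm': 'bb'}
    yield wamp.reconfigServiceWithBuildbotConfig(master.config)
    # both calls return Deferreds that fire once the wamp session is connected
    yield wamp.publish('org.buildbot.example', {'event': 'example'})
    yield wamp.subscribe(print, 'org.buildbot.example')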
# # Copyright Buildbot Team Members from parameterized import parameterized import mock from twisted.internet import defer from twisted.trial import unittest from buildbot import config from buildbot import locks from buildbot.machine.base import Machine from buildbot.process import properties from buildbot.test.fake import bworkermanager from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.fake import worker from buildbot.test.util import interfaces from buildbot.test.util import logging from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import AbstractLatentWorker from buildbot.worker import base class ConcreteWorker(base.AbstractWorker): pass class FakeBuilder: def getBuilderId(self): return defer.succeed(1) class WorkerInterfaceTests(interfaces.InterfaceTests): def test_attr_workername(self): self.assertTrue(hasattr(self.wrk, 'workername')) def test_attr_properties(self): self.assertTrue(hasattr(self.wrk, 'properties')) def test_attr_defaultProperties(self): self.assertTrue(hasattr(self.wrk, 'defaultProperties')) @defer.inlineCallbacks def test_attr_worker_basedir(self): yield self.callAttached() self.assertIsInstance(self.wrk.worker_basedir, str) @defer.inlineCallbacks def test_attr_path_module(self): yield self.callAttached() self.assertTrue(hasattr(self.wrk, 'path_module')) @defer.inlineCallbacks def test_attr_worker_system(self): yield self.callAttached() self.assertTrue(hasattr(self.wrk, 'worker_system')) def test_signature_acquireLocks(self): @self.assertArgSpecMatches(self.wrk.acquireLocks) def acquireLocks(self): pass def test_signature_releaseLocks(self): @self.assertArgSpecMatches(self.wrk.releaseLocks) def releaseLocks(self): pass def test_signature_attached(self): @self.assertArgSpecMatches(self.wrk.attached) def attached(self, conn): pass def test_signature_detached(self): @self.assertArgSpecMatches(self.wrk.detached) def detached(self): pass def test_signature_addWorkerForBuilder(self): @self.assertArgSpecMatches(self.wrk.addWorkerForBuilder) def addWorkerForBuilder(self, wfb): pass def test_signature_removeWorkerForBuilder(self): @self.assertArgSpecMatches(self.wrk.removeWorkerForBuilder) def removeWorkerForBuilder(self, wfb): pass def test_signature_buildFinished(self): @self.assertArgSpecMatches(self.wrk.buildFinished) def buildFinished(self, wfb): pass def test_signature_canStartBuild(self): @self.assertArgSpecMatches(self.wrk.canStartBuild) def canStartBuild(self): pass class RealWorkerItfc(TestReactorMixin, unittest.TestCase, WorkerInterfaceTests): def setUp(self): self.setUpTestReactor() self.wrk = ConcreteWorker('wrk', 'pa') @defer.inlineCallbacks def callAttached(self): self.master = fakemaster.make_master(self, wantData=True) yield self.master.workers.disownServiceParent() self.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) self.master.workers = self.workers yield self.wrk.setServiceParent(self.master.workers) self.conn = fakeprotocol.FakeConnection(self.master, self.wrk) yield self.wrk.attached(self.conn) class FakeWorkerItfc(TestReactorMixin, unittest.TestCase, WorkerInterfaceTests): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.wrk = worker.FakeWorker(self.master) def callAttached(self): self.conn = fakeprotocol.FakeConnection(self.master, self.wrk) return self.wrk.attached(self.conn) class TestAbstractWorker(logging.LoggingMixin, TestReactorMixin, 
unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.setUpLogging() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster yield self.master.workers.disownServiceParent() self.workers = self.master.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) @defer.inlineCallbacks def createWorker(self, name='bot', password='pass', attached=False, configured=True, **kwargs): worker = ConcreteWorker(name, password, **kwargs) if configured: yield worker.setServiceParent(self.workers) if attached: worker.conn = fakeprotocol.FakeConnection(self.master, worker) return worker @defer.inlineCallbacks def createMachine(self, name, configured=True, **kwargs): machine = Machine(name) if configured: yield machine.setServiceParent(self.master.machine_manager) return machine @defer.inlineCallbacks def test_constructor_minimal(self): bs = yield self.createWorker('bot', 'pass') yield bs.startService() self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.max_builds, None) self.assertEqual(bs.notify_on_missing, []) self.assertEqual(bs.missing_timeout, ConcreteWorker.DEFAULT_MISSING_TIMEOUT) self.assertEqual(bs.properties.getProperty('workername'), 'bot') self.assertEqual(bs.access, []) @defer.inlineCallbacks def test_constructor_full(self): lock1, lock2 = locks.MasterLock('lock1'), locks.MasterLock('lock2') access1, access2 = lock1.access('counting'), lock2.access('counting') bs = yield self.createWorker('bot', 'pass', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}, locks=[access1, access2]) yield bs.startService() self.assertEqual(bs.max_builds, 2) self.assertEqual(bs.notify_on_missing, ['me@me.com']) self.assertEqual(bs.missing_timeout, 120) self.assertEqual(bs.properties.getProperty('a'), 'b') self.assertEqual(bs.access, [access1, access2]) @defer.inlineCallbacks def test_constructor_notify_on_missing_not_list(self): bs = yield self.createWorker('bot', 'pass', notify_on_missing='foo@foo.com') yield bs.startService() # turned into a list: self.assertEqual(bs.notify_on_missing, ['foo@foo.com']) def test_constructor_notify_on_missing_not_string(self): with self.assertRaises(config.ConfigErrors): ConcreteWorker('bot', 'pass', notify_on_missing=['a@b.com', 13]) @defer.inlineCallbacks def do_test_reconfigService(self, old, new, existingRegistration=True): old.parent = self.master if existingRegistration: old.registration = bworkermanager.FakeWorkerRegistration(old) old.missing_timer = mock.Mock(name='missing_timer') if not old.running: yield old.startService() yield old.reconfigServiceWithSibling(new) @defer.inlineCallbacks def test_reconfigService_attrs(self): old = yield self.createWorker('bot', 'pass', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}) new = yield self.createWorker('bot', 'pass', configured=False, max_builds=3, notify_on_missing=['her@me.com'], missing_timeout=121, properties={'a': 'c'}) old.updateWorker = mock.Mock(side_effect=lambda: defer.succeed(None)) yield self.do_test_reconfigService(old, new) self.assertEqual(old.max_builds, 3) self.assertEqual(old.notify_on_missing, ['her@me.com']) self.assertEqual(old.missing_timeout, 121) self.assertEqual(old.properties.getProperty('a'), 'c') self.assertEqual(old.registration.updates, ['bot']) self.assertTrue(old.updateWorker.called) @defer.inlineCallbacks def 
test_reconfigService_has_properties(self): old = yield self.createWorker(name="bot", password="pass") yield self.do_test_reconfigService(old, old) self.assertTrue(old.properties.getProperty('workername'), 'bot') @defer.inlineCallbacks def test_setupProperties(self): props = properties.Properties() props.setProperty('foo', 1, 'Scheduler') props.setProperty('bar', 'bleh', 'Change') props.setProperty('omg', 'wtf', 'Builder') wrkr = yield self.createWorker( 'bot', 'passwd', defaultProperties={'bar': 'onoes', 'cuckoo': 42}) wrkr.setupProperties(props) self.assertEquals(props.getProperty('bar'), 'bleh') self.assertEquals(props.getProperty('cuckoo'), 42) @defer.inlineCallbacks def test_reconfigService_initial_registration(self): old = yield self.createWorker('bot', 'pass') yield self.do_test_reconfigService(old, old, existingRegistration=False) self.assertIn('bot', self.master.workers.registrations) self.assertEqual(old.registration.updates, ['bot']) @defer.inlineCallbacks def test_reconfigService_builder(self): old = yield self.createWorker('bot', 'pass') yield self.do_test_reconfigService(old, old) # initial configuration, there is no builder configured self.assertEqual(old._configured_builderid_list, []) workers = yield self.master.data.get(('workers',)) self.assertEqual(len(workers[0]['configured_on']), 0) new = yield self.createWorker('bot', 'pass', configured=False) # we create a fake builder, and associate to the master self.botmaster.builders['bot'] = [FakeBuilder()] self.master.db.insertTestData([ fakedb.Builder(id=1, name='builder'), fakedb.BuilderMaster(builderid=1, masterid=824) ]) # on reconfig, the db should see the builder configured for this worker yield old.reconfigServiceWithSibling(new) self.assertEqual(old._configured_builderid_list, [1]) workers = yield self.master.data.get(('workers',)) self.assertEqual(len(workers[0]['configured_on']), 1) self.assertEqual(workers[0]['configured_on'][0]['builderid'], 1) @defer.inlineCallbacks def test_reconfig_service_no_machine(self): old = yield self.createWorker('bot', 'pass') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertIsNone(old.machine) @defer.inlineCallbacks def test_reconfig_service_with_machine_initial(self): machine = yield self.createMachine('machine1') old = yield self.createWorker('bot', 'pass', machine_name='machine1') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertIs(old.machine, machine) @defer.inlineCallbacks def test_reconfig_service_with_unknown_machine(self): old = yield self.createWorker('bot', 'pass', machine_name='machine1') self.assertIsNone(old.machine) yield self.do_test_reconfigService(old, old) self.assertLogged('Unknown machine') @parameterized.expand([ ('None_to_machine_initial', False, None, None, 'machine1', 'machine1'), ('None_to_machine', True, None, None, 'machine1', 'machine1'), ('machine_to_None_initial', False, 'machine1', None, None, None), ('machine_to_None', True, 'machine1', 'machine1', None, None), ('machine_to_same_machine_initial', False, 'machine1', None, 'machine1', 'machine1'), ('machine_to_same_machine', True, 'machine1', 'machine1', 'machine1', 'machine1'), ('machine_to_another_machine_initial', False, 'machine1', None, 'machine2', 'machine2'), ('machine_to_another_machine', True, 'machine1', 'machine1', 'machine2', 'machine2'), ]) @defer.inlineCallbacks def test_reconfig_service_machine(self, test_name, do_initial_self_reconfig, old_machine_name, expected_old_machine_name, new_machine_name, 
expected_new_machine_name): machine1 = yield self.createMachine('machine1') machine2 = yield self.createMachine('machine2') name_to_machine = { None: None, machine1.name: machine1, machine2.name: machine2, } expected_old_machine = name_to_machine[expected_old_machine_name] expected_new_machine = name_to_machine[expected_new_machine_name] old = yield self.createWorker('bot', 'pass', machine_name=old_machine_name) new = yield self.createWorker('bot', 'pass', configured=False, machine_name=new_machine_name) if do_initial_self_reconfig: yield self.do_test_reconfigService(old, old) self.assertIs(old.machine, expected_old_machine) yield self.do_test_reconfigService(old, new) self.assertIs(old.machine, expected_new_machine) @defer.inlineCallbacks def test_stopService(self): worker = yield self.createWorker() yield worker.startService() reg = worker.registration yield worker.stopService() self.assertTrue(reg.unregistered) self.assertEqual(worker.registration, None) # FIXME: Test that reconfig properly deals with # 1) locks # 2) telling worker about builder # 3) missing timer # in both the initial config and a reconfiguration. def test_startMissingTimer_no_parent(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=10) bs.startMissingTimer() self.assertEqual(bs.missing_timer, None) def test_startMissingTimer_no_timeout(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=0) bs.parent = mock.Mock() bs.startMissingTimer() self.assertEqual(bs.missing_timer, None) def test_startMissingTimer_no_notify(self): bs = ConcreteWorker('bot', 'pass', missing_timeout=3600) bs.parent = mock.Mock() bs.running = True bs.startMissingTimer() self.assertNotEqual(bs.missing_timer, None) def test_missing_timer(self): bs = ConcreteWorker('bot', 'pass', notify_on_missing=['abc'], missing_timeout=100) bs.parent = mock.Mock() bs.running = True bs.startMissingTimer() self.assertNotEqual(bs.missing_timer, None) bs.stopMissingTimer() self.assertEqual(bs.missing_timer, None) @defer.inlineCallbacks def test_setServiceParent_started(self): master = self.master bsmanager = master.workers yield master.startService() bs = ConcreteWorker('bot', 'pass') yield bs.setServiceParent(bsmanager) self.assertEqual(bs.manager, bsmanager) self.assertEqual(bs.parent, bsmanager) self.assertEqual(bsmanager.master, master) self.assertEqual(bs.master, master) @defer.inlineCallbacks def test_setServiceParent_masterLocks(self): """ http://trac.buildbot.net/ticket/2278 """ master = self.master bsmanager = master.workers yield master.startService() lock = locks.MasterLock('masterlock') bs = ConcreteWorker('bot', 'pass', locks=[lock.access("counting")]) yield bs.setServiceParent(bsmanager) @defer.inlineCallbacks def test_setServiceParent_workerLocks(self): """ http://trac.buildbot.net/ticket/2278 """ master = self.master bsmanager = master.workers yield master.startService() lock = locks.WorkerLock('lock') bs = ConcreteWorker('bot', 'pass', locks=[lock.access("counting")]) yield bs.setServiceParent(bsmanager) @defer.inlineCallbacks def test_startService_getWorkerInfo_empty(self): worker = yield self.createWorker() yield worker.startService() self.assertEqual(worker.worker_status.getAdmin(), None) self.assertEqual(worker.worker_status.getHost(), None) self.assertEqual(worker.worker_status.getAccessURI(), None) self.assertEqual(worker.worker_status.getVersion(), None) # check that a new worker row was added for this worker bs = yield self.master.db.workers.getWorker(name='bot') 
self.assertEqual(bs['name'], 'bot') @defer.inlineCallbacks def test_startService_getWorkerInfo_fromDb(self): self.master.db.insertTestData([ fakedb.Worker(id=9292, name='bot', info={ 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'version': 'TheVersion' }) ]) worker = yield self.createWorker() yield worker.startService() self.assertEqual(worker.workerid, 9292) self.assertEqual(worker.worker_status.getAdmin(), 'TheAdmin') self.assertEqual(worker.worker_status.getHost(), 'TheHost') self.assertEqual(worker.worker_status.getAccessURI(), 'TheURI') self.assertEqual(worker.worker_status.getVersion(), 'TheVersion') @defer.inlineCallbacks def test_attached_remoteGetWorkerInfo(self): worker = yield self.createWorker() yield worker.startService() ENVIRON = {} COMMANDS = {'cmd1': '1', 'cmd2': '1'} conn = fakeprotocol.FakeConnection(worker.master, worker) conn.info = { 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'environ': ENVIRON, 'basedir': 'TheBaseDir', 'system': 'TheWorkerSystem', 'version': 'version', 'worker_commands': COMMANDS, } yield worker.attached(conn) # check the values get set right self.assertEqual(worker.worker_status.getAdmin(), "TheAdmin") self.assertEqual(worker.worker_status.getHost(), "TheHost") self.assertEqual(worker.worker_status.getAccessURI(), "TheURI") self.assertEqual(worker.worker_environ, ENVIRON) self.assertEqual(worker.worker_basedir, 'TheBaseDir') self.assertEqual(worker.worker_system, 'TheWorkerSystem') self.assertEqual(worker.worker_commands, COMMANDS) @defer.inlineCallbacks def test_attached_callsMaybeStartBuildsForWorker(self): worker = yield self.createWorker() yield worker.startService() yield worker.reconfigServiceWithSibling(worker) conn = fakeprotocol.FakeConnection(worker.master, worker) conn.info = {} yield worker.attached(conn) self.assertEqual(self.botmaster.buildsStartedForWorkers, ["bot"]) @defer.inlineCallbacks def test_attached_workerInfoUpdates(self): # put in stale info: self.master.db.insertTestData([ fakedb.Worker(name='bot', info={ 'admin': 'WrongAdmin', 'host': 'WrongHost', 'access_uri': 'WrongURI', 'version': 'WrongVersion' }) ]) worker = yield self.createWorker() yield worker.startService() conn = fakeprotocol.FakeConnection(worker.master, worker) conn.info = { 'admin': 'TheAdmin', 'host': 'TheHost', 'access_uri': 'TheURI', 'version': 'TheVersion', } yield worker.attached(conn) self.assertEqual(worker.worker_status.getAdmin(), 'TheAdmin') self.assertEqual(worker.worker_status.getHost(), 'TheHost') self.assertEqual(worker.worker_status.getAccessURI(), 'TheURI') self.assertEqual(worker.worker_status.getVersion(), 'TheVersion') # and the db is updated too: db_worker = yield self.master.db.workers.getWorker(name="bot") self.assertEqual(db_worker['workerinfo']['admin'], 'TheAdmin') self.assertEqual(db_worker['workerinfo']['host'], 'TheHost') self.assertEqual(db_worker['workerinfo']['access_uri'], 'TheURI') self.assertEqual(db_worker['workerinfo']['version'], 'TheVersion') @defer.inlineCallbacks def test_worker_shutdown(self): worker = yield self.createWorker(attached=True) yield worker.startService() yield worker.shutdown() self.assertEqual( worker.conn.remoteCalls, [('remoteSetBuilderList', []), ('remoteShutdown',)]) @defer.inlineCallbacks def test_worker_shutdown_not_connected(self): worker = yield self.createWorker(attached=False) yield worker.startService() # No exceptions should be raised here yield worker.shutdown() @defer.inlineCallbacks def test_shutdownRequested(self): worker = yield 
self.createWorker(attached=False) yield worker.startService() yield worker.shutdownRequested() self.assertEqual(worker._graceful, True) @defer.inlineCallbacks def test_missing_timer_missing(self): worker = yield self.createWorker(attached=False, missing_timeout=1) yield worker.startService() self.assertNotEqual(worker.missing_timer, None) yield self.reactor.advance(1) self.assertEqual(worker.missing_timer, None) self.assertEqual(len(self.master.data.updates.missingWorkers), 1) @defer.inlineCallbacks def test_missing_timer_stopped(self): worker = yield self.createWorker(attached=False, missing_timeout=1) yield worker.startService() self.assertNotEqual(worker.missing_timer, None) yield worker.stopService() self.assertEqual(worker.missing_timer, None) self.assertEqual(len(self.master.data.updates.missingWorkers), 0) @defer.inlineCallbacks def test_worker_actions_stop(self): worker = yield self.createWorker(attached=False) yield worker.startService() worker.controlWorker(("worker", 1, "stop"), {'reason': "none"}) self.assertEqual(worker._graceful, True) @defer.inlineCallbacks def test_worker_actions_kill(self): worker = yield self.createWorker(attached=False) yield worker.startService() worker.controlWorker(("worker", 1, "kill"), {'reason': "none"}) self.assertEqual(worker.conn, None) @defer.inlineCallbacks def test_worker_actions_pause(self): worker = yield self.createWorker(attached=False) yield worker.startService() worker.controlWorker(("worker", 1, "pause"), {'reason': "none"}) self.assertEqual(worker._paused, True) worker.controlWorker(("worker", 1, "unpause"), {'reason': "none"}) self.assertEqual(worker._paused, False) class TestAbstractLatentWorker(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster yield self.master.workers.disownServiceParent() self.workers = self.master.workers = bworkermanager.FakeWorkerManager() yield self.workers.setServiceParent(self.master) @defer.inlineCallbacks def do_test_reconfigService(self, old, new, existingRegistration=True): old.parent = self.master if existingRegistration: old.registration = bworkermanager.FakeWorkerRegistration(old) old.missing_timer = mock.Mock(name='missing_timer') yield old.startService() yield old.reconfigServiceWithSibling(new) @defer.inlineCallbacks def test_reconfigService(self): old = AbstractLatentWorker( "name", "password", build_wait_timeout=10) new = AbstractLatentWorker( "name", "password", build_wait_timeout=30) yield self.do_test_reconfigService(old, new) self.assertEqual(old.build_wait_timeout, 30) buildbot-2.6.0/master/buildbot/test/unit/test_worker_docker.py000066400000000000000000000344711361162603000246400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
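# Illustrative sketch, not part of the archived sources: the worker options
# exercised by TestAbstractWorker above, written the way they would appear in
# a master.cfg.  The worker name, password, e-mail address and lock name are
# placeholders; Worker is the concrete subclass of the AbstractWorker under
# test here.
from buildbot import locks
from buildbot.plugins import worker

build_lock = locks.WorkerLock('example-lock', maxCount=1)

example_worker = worker.Worker(
    'example-worker', 'pass',
    max_builds=2,                          # cap concurrent builds on this worker
    notify_on_missing=['admin@example.invalid'],
    missing_timeout=120,                   # seconds before the "missing" mail
    properties={'a': 'b'},                 # always set on builds run here
    defaultProperties={'cuckoo': 42},      # used only when not set elsewhere
    locks=[build_lock.access('counting')],
)
# in master.cfg this instance would be appended to c['workers']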
# # Copyright Buildbot Team Members from twisted.trial import unittest from buildbot import config from buildbot import interfaces from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.process.properties import Property from buildbot.test.fake import docker from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import docker as dockerworker class TestDockerLatentWorker(unittest.SynchronousTestCase, TestReactorMixin): def setupWorker(self, *args, **kwargs): self.patch(dockerworker, 'docker', docker) worker = dockerworker.DockerLatentWorker(*args, **kwargs) master = fakemaster.make_master(self, wantData=True) fakemaster.master = master worker.setServiceParent(master) self.successResultOf(master.startService()) self.addCleanup(master.stopService) return worker def setUp(self): self.setUpTestReactor() self.build = Properties( image='busybox:latest', builder='docker_worker', distro='wheezy') self.patch(dockerworker, 'client', docker) def test_constructor_nodocker(self): self.patch(dockerworker, 'client', None) with self.assertRaises(config.ConfigErrors): self.setupWorker('bot', 'pass', 'unix://tmp.sock', 'debian:wheezy', []) def test_constructor_noimage_nodockerfile(self): with self.assertRaises(config.ConfigErrors): self.setupWorker('bot', 'pass', 'http://localhost:2375') def test_constructor_noimage_dockerfile(self): bs = self.setupWorker( 'bot', 'pass', 'http://localhost:2375', dockerfile="FROM ubuntu") self.assertEqual(bs.dockerfile, "FROM ubuntu") self.assertEqual(bs.image, None) def test_constructor_image_nodockerfile(self): bs = self.setupWorker( 'bot', 'pass', 'http://localhost:2375', image="myworker") self.assertEqual(bs.dockerfile, None) self.assertEqual(bs.image, 'myworker') def test_constructor_minimal(self): # Minimal set of parameters bs = self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.client_args, {'base_url': 'tcp://1234:2375'}) self.assertEqual(bs.image, 'worker') self.assertEqual(bs.command, []) def test_contruction_minimal_docker_py(self): docker.version = "1.10.6" bs = self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') id, name = self.successResultOf(bs.start_instance(self.build)) client = docker.APIClient.latest self.assertEqual(client.called_class_name, "Client") client = docker.Client.latest self.assertNotEqual(client.called_class_name, "APIClient") def test_contruction_minimal_docker(self): docker.version = "2.0.0" bs = self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker') id, name = self.successResultOf(bs.start_instance(self.build)) client = docker.Client.latest self.assertEqual(client.called_class_name, "APIClient") client = docker.APIClient.latest self.assertNotEqual(client.called_class_name, "Client") def test_constructor_nopassword(self): # when no password, it is created automatically bs = self.setupWorker('bot', None, 'tcp://1234:2375', 'worker') self.assertEqual(bs.workername, 'bot') self.assertEqual(len(bs.password), 20) def test_constructor_all_docker_parameters(self): # Volumes have their own tests bs = self.setupWorker('bot', 'pass', 'unix:///var/run/docker.sock', 'worker_img', ['/bin/sh'], dockerfile="FROM ubuntu", version='1.9', tls=True, hostconfig={'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4']}, custom_context=False, buildargs=None, encoding='gzip') self.assertEqual(bs.workername, 'bot') 
self.assertEqual(bs.password, 'pass') self.assertEqual(bs.image, 'worker_img') self.assertEqual(bs.command, ['/bin/sh']) self.assertEqual(bs.dockerfile, "FROM ubuntu") self.assertEqual(bs.volumes, []) self.assertEqual(bs.client_args, { 'base_url': 'unix:///var/run/docker.sock', 'version': '1.9', 'tls': True}) self.assertEqual( bs.hostconfig, {'network_mode': 'fake', 'dns': ['1.1.1.1', '1.2.3.4']}) self.assertFalse(bs.custom_context) self.assertEqual(bs.buildargs, None) self.assertEqual(bs.encoding, 'gzip') def test_start_instance_volume_renderable(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=[Interpolate('/data:/worker/%(kw:builder)s/build', builder=Property('builder'))]) id, name = self.successResultOf(bs.start_instance(self.build)) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/worker/docker_worker/build']) def test_volume_no_suffix(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=['/src/webapp:/opt/webapp']) self.successResultOf(bs.start_instance(self.build)) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(len(client.call_args_create_host_config), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/opt/webapp']) self.assertEqual(client.call_args_create_host_config[0]['binds'], ["/src/webapp:/opt/webapp"]) def test_volume_ro_rw(self): bs = self.setupWorker('bot', 'pass', 'tcp://1234:2375', 'worker', ['bin/bash'], volumes=['/src/webapp:/opt/webapp:ro', '~:/backup:rw']) self.successResultOf(bs.start_instance(self.build)) client = docker.Client.latest self.assertEqual(len(client.call_args_create_container), 1) self.assertEqual(len(client.call_args_create_host_config), 1) self.assertEqual(client.call_args_create_container[0]['volumes'], ['/opt/webapp', '/backup']) self.assertEqual(client.call_args_create_host_config[0]['binds'], ['/src/webapp:/opt/webapp:ro', '~:/backup:rw']) def test_volume_bad_format(self): with self.assertRaises(config.ConfigErrors): self.setupWorker('bot', 'pass', 'http://localhost:2375', image="worker", volumes=['abcd=efgh']) def test_volume_bad_format_renderable(self): bs = self.setupWorker( 'bot', 'pass', 'http://localhost:2375', image="worker", volumes=[Interpolate('/data==/worker/%(kw:builder)s/build', builder=Property('builder'))]) f = self.failureResultOf(bs.start_instance(self.build)) f.check(config.ConfigErrors) def test_start_instance_image_no_version(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox', ['bin/bash']) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'busybox') def test_start_instance_image_right_version(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox:latest', ['bin/bash']) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'busybox:latest') def test_start_instance_image_wrong_version(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'busybox:123', ['bin/bash']) f = self.failureResultOf(bs.start_instance(self.build)) f.check(interfaces.LatentWorkerFailedToSubstantiate) def test_start_instance_image_renderable(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', Property('image'), ['bin/bash']) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'busybox:latest') def 
test_start_instance_noimage_nodockerfile(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', ['bin/bash']) f = self.failureResultOf(bs.start_instance(self.build)) f.check(interfaces.LatentWorkerFailedToSubstantiate) def test_start_instance_image_and_dockefile(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile='BUG') f = self.failureResultOf(bs.start_instance(self.build)) f.check(interfaces.LatentWorkerFailedToSubstantiate) def test_start_instance_noimage_gooddockerfile(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile='FROM debian:wheezy') id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'customworker') def test_start_instance_noimage_pull(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'alpine:latest', autopull=True) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'alpine:latest') def test_start_instance_image_pull(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', autopull=True) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 0) def test_start_instance_image_alwayspull(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', autopull=True, alwaysPull=True) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 1) def test_start_instance_image_noauto_alwayspull(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', autopull=False, alwaysPull=True) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') client = docker.Client.latest self.assertEqual(client._pullCount, 0) def test_start_instance_noimage_renderabledockerfile(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'customworker', dockerfile=Interpolate('FROM debian:%(kw:distro)s', distro=Property('distro'))) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'customworker') def test_start_instance_custom_context_and_buildargs(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), custom_context=True, buildargs={'sample_arg1': 'test_val1'}) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') def test_start_instance_custom_context_no_buildargs(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), custom_context=True) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') def test_start_instance_buildargs_no_custom_context(self): bs = self.setupWorker( 'bot', 'pass', 'tcp://1234:2375', 'tester:latest', dockerfile=Interpolate('FROM debian:latest'), buildargs={'sample_arg1': 'test_val1'}) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'tester:latest') def test_start_worker_but_already_created_with_same_name(self): bs = self.setupWorker( 'existing', 'pass', 'tcp://1234:2375', 'busybox:latest', ['bin/bash']) id, name = self.successResultOf(bs.start_instance(self.build)) self.assertEqual(name, 'busybox:latest') class testDockerPyStreamLogs(unittest.TestCase): def 
compare(self, result, log): self.assertEqual(result, list(dockerworker._handle_stream_line(log))) def testEmpty(self): self.compare([], '{"stream":"\\n"}\r\n') def testOneLine(self): self.compare( [" ---> Using cache"], '{"stream":" ---\\u003e Using cache\\n"}\r\n') def testMultipleLines(self): self.compare(["Fetched 8298 kB in 3s (2096 kB/s)", "Reading package lists..."], '{"stream":"Fetched 8298 kB in 3s (2096 kB/s)\\nReading package lists..."}\r\n') def testError(self): self.compare(["ERROR: The command [/bin/sh -c apt-get update && apt-get install -y" " python-dev python-pip] returned a non-zero code: 127"], '{"errorDetail": {"message": "The command [/bin/sh -c apt-get update && ' 'apt-get install -y python-dev python-pip] returned a non-zero code: 127"},' ' "error": "The command [/bin/sh -c apt-get update && apt-get install -y' ' python-dev python-pip] returned a non-zero code: 127"}\r\n') buildbot-2.6.0/master/buildbot/test/unit/test_worker_ec2.py000066400000000000000000000606711361162603000240430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2014 Longaccess private company import os from twisted.trial import unittest from buildbot.test.util.decorators import flaky from buildbot.test.util.warnings import assertNotProducesWarnings from buildbot.test.util.warnings import assertProducesWarnings from buildbot.worker_transition import DeprecatedWorkerNameWarning try: from moto import mock_ec2 assert mock_ec2 import boto3 assert boto3 except ImportError: boto3 = None ec2 = None if boto3 is not None: from buildbot.worker import ec2 # pylint: disable=ungrouped-imports # Current moto (1.3.7) requires dummy credentials to work # https://github.com/spulec/moto/issues/1924 os.environ['AWS_SECRET_ACCESS_KEY'] = 'foobar_secret' os.environ['AWS_ACCESS_KEY_ID'] = 'foobar_key' # redefine the mock_ec2 decorator to skip the test if boto3 or moto # isn't installed def skip_ec2(f): f.skip = "boto3 or moto is not installed" return f if boto3 is None: mock_ec2 = skip_ec2 def anyImageId(c): for image in c.describe_images()['Images']: return image['ImageId'] return 'foo' class TestEC2LatentWorker(unittest.TestCase): ec2_connection = None def setUp(self): super(TestEC2LatentWorker, self).setUp() if boto3 is None: raise unittest.SkipTest("moto not found") def botoSetup(self, name='latent_buildbot_worker'): # the proxy system is also not properly mocked, so we need to delete environment variables for env in ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY']: if env in os.environ: del os.environ[env] # create key pair is not correctly mocked and need to have fake aws creds configured kw = dict(region_name='us-east-1', aws_access_key_id='ACCESS_KEY', aws_secret_access_key='SECRET_KEY', aws_session_token='SESSION_TOKEN') c = boto3.client('ec2', **kw) r = boto3.resource('ec2', **kw) try: 
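            # older moto releases may not implement KeyPairs.create_key_pair;
            # the NotImplementedError handler below turns that into a skipped
            # test instead of a failure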
r.create_key_pair(KeyName=name) except NotImplementedError: raise unittest.SkipTest("KeyPairs.create_key_pair not implemented" " in this version of moto, please update.") r.create_security_group(GroupName=name, Description='the security group') instance = r.create_instances(ImageId=anyImageId(c), MinCount=1, MaxCount=1)[0] c.create_image(InstanceId=instance.id, Name="foo", Description="bar") c.terminate_instances(InstanceIds=[instance.id]) return c, r @mock_ec2 def test_constructor_minimal(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', ami=amis[0].id, ) self.assertEqual(bs.workername, 'bot1') self.assertEqual(bs.password, 'sekrit') self.assertEqual(bs.instance_type, 'm1.large') self.assertEqual(bs.ami, amis[0].id) @mock_ec2 def test_constructor_tags(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) tags = {'foo': 'bar'} bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', tags=tags, ami=amis[0].id, ) self.assertEqual(bs.tags, tags) @mock_ec2 def test_constructor_region(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, region='us-west-1' ) self.assertEqual(bs.session.region_name, 'us-west-1') @mock_ec2 def test_fail_mixing_classic_and_vpc_ec2_settings(self): c, r = self.botoSetup() amis = list(r.images.all()) def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', keypair_name="test_key", identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, security_name="classic", subnet_id="sn-1234" ) with self.assertRaises(ValueError): create_worker() @mock_ec2 def test_start_vpc_instance(self): c, r = self.botoSetup() vpc = r.create_vpc(CidrBlock="192.168.0.0/24") subnet = r.create_subnet(VpcId=vpc.id, CidrBlock="192.168.0.0/24") amis = list(r.images.all()) sg = r.create_security_group(GroupName="test_sg", Description="test_sg", VpcId=vpc.id) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_group_ids=[sg.id], subnet_id=subnet.id, ami=amis[0].id ) bs._poll_resolution = 0 instance_id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertEqual(instances[0].subnet_id, subnet.id) self.assertEqual(len(instances[0].security_groups), 1) self.assertEqual(instances[0].security_groups[0]['GroupId'], sg.id) self.assertEqual(instances[0].key_name, 'latent_buildbot_worker') @mock_ec2 def test_start_instance(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name='keypair_name', security_name='security_name', ami=amis[0].id ) bs._poll_resolution = 1 instance_id, image_id, start_time = bs._start_instance() self.assertTrue(instance_id.startswith('i-')) self.assertTrue(image_id.startswith('ami-')) 
self.assertTrue(start_time > "00:00:00") instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertIsNone(instances[0].tags) self.assertEqual(instances[0].id, bs.properties.getProperty('instance')) @mock_ec2 def test_start_instance_volumes_deprecated(self): c, r = self.botoSetup() block_device_map_arg = { "/dev/xvdb": { "volume_type": "io1", "iops": 10, "size": 20, }, "/dev/xvdc": { "volume_type": "gp2", "size": 30, "delete_on_termination": False } } block_device_map_res = [ { 'DeviceName': "/dev/xvdb", 'Ebs': { "VolumeType": "io1", "Iops": 10, "VolumeSize": 20, "DeleteOnTermination": True, } }, { 'DeviceName': "/dev/xvdc", 'Ebs': { "VolumeType": "gp2", "VolumeSize": 30, "DeleteOnTermination": False, } }, ] amis = list(r.images.all()) with assertProducesWarnings( DeprecatedWorkerNameWarning, messages_patterns=[ r"Use of dict value to 'block_device_map' of EC2LatentWorker " r"constructor is deprecated. Please use a list matching the AWS API" ]): bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, block_device_map=block_device_map_arg ) # moto does not currently map volumes properly. below ensures # that my conversion code properly composes it, including # delete_on_termination default. self.assertEqual(sorted(block_device_map_res, key=lambda x: x['DeviceName']), sorted(bs.block_device_map, key=lambda x: x['DeviceName'])) @mock_ec2 def test_start_instance_volumes(self): c, r = self.botoSetup() block_device_map_arg = [ { 'DeviceName': "/dev/xvdb", 'Ebs': { "VolumeType": "io1", "Iops": 10, "VolumeSize": 20, } }, { 'DeviceName': "/dev/xvdc", 'Ebs': { "VolumeType": "gp2", "VolumeSize": 30, "DeleteOnTermination": False, } }, ] block_device_map_res = [ { 'DeviceName': "/dev/xvdb", 'Ebs': { "VolumeType": "io1", "Iops": 10, "VolumeSize": 20, "DeleteOnTermination": True, } }, { 'DeviceName': "/dev/xvdc", 'Ebs': { "VolumeType": "gp2", "VolumeSize": 30, "DeleteOnTermination": False, } }, ] amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, block_device_map=block_device_map_arg ) # moto does not currently map volumes properly. below ensures # that my conversion code properly composes it, including # delete_on_termination default. 
self.assertEqual(block_device_map_res, bs.block_device_map) @mock_ec2 def test_start_instance_attach_volume(self): c, r = self.botoSetup() vol = r.create_volume(Size=10, AvailabilityZone='us-east-1a') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=ami.id, volumes=[(vol.id, "/dev/sdz")] ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) instance = instances[0] sdz = [bm for bm in instance.block_device_mappings if bm['DeviceName'] == '/dev/sdz'][0] self.assertEqual(vol.id, sdz['Ebs']['VolumeId']) @mock_ec2 def test_start_instance_tags(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) tags = {'foo': 'bar'} bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', tags=tags, ami=amis[0].id ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, id) self.assertEqual(instances[0].tags, [{'Value': 'bar', 'Key': 'foo'}]) @mock_ec2 def test_start_instance_ip(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) eip = c.allocate_address(Domain='vpc') elastic_ip = eip['PublicIp'] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', elastic_ip=elastic_ip, ami=amis[0].id ) bs._poll_resolution = 0 id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) addresses = c.describe_addresses()['Addresses'] self.assertEqual(instances[0].id, addresses[0]['InstanceId']) @mock_ec2 def test_start_vpc_spot_instance(self): c, r = self.botoSetup() vpc = r.create_vpc(CidrBlock="192.168.0.0/24") subnet = r.create_subnet(VpcId=vpc.id, CidrBlock="192.168.0.0/24") amis = list(r.images.all()) sg = r.create_security_group(GroupName="test_sg", Description="test_sg", VpcId=vpc.id) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", ami=amis[0].id, spot_instance=True, max_spot_price=1.5, security_group_ids=[sg.id], subnet_id=subnet.id, ) bs._poll_resolution = 0 instance_id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertTrue(bs.spot_instance) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertEqual(instances[0].subnet_id, subnet.id) self.assertEqual(len(instances[0].security_groups), 1) self.assertEqual(instances[0].security_groups[0]['GroupId'], sg.id) @mock_ec2 def test_start_spot_instance(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) product_description = 'Linux/Unix' bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', 
keypair_name='keypair_name', security_name='security_name', ami=amis[0].id, spot_instance=True, max_spot_price=1.5, product_description=product_description ) bs._poll_resolution = 0 instance_id, _, _ = bs._start_instance() instances = r.instances.filter( Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]) instances = list(instances) self.assertTrue(bs.spot_instance) self.assertEqual(bs.product_description, product_description) self.assertEqual(len(instances), 1) self.assertEqual(instances[0].id, instance_id) self.assertIsNone(instances[0].tags) @mock_ec2 def test_get_image_ami(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=ami.id ) image = bs.get_image() self.assertEqual(image.id, ami.id) @flaky(issueNumber=3936) @mock_ec2 def test_get_image_owners(self): c, r = self.botoSetup('latent_buildbot_slave') amis = list(r.images.all()) ami = amis[0] bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_owners=[int(ami.owner_id)] ) image = bs.get_image() self.assertEqual(image.owner_id, ami.owner_id) @mock_ec2 def test_get_image_location(self): c, r = self.botoSetup('latent_buildbot_slave') bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_location_regex='amazon/.*' ) image = bs.get_image() self.assertTrue(image.image_location.startswith("amazon/")) @mock_ec2 def test_get_image_location_not_found(self): def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', valid_ami_location_regex='foobar.*' ) with self.assertRaises(ValueError): create_worker() @mock_ec2 def test_fail_multiplier_and_max_are_none(self): ''' price_multiplier and max_spot_price may not be None at the same time. 
''' c, r = self.botoSetup() amis = list(r.images.all()) def create_worker(): ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', keypair_name="latent_buildbot_worker", security_name='latent_buildbot_worker', ami=amis[0].id, region='us-west-1', spot_instance=True, price_multiplier=None, max_spot_price=None ) with self.assertRaises(ValueError): create_worker() class TestEC2LatentWorkerDefaultKeyairSecurityGroup(unittest.TestCase): ec2_connection = None def setUp(self): super(TestEC2LatentWorkerDefaultKeyairSecurityGroup, self).setUp() if boto3 is None: raise unittest.SkipTest("moto not found") def botoSetup(self): c = boto3.client('ec2', region_name='us-east-1') r = boto3.resource('ec2', region_name='us-east-1') try: r.create_key_pair(KeyName='latent_buildbot_slave') r.create_key_pair(KeyName='test_keypair') except NotImplementedError: raise unittest.SkipTest("KeyPairs.create_key_pair not implemented" " in this version of moto, please update.") r.create_security_group(GroupName='latent_buildbot_slave', Description='the security group') r.create_security_group(GroupName='test_security_group', Description='other security group') instance = r.create_instances(ImageId=anyImageId(c), MinCount=1, MaxCount=1)[0] c.create_image(InstanceId=instance.id, Name="foo", Description="bar") c.terminate_instances(InstanceIds=[instance.id]) return c, r @mock_ec2 def test_no_default_security_warning_when_security_group_ids(self): c, r = self.botoSetup() amis = list(r.images.all()) bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, keypair_name='test_keypair', subnet_id=["sn-1"] ) self.assertEqual(bs.security_name, None) @mock_ec2 def test_use_non_default_keypair_security(self): c, r = self.botoSetup() amis = list(r.images.all()) with assertNotProducesWarnings(DeprecatedWorkerNameWarning): bs = ec2.EC2LatentWorker('bot1', 'sekrit', 'm1.large', identifier='publickey', secret_identifier='privatekey', ami=amis[0].id, security_name='test_security_group', keypair_name='test_keypair', ) self.assertEqual(bs.keypair_name, 'test_keypair') self.assertEqual(bs.security_name, 'test_security_group') buildbot-2.6.0/master/buildbot/test/unit/test_worker_kubernetes.py000066400000000000000000000101501361162603000255240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.test.fake import fakemaster from buildbot.test.fake.fakebuild import FakeBuildForRendering as FakeBuild from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot from buildbot.test.fake.kube import KubeClientService from buildbot.test.util.misc import TestReactorMixin from buildbot.util.kubeclientservice import KubeError from buildbot.util.kubeclientservice import KubeHardcodedConfig from buildbot.worker import kubernetes class FakeResult: code = 204 def mock_delete(*args): return defer.succeed(FakeResult()) class TestKubernetesWorker(TestReactorMixin, unittest.TestCase): worker = None def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def setupWorker(self, *args, **kwargs): config = KubeHardcodedConfig(master_url="https://kube.example.com") self.worker = worker = kubernetes.KubeLatentWorker( *args, kube_config=config, **kwargs) master = fakemaster.make_master(self, wantData=True) self._kube = yield KubeClientService.getFakeService(master, self, kube_config=config) worker.setServiceParent(master) yield master.startService() self.assertTrue(config.running) def cleanup(): self._kube.delete = mock_delete self.addCleanup(master.stopService) self.addCleanup(cleanup) return worker def test_instantiate(self): worker = kubernetes.KubeLatentWorker('worker') # class instantiation configures nothing self.assertEqual(getattr(worker, '_kube', None), None) @defer.inlineCallbacks def test_wrong_arg(self): with self.assertRaises(TypeError): yield self.setupWorker('worker', wrong_param='wrong_param') def test_service_arg(self): return self.setupWorker('worker') @defer.inlineCallbacks def test_start_service(self): yield self.setupWorker('worker') # http is lazily created on worker substantiation self.assertNotEqual(self.worker._kube, None) @defer.inlineCallbacks def test_start_worker(self): worker = yield self.setupWorker('worker') d = worker.substantiate(None, FakeBuild()) worker.attached(FakeBot()) yield d self.assertEqual(len(worker._kube.pods), 1) pod_name = list(worker._kube.pods.keys())[0] self.assertRegex(pod_name, r'default/buildbot-worker-[0-9a-f]+') pod = worker._kube.pods[pod_name] self.assertEqual( sorted(pod['spec'].keys()), ['containers', 'restartPolicy']) self.assertEqual( sorted(pod['spec']['containers'][0].keys()), ['env', 'image', 'name', 'resources']) self.assertEqual(pod['spec']['containers'][0]['image'], 'rendered:buildbot/buildbot-worker') self.assertEqual(pod['spec']['restartPolicy'], 'Never') @defer.inlineCallbacks def test_start_worker_but_error(self): worker = yield self.setupWorker('worker') def createPod(namespace, spec): raise KubeError({'message': "yeah, but no"}) self.patch(self._kube, 'createPod', createPod) with self.assertRaises(LatentWorkerFailedToSubstantiate): yield worker.substantiate(None, FakeBuild()) self.assertEqual(worker.instance, None) buildbot-2.6.0/master/buildbot/test/unit/test_worker_libvirt.py000066400000000000000000000306361361162603000250430ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import mock

from twisted.internet import defer
from twisted.internet import utils
from twisted.python import failure
from twisted.trial import unittest

from buildbot import config
from buildbot.test.fake import libvirt
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import eventual
from buildbot.worker import libvirt as libvirtworker


class TestLibVirtWorker(unittest.TestCase):

    class ConcreteWorker(libvirtworker.LibVirtWorker):
        pass

    def setUp(self):
        self.patch(libvirtworker, "libvirt", libvirt)
        self.conn = libvirtworker.Connection("test://")
        self.lvconn = self.conn.connection

    def test_constructor_nolibvirt(self):
        self.patch(libvirtworker, "libvirt", None)
        with self.assertRaises(config.ConfigErrors):
            self.ConcreteWorker('bot', 'pass', None, 'path', 'path')

    @defer.inlineCallbacks
    def test_constructor_minimal(self):
        bs = self.ConcreteWorker('bot', 'pass', self.conn, 'path', 'otherpath')
        yield bs._find_existing_deferred
        self.assertEqual(bs.workername, 'bot')
        self.assertEqual(bs.password, 'pass')
        self.assertEqual(bs.connection, self.conn)
        self.assertEqual(bs.image, 'path')
        self.assertEqual(bs.base_image, 'otherpath')

    @defer.inlineCallbacks
    def test_find_existing(self):
        d = self.lvconn.fake_add("bot")
        bs = self.ConcreteWorker('bot', 'pass', self.conn, 'p', 'o')
        yield bs._find_existing_deferred
        self.assertEqual(bs.domain.domain, d)

    @defer.inlineCallbacks
    def test_prepare_base_image_none(self):
        self.patch(utils, "getProcessValue", mock.Mock())
        utils.getProcessValue.side_effect = lambda x, y: defer.succeed(0)
        bs = self.ConcreteWorker('bot', 'pass', self.conn, 'p', None)
        yield bs._find_existing_deferred
        yield bs._prepare_base_image()
        self.assertEqual(utils.getProcessValue.call_count, 0)

    @defer.inlineCallbacks
    def test_prepare_base_image_cheap(self):
        self.patch(utils, "getProcessValue", mock.Mock())
        utils.getProcessValue.side_effect = lambda x, y: defer.succeed(0)
        bs = self.ConcreteWorker('bot', 'pass', self.conn, 'p', 'o')
        yield bs._find_existing_deferred
        yield bs._prepare_base_image()
        utils.getProcessValue.assert_called_with(
            "qemu-img", ["create", "-b", "o", "-f", "qcow2", "p"])

    @defer.inlineCallbacks
    def test_prepare_base_image_full(self):
        self.patch(utils, "getProcessValue", mock.Mock())
        utils.getProcessValue.side_effect = lambda x, y: defer.succeed(0)
        bs = self.ConcreteWorker('bot', 'pass', self.conn, 'p', 'o')
        yield bs._find_existing_deferred
        bs.cheap_copy = False
        yield bs._prepare_base_image()
        utils.getProcessValue.assert_called_with(
            "cp", ["o", "p"])

    @defer.inlineCallbacks
    def _test_stop_instance(self, graceful, fast, expected_destroy,
                            expected_shutdown, shutdown_side_effect=None):
        bs = self.ConcreteWorker('name', 'p', self.conn,
                                 mock.sentinel.hd_image, 'o', xml='')
        bs.graceful_shutdown = graceful
        domain_mock = mock.Mock()
        if shutdown_side_effect:
            domain_mock.shutdown.side_effect = shutdown_side_effect
        bs.domain = libvirtworker.Domain(mock.sentinel.connection, domain_mock)
        with mock.patch('os.remove') as remove_mock:
            yield bs.stop_instance(fast=fast)

        self.assertIsNone(bs.domain)
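        # the assertions below check exactly which libvirt domain calls were
        # issued (destroy vs. graceful shutdown) and that the worker's disk
        # image file was removed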
self.assertEqual(int(expected_destroy), domain_mock.destroy.call_count) self.assertEqual(int(expected_shutdown), domain_mock.shutdown.call_count) remove_mock.assert_called_once_with(mock.sentinel.hd_image) @defer.inlineCallbacks def test_stop_instance_destroy(self): yield self._test_stop_instance(graceful=False, fast=False, expected_destroy=True, expected_shutdown=False) @defer.inlineCallbacks def test_stop_instance_shutdown(self): yield self._test_stop_instance(graceful=True, fast=False, expected_destroy=False, expected_shutdown=True) @defer.inlineCallbacks def test_stop_instance_shutdown_fails(self): yield self._test_stop_instance(graceful=True, fast=False, expected_destroy=True, expected_shutdown=True, shutdown_side_effect=Exception) @defer.inlineCallbacks def test_start_instance(self): bs = self.ConcreteWorker('b', 'p', self.conn, 'p', 'o', xml='') prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) yield bs._find_existing_deferred started = yield bs.start_instance(mock.Mock()) self.assertEqual(started, True) @defer.inlineCallbacks def test_start_instance_create_fails(self): bs = self.ConcreteWorker('b', 'p', self.conn, 'p', 'o', xml='') prep = mock.Mock() prep.side_effect = lambda: defer.succeed(0) self.patch(bs, "_prepare_base_image", prep) create = mock.Mock() create.side_effect = lambda self: defer.fail( failure.Failure(RuntimeError('oh noes'))) self.patch(libvirtworker.Connection, 'create', create) yield bs._find_existing_deferred started = yield bs.start_instance(mock.Mock()) self.assertEqual(bs.domain, None) self.assertEqual(started, False) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def setup_canStartBuild(self): bs = self.ConcreteWorker('b', 'p', self.conn, 'p', 'o') yield bs._find_existing_deferred bs.parent = mock.Mock() bs.config_version = 0 bs.parent.master.botmaster.getLockFromLockAccesses = mock.Mock(return_value=[]) bs.updateLocks() return bs @defer.inlineCallbacks def test_canStartBuild(self): bs = yield self.setup_canStartBuild() self.assertEqual(bs.canStartBuild(), True) @defer.inlineCallbacks def test_canStartBuild_notready(self): """ If a LibVirtWorker hasn't finished scanning for existing VMs then we shouldn't start builds on it as it might create a 2nd VM when we want to reuse the existing one. """ bs = yield self.setup_canStartBuild() bs.ready = False self.assertEqual(bs.canStartBuild(), False) @defer.inlineCallbacks def test_canStartBuild_domain_and_not_connected(self): """ If we've found that the VM this worker would instance already exists but hasn't connected then we shouldn't start builds or we'll end up with a dupe. 
""" bs = yield self.setup_canStartBuild() bs.domain = mock.Mock() self.assertEqual(bs.canStartBuild(), False) @defer.inlineCallbacks def test_canStartBuild_domain_and_connected(self): """ If we've found an existing VM and it is connected then we should start builds """ bs = yield self.setup_canStartBuild() bs.domain = mock.Mock() isconnected = mock.Mock() isconnected.return_value = True self.patch(bs, "isConnected", isconnected) self.assertEqual(bs.canStartBuild(), True) class TestWorkQueue(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() def tearDown(self): return eventual.flushEventualQueue() def delayed_success(self): def work(): d = defer.Deferred() self.reactor.callLater(0, d.callback, True) return d return work def delayed_errback(self): def work(): d = defer.Deferred() self.reactor.callLater(0, d.errback, failure.Failure( RuntimeError("Test failure"))) return d return work def expect_errback(self, d): @d.addCallback def shouldnt_get_called(f): self.assertEqual(True, False) @d.addErrback def errback(f): """ log.msg("errback called?") """ return d @defer.inlineCallbacks def test_handle_exceptions(self): queue = libvirtworker.WorkQueue() def work(): raise ValueError yield self.expect_errback(queue.execute(work)) @defer.inlineCallbacks def test_handle_immediate_errback(self): queue = libvirtworker.WorkQueue() def work(): return defer.fail(RuntimeError("Sad times")) yield self.expect_errback(queue.execute(work)) @defer.inlineCallbacks def test_handle_delayed_errback(self): queue = libvirtworker.WorkQueue() work = self.delayed_errback() yield self.expect_errback(queue.execute(work)) @defer.inlineCallbacks def test_handle_immediate_success(self): queue = libvirtworker.WorkQueue() def work(): return defer.succeed(True) yield queue.execute(work) @defer.inlineCallbacks def test_handle_delayed_success(self): queue = libvirtworker.WorkQueue() work = self.delayed_success() yield queue.execute(work) @defer.inlineCallbacks def test_single_pow_fires(self): queue = libvirtworker.WorkQueue() yield queue.execute(self.delayed_success()) @defer.inlineCallbacks def test_single_pow_errors_gracefully(self): queue = libvirtworker.WorkQueue() d = queue.execute(self.delayed_errback()) yield self.expect_errback(d) @defer.inlineCallbacks def test_fail_doesnt_break_further_work(self): queue = libvirtworker.WorkQueue() yield self.expect_errback(queue.execute(self.delayed_errback())) yield queue.execute(self.delayed_success()) @defer.inlineCallbacks def test_second_pow_fires(self): queue = libvirtworker.WorkQueue() yield queue.execute(self.delayed_success()) yield queue.execute(self.delayed_success()) @defer.inlineCallbacks def test_work(self): queue = libvirtworker.WorkQueue() # We want these deferreds to fire in order flags = {1: False, 2: False, 3: False} # When first deferred fires, flags[2] and flags[3] should still be false # flags[1] shouldn't already be set, either @defer.inlineCallbacks def d1(): yield queue.execute(self.delayed_success()) self.assertEqual(flags[1], False) flags[1] = True self.assertEqual(flags[2], False) self.assertEqual(flags[3], False) # When second deferred fires, only flags[3] should be set # flags[2] should definitely be False @defer.inlineCallbacks def d2(): yield queue.execute(self.delayed_success()) self.assertFalse(flags[2]) flags[2] = True self.assertTrue(flags[1]) self.assertFalse(flags[3]) # When third deferred fires, only flags[3] should be unset @defer.inlineCallbacks def d3(): yield queue.execute(self.delayed_success()) 
self.assertFalse(flags[3]) flags[3] = True self.assertTrue(flags[1]) self.assertTrue(flags[2]) yield defer.DeferredList([d1(), d2(), d3()], fireOnOneErrback=True) buildbot-2.6.0/master/buildbot/test/unit/test_worker_local.py000066400000000000000000000067301361162603000244600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util.misc import TestReactorMixin from buildbot.worker import local class TestLocalWorker(TestReactorMixin, unittest.TestCase): try: from buildbot_worker.bot import LocalWorker as _ # noqa except ImportError: skip = "buildbot-worker package is not installed" def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantDb=True, wantData=True) self.botmaster = self.master.botmaster self.workers = self.master.workers def createWorker(self, name='bot', attached=False, configured=True, **kwargs): worker = local.LocalWorker(name, **kwargs) if configured: worker.setServiceParent(self.workers) return worker @defer.inlineCallbacks def test_reconfigService_attrs(self): old = self.createWorker('bot', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}) new = self.createWorker('bot', configured=False, max_builds=3, notify_on_missing=['her@me.com'], missing_timeout=121, workdir=os.path.abspath('custom'), properties={'a': 'c'}) old.updateWorker = mock.Mock(side_effect=lambda: defer.succeed(None)) yield old.startService() self.assertEqual( old.remote_worker.bot.basedir, os.path.abspath('basedir/workers/bot')) yield old.reconfigServiceWithSibling(new) self.assertEqual(old.max_builds, 3) self.assertEqual(old.notify_on_missing, ['her@me.com']) self.assertEqual(old.missing_timeout, 121) self.assertEqual(old.properties.getProperty('a'), 'c') self.assertEqual(old.registration.updates, ['bot']) self.assertTrue(old.updateWorker.called) # make sure that we can provide an absolute path self.assertEqual( old.remote_worker.bot.basedir, os.path.abspath('custom')) yield old.stopService() @defer.inlineCallbacks def test_workerinfo(self): wrk = self.createWorker('bot', max_builds=2, notify_on_missing=['me@me.com'], missing_timeout=120, properties={'a': 'b'}) yield wrk.startService() info = yield wrk.conn.remoteGetWorkerInfo() self.assertIn("worker_commands", info) yield wrk.stopService() buildbot-2.6.0/master/buildbot/test/unit/test_worker_manager.py000066400000000000000000000101731361162603000247740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import mock

from twisted.internet import defer
from twisted.trial import unittest

from zope.interface import implementer

from buildbot import interfaces
from buildbot.process import botmaster
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import service
from buildbot.worker import manager as workermanager


@implementer(interfaces.IWorker)
class FakeWorker(service.BuildbotService):
    reconfig_count = 0

    def __init__(self, workername):
        super().__init__(name=workername)

    def reconfigService(self):
        self.reconfig_count += 1
        self.configured = True
        return defer.succeed(None)


class FakeWorker2(FakeWorker):
    pass


class TestWorkerManager(TestReactorMixin, unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self, wantMq=True, wantData=True)
        self.master.mq = self.master.mq
        self.workers = workermanager.WorkerManager(self.master)
        yield self.workers.setServiceParent(self.master)
        # workers expect a botmaster as well as a manager.
        self.master.botmaster.disownServiceParent()
        self.botmaster = botmaster.BotMaster()
        self.master.botmaster = self.botmaster
        yield self.master.botmaster.setServiceParent(self.master)
        self.new_config = mock.Mock()
        self.workers.startService()

    def tearDown(self):
        return self.workers.stopService()

    @defer.inlineCallbacks
    def test_reconfigServiceWorkers_add_remove(self):
        worker = FakeWorker('worker1')
        self.new_config.workers = [worker]
        yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config)
        self.assertIdentical(worker.parent, self.workers)
        self.assertEqual(self.workers.workers, {'worker1': worker})

        self.new_config.workers = []
        self.assertEqual(worker.running, True)
        yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config)
        self.assertEqual(worker.running, False)

    @defer.inlineCallbacks
    def test_reconfigServiceWorkers_reconfig(self):
        worker = FakeWorker('worker1')
        yield worker.setServiceParent(self.workers)
        worker.parent = self.master
        worker.manager = self.workers
        worker.botmaster = self.master.botmaster

        worker_new = FakeWorker('worker1')
        self.new_config.workers = [worker_new]
        yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config)

        # worker was not replaced..
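        # the original FakeWorker instance is expected to have been
        # reconfigured in place rather than swapped for worker_new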
self.assertIdentical(self.workers.workers['worker1'], worker) @defer.inlineCallbacks def test_reconfigServiceWorkers_class_changes(self): worker = FakeWorker('worker1') yield worker.setServiceParent(self.workers) worker_new = FakeWorker2('worker1') self.new_config.workers = [worker_new] yield self.workers.reconfigServiceWithBuildbotConfig(self.new_config) # worker *was* replaced (different class) self.assertIdentical(self.workers.workers['worker1'], worker_new) @defer.inlineCallbacks def test_newConnection_remoteGetWorkerInfo_failure(self): class Error(RuntimeError): pass conn = mock.Mock() conn.remoteGetWorkerInfo = mock.Mock( return_value=defer.fail(Error())) yield self.assertFailure( self.workers.newConnection(conn, "worker"), Error) buildbot-2.6.0/master/buildbot/test/unit/test_worker_marathon.py000066400000000000000000000206331361162603000251750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.process.properties import Properties from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.marathon import MarathonLatentWorker class TestMarathonLatentWorker(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.build = Properties( image="busybox:latest", builder="docker_worker") self.worker = None def tearDown(self): if self.worker is not None: class FakeResult: code = 200 self._http.delete = lambda _: defer.succeed(FakeResult()) self.worker.master.stopService() self.flushLoggedErrors(LatentWorkerSubstantiatiationCancelled) def test_constructor_normal(self): worker = MarathonLatentWorker('bot', 'tcp://marathon.local', 'foo', 'bar', 'debian:wheezy') # class instantiation configures nothing self.assertEqual(worker._http, None) @defer.inlineCallbacks def makeWorker(self, **kwargs): kwargs.setdefault('image', 'debian:wheezy') worker = MarathonLatentWorker('bot', 'tcp://marathon.local', **kwargs) self.worker = worker master = fakemaster.make_master(self, wantData=True) self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( master, self, 'tcp://marathon.local', auth=kwargs.get('auth')) yield worker.setServiceParent(master) worker.reactor = self.reactor yield master.startService() worker.masterhash = "masterhash" return worker @defer.inlineCallbacks def test_start_service(self): worker = self.worker = yield self.makeWorker() # http is lazily created on worker substantiation self.assertNotEqual(worker._http, None) @defer.inlineCallbacks def test_start_worker(self): # 
http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps worker = yield self.makeWorker() worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=201, content_json={'Id': 'id'}) d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) # we simulate a connection worker.attached(FakeBot()) yield d self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_but_no_connection_and_shutdown(self): worker = yield self.makeWorker() worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=201, content_json={'Id': 'id'}) worker.substantiate(None, fakebuild.FakeBuildForRendering()) self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_but_error(self): worker = yield self.makeWorker() self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': 'BRIDGE' }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass" } }, code=404, content_json={'message': 'image not found'}) self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) self.reactor.advance(.1) with self.assertRaises(Exception): yield d self.assertEqual(worker.instance, None) # teardown makes sure all containers are cleaned up @defer.inlineCallbacks def test_start_worker_with_params(self): # http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps worker = yield self.makeWorker(marathon_extra_config={ 'container': { 'docker': { 'network': None } }, 'env': { 'PARAMETER': 'foo' } }) worker.password = "pass" worker.masterFQDN = "master" self._http.expect( method='delete', ep='/v2/apps/buildbot-worker/buildbot-bot-masterhash') self._http.expect( method='post', ep='/v2/apps', json={ 'instances': 1, 'container': { 'docker': { 'image': 'rendered:debian:wheezy', 'network': None }, 'type': 'DOCKER' }, 'id': 'buildbot-worker/buildbot-bot-masterhash', 'env': { 'BUILDMASTER': "master", 'BUILDMASTER_PORT': '1234', 'WORKERNAME': 'bot', 'WORKERPASS': "pass", 'PARAMETER': 'foo' } }, code=201, content_json={'Id': 'id'}) d = worker.substantiate(None, fakebuild.FakeBuildForRendering()) # we simulate a connection worker.attached(FakeBot()) yield d self.assertEqual(worker.instance, {'Id': 'id'}) # teardown makes sure all containers are 
cleaned up buildbot-2.6.0/master/buildbot/test/unit/test_worker_openstack.py000066400000000000000000000331241361162603000253520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. import mock from twisted.internet import defer from twisted.trial import unittest import buildbot.test.fake.openstack as novaclient from buildbot import config from buildbot import interfaces from buildbot.process.properties import Interpolate from buildbot.process.properties import Properties from buildbot.worker import openstack class TestOpenStackWorker(unittest.TestCase): os_auth = dict( os_username='user', os_password='pass', os_tenant_name='tenant', os_auth_url='auth') bs_image_args = dict( flavor=1, image='image-uuid', **os_auth) def setUp(self): self.patch(openstack, "client", novaclient) self.patch(openstack, "loading", novaclient) self.patch(openstack, "session", novaclient) self.build = Properties(image=novaclient.TEST_UUIDS['image']) def test_constructor_nonova(self): self.patch(openstack, "client", None) with self.assertRaises(config.ConfigErrors): openstack.OpenStackLatentWorker('bot', 'pass', **self.bs_image_args) def test_constructor_nokeystoneauth(self): self.patch(openstack, "loading", None) with self.assertRaises(config.ConfigErrors): openstack.OpenStackLatentWorker('bot', 'pass', **self.bs_image_args) def test_constructor_minimal(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.flavor, 1) self.assertEqual(bs.image, 'image-uuid') self.assertEqual(bs.block_devices, None) self.assertIsInstance(bs.novaclient, novaclient.Client) def test_constructor_minimal_keystone_v3(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', os_user_domain='test_oud', os_project_domain='test_opd', **self.bs_image_args) self.assertEqual(bs.workername, 'bot') self.assertEqual(bs.password, 'pass') self.assertEqual(bs.flavor, 1) self.assertEqual(bs.image, 'image-uuid') self.assertEqual(bs.block_devices, None) self.assertIsInstance(bs.novaclient, novaclient.Client) self.assertEqual(bs.novaclient.session.auth.user_domain_name, 'test_oud') self.assertEqual(bs.novaclient.session.auth.project_domain_name, 'test_opd') def test_constructor_region(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', region="test-region", **self.bs_image_args) self.assertEqual(bs.novaclient.client.region_name, "test-region") def test_constructor_block_devices_default(self): block_devices = [{'uuid': 'uuid', 'volume_size': 10}] bs = openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) self.assertEqual(bs.image, None) self.assertEqual(len(bs.block_devices), 1) self.assertEqual(bs.block_devices, [{'boot_index': 0, 'delete_on_termination': True, 
'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': 10, 'uuid': 'uuid'}]) @defer.inlineCallbacks def test_constructor_block_devices_get_sizes(self): block_devices = [ {'source_type': 'image', 'uuid': novaclient.TEST_UUIDS['image']}, {'source_type': 'image', 'uuid': novaclient.TEST_UUIDS['image'], 'volume_size': 4}, {'source_type': 'volume', 'uuid': novaclient.TEST_UUIDS['volume']}, {'source_type': 'snapshot', 'uuid': novaclient.TEST_UUIDS['snapshot']}, ] def check_volume_sizes(_images, block_devices): self.assertEqual(len(block_devices), 4) self.assertEqual(block_devices[0]['volume_size'], 1) self.assertIsInstance(block_devices[0]['volume_size'], int, "Volume size is an integer.") self.assertEqual(block_devices[1]['volume_size'], 4) self.assertEqual(block_devices[2]['volume_size'], 4) self.assertEqual(block_devices[3]['volume_size'], 2) lw = openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) self.assertEqual(lw.image, None) self.assertEqual(lw.block_devices, [{'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['image']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'image', 'volume_size': 4, 'uuid': novaclient.TEST_UUIDS['image']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'volume', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['volume']}, {'boot_index': 0, 'delete_on_termination': True, 'destination_type': 'volume', 'device_name': 'vda', 'source_type': 'snapshot', 'volume_size': None, 'uuid': novaclient.TEST_UUIDS['snapshot']}]) self.patch(lw, "_start_instance", check_volume_sizes) yield lw.start_instance(self.build) @defer.inlineCallbacks def test_constructor_block_devices_missing(self): block_devices = [ {'source_type': 'image', 'uuid': '9fb2e6e8-110d-4388-8c23-0fcbd1e2fcc1'}, ] lw = openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, block_devices=block_devices, **self.os_auth) yield self.assertFailure(lw.start_instance(self.build), novaclient.NotFound) def test_constructor_no_image(self): """ Must have one of image or block_devices specified. 
""" with self.assertRaises(ValueError): openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, **self.os_auth) @defer.inlineCallbacks def test_getImage_string(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) image_uuid = yield bs._getImage(self.build) self.assertEqual('image-uuid', image_uuid) @defer.inlineCallbacks def test_getImage_callable(self): def image_callable(images): filtered = [i for i in images if i.id == 'uuid1'] return filtered[0].id bs = openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, image=image_callable, **self.os_auth) os_client = bs.novaclient os_client.images._add_items([ novaclient.Image('uuid1', 'name1', 1), novaclient.Image('uuid2', 'name2', 1), novaclient.Image('uuid3', 'name3', 1), ]) image_uuid = yield bs._getImage(self.build) self.assertEqual('uuid1', image_uuid) @defer.inlineCallbacks def test_getImage_renderable(self): bs = openstack.OpenStackLatentWorker('bot', 'pass', flavor=1, image=Interpolate('%(prop:image)s'), **self.os_auth) image_uuid = yield bs._getImage(self.build) self.assertEqual(novaclient.TEST_UUIDS['image'], image_uuid) @defer.inlineCallbacks def test_start_instance_already_exists(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs.instance = mock.Mock() yield self.assertFailure(bs.start_instance(self.build), ValueError) @defer.inlineCallbacks def test_start_instance_first_fetch_fail(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_get', True) self.patch(novaclient.Servers, 'gets_until_disappears', 0) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_fail_to_find(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_get', True) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_fail_to_start(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 self.patch(novaclient.Servers, 'fail_to_start', True) yield self.assertFailure(bs.start_instance(self.build), interfaces.LatentWorkerFailedToSubstantiate) @defer.inlineCallbacks def test_start_instance_success(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertTrue(uuid) self.assertEqual(image_uuid, 'image-uuid') self.assertTrue(time_waiting) @defer.inlineCallbacks def test_start_instance_check_meta(self): meta_arg = {'some_key': 'some-value'} bs = openstack.OpenStackLatentWorker('bot', 'pass', meta=meta_arg, **self.bs_image_args) bs._poll_resolution = 0 uuid, image_uuid, time_waiting = yield bs.start_instance(self.build) self.assertIn('meta', bs.instance.boot_kwargs) self.assertIdentical(bs.instance.boot_kwargs['meta'], meta_arg) @defer.inlineCallbacks def test_stop_instance_not_set(self): """ Test stopping the instance but with no instance to stop. 
""" bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) bs.instance = None stopped = yield bs.stop_instance() self.assertEqual(stopped, None) def test_stop_instance_missing(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) instance = mock.Mock() instance.id = 'uuid' bs.instance = instance # TODO: Check log for instance not found. bs.stop_instance() def test_stop_instance_fast(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() self.assertIn(inst.id, s.instances) bs.stop_instance(fast=True) self.assertNotIn(inst.id, s.instances) def test_stop_instance_notfast(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() self.assertIn(inst.id, s.instances) bs.stop_instance(fast=False) self.assertNotIn(inst.id, s.instances) def test_stop_instance_unknown(self): bs = openstack.OpenStackLatentWorker( 'bot', 'pass', **self.bs_image_args) # Make instance immediately active. self.patch(novaclient.Servers, 'gets_until_active', 0) s = novaclient.Servers() bs.instance = inst = s.create() # Set status to DELETED. Instance should not be deleted when shutting # down as it already is. inst.status = novaclient.DELETED self.assertIn(inst.id, s.instances) bs.stop_instance() self.assertIn(inst.id, s.instances) buildbot-2.6.0/master/buildbot/test/unit/test_worker_protocols_base.py000066400000000000000000000045111361162603000263770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.fake import fakeprotocol from buildbot.test.util import protocols from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.protocols import base class TestListener(TestReactorMixin, unittest.TestCase): @defer.inlineCallbacks def test_constructor(self): self.setUpTestReactor() master = fakemaster.make_master(self) listener = base.Listener() yield listener.setServiceParent(master) self.assertEqual(listener.master, master) class TestFakeConnection(protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.worker = mock.Mock() self.conn = fakeprotocol.FakeConnection(self.master, self.worker) class TestConnection(protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.worker = mock.Mock() self.conn = base.Connection(self.master, self.worker) def test_constructor(self): self.assertEqual(self.conn.master, self.master) self.assertEqual(self.conn.worker, self.worker) def test_notify(self): cb = mock.Mock() self.conn.notifyOnDisconnect(cb) self.assertEqual(cb.call_args_list, []) self.conn.notifyDisconnected() self.assertNotEqual(cb.call_args_list, []) buildbot-2.6.0/master/buildbot/test/unit/test_worker_protocols_pb.py000066400000000000000000000374161361162603000261000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.spread import pb as twisted_pb from twisted.trial import unittest from buildbot.test.fake import fakemaster from buildbot.test.util import protocols as util_protocols from buildbot.test.util.misc import TestReactorMixin from buildbot.worker.protocols import base from buildbot.worker.protocols import pb class TestListener(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) @defer.inlineCallbacks def makeListener(self): listener = pb.Listener() yield listener.setServiceParent(self.master) return listener @defer.inlineCallbacks def test_constructor(self): listener = yield self.makeListener() self.assertEqual(listener.master, self.master) self.assertEqual(listener._registrations, {}) @defer.inlineCallbacks def test_updateRegistration_simple(self): listener = yield self.makeListener() reg = yield listener.updateRegistration('example', 'pass', 'tcp:1234') self.assertEqual(self.master.pbmanager._registrations, [('tcp:1234', 'example', 'pass')]) self.assertEqual( listener._registrations['example'], ('pass', 'tcp:1234', reg)) @defer.inlineCallbacks def test_updateRegistration_pass_changed(self): listener = yield self.makeListener() listener.updateRegistration('example', 'pass', 'tcp:1234') reg1 = yield listener.updateRegistration('example', 'pass1', 'tcp:1234') self.assertEqual( listener._registrations['example'], ('pass1', 'tcp:1234', reg1)) self.assertEqual( self.master.pbmanager._unregistrations, [('tcp:1234', 'example')]) @defer.inlineCallbacks def test_updateRegistration_port_changed(self): listener = yield self.makeListener() listener.updateRegistration('example', 'pass', 'tcp:1234') reg1 = yield listener.updateRegistration('example', 'pass', 'tcp:4321') self.assertEqual( listener._registrations['example'], ('pass', 'tcp:4321', reg1)) self.assertEqual( self.master.pbmanager._unregistrations, [('tcp:1234', 'example')]) @defer.inlineCallbacks def test_getPerspective(self): listener = yield self.makeListener() worker = mock.Mock() worker.workername = 'test' mind = mock.Mock() listener.updateRegistration('example', 'pass', 'tcp:1234') self.master.workers.register(worker) conn = yield listener._getPerspective(mind, worker.workername) mind.broker.transport.setTcpKeepAlive.assert_called_with(1) self.assertIsInstance(conn, pb.Connection) class TestConnectionApi(util_protocols.ConnectionInterfaceTest, TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.conn = pb.Connection(self.master, mock.Mock(), mock.Mock()) class TestConnection(TestReactorMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = fakemaster.make_master(self) self.mind = mock.Mock() self.worker = mock.Mock() def test_constructor(self): conn = pb.Connection(self.master, self.worker, self.mind) self.assertEqual(conn.mind, self.mind) self.assertEqual(conn.master, self.master) self.assertEqual(conn.worker, self.worker) @defer.inlineCallbacks def test_attached(self): conn = pb.Connection(self.master, self.worker, self.mind) att = yield conn.attached(self.mind) self.worker.attached.assert_called_with(conn) self.assertEqual(att, conn) self.reactor.pump([10] * 361) self.mind.callRemote.assert_has_calls([ mock.call('print', message="keepalive") ]) conn.detached(self.mind) yield conn.waitShutdown() @defer.inlineCallbacks def test_detached(self): conn = pb.Connection(self.master, 
self.worker, self.mind) conn.attached(self.mind) conn.detached(self.mind) self.assertEqual(conn.keepalive_timer, None) self.assertEqual(conn.mind, None) yield conn.waitShutdown() def test_loseConnection(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.loseConnection() self.assertEqual(conn.keepalive_timer, None) conn.mind.broker.transport.loseConnection.assert_called_with() def test_remotePrint(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.remotePrint(message='test') conn.mind.callRemote.assert_called_with('print', message='test') @defer.inlineCallbacks def test_remoteGetWorkerInfo_slave(self): def side_effect(*args, **kwargs): if 'getWorkerInfo' in args: return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if 'getSlaveInfo' in args: return defer.succeed({'info': 'test'}) if 'getCommands' in args: return defer.succeed({'x': 1, 'y': 2}) if 'getVersion' in args: return defer.succeed('TheVersion') self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. ' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), mock.call('getCommands'), mock.call('getVersion'), ] self.mind.callRemote.assert_has_calls(calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_slave_2_16(self): """In buildslave 2.16 all information about worker is retrieved in a single getSlaveInfo() call.""" def side_effect(*args, **kwargs): if 'getWorkerInfo' in args: return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if 'getSlaveInfo' in args: return defer.succeed({ 'info': 'test', 'slave_commands': {'x': 1, 'y': 2}, 'version': 'TheVersion', }) if 'getCommands' in args: return defer.succeed({'x': 1, 'y': 2}) if 'getVersion' in args: return defer.succeed('TheVersion') self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. 
' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), ] self.mind.callRemote.assert_has_calls(calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_worker(self): def side_effect(*args, **kwargs): if 'getWorkerInfo' in args: return defer.succeed({ 'info': 'test', 'worker_commands': { 'y': 2, 'x': 1 }, 'version': 'TheVersion', }) if 'getSlaveInfo' in args: return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if 'getCommands' in args: return defer.succeed({'x': 1, 'y': 2}) if 'getVersion' in args: return defer.succeed('TheVersion') self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'info': 'test', 'worker_commands': { 'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) calls = [mock.call('getWorkerInfo')] self.mind.callRemote.assert_has_calls(calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_getWorkerInfo_fails(self): def side_effect(*args, **kwargs): if 'getWorkerInfo' in args: return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if 'getSlaveInfo' in args: return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) if 'getCommands' in args: return defer.succeed({'x': 1, 'y': 2}) if 'getVersion' in args: return defer.succeed('TheVersion') self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {'worker_commands': {'y': 2, 'x': 1}, 'version': 'TheVersion'} self.assertEqual(info, r) calls = [mock.call('getSlaveInfo'), mock.call( 'getCommands'), mock.call('getVersion')] self.mind.callRemote.assert_has_calls(calls) @defer.inlineCallbacks def test_remoteGetWorkerInfo_no_info(self): # All remote commands tried in remoteGetWorkerInfo are unavailable. # This should be real old worker... def side_effect(*args, **kwargs): if args[0] == 'print': return return defer.fail(twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None)) self.mind.callRemote.side_effect = side_effect conn = pb.Connection(self.master, self.worker, self.mind) info = yield conn.remoteGetWorkerInfo() r = {} self.assertEqual(info, r) calls = [ mock.call('getWorkerInfo'), mock.call('print', message='buildbot-slave detected, failing back to deprecated buildslave API. 
' '(Ignoring missing getWorkerInfo method.)'), mock.call('getSlaveInfo'), mock.call('getCommands'), mock.call('getVersion'), ] self.mind.callRemote.assert_has_calls(calls) @defer.inlineCallbacks def test_remoteSetBuilderList(self): builders = ['builder1', 'builder2'] self.mind.callRemote.return_value = defer.succeed(builders) conn = pb.Connection(self.master, self.worker, self.mind) r = yield conn.remoteSetBuilderList(builders) self.assertEqual(r, builders) self.assertEqual(conn.builders, builders) self.mind.callRemote.assert_called_with('setBuilderList', builders) def test_remoteStartCommand(self): builders = ['builder'] ret_val = {'builder': mock.Mock()} self.mind.callRemote.return_value = defer.succeed(ret_val) conn = pb.Connection(self.master, self.worker, self.mind) conn.remoteSetBuilderList(builders) RCInstance, builder_name, commandID = base.RemoteCommandImpl( ), "builder", None remote_command, args = "command", {"args": 'args'} conn.remoteStartCommand( RCInstance, builder_name, commandID, remote_command, args) callargs = ret_val['builder'].callRemote.call_args_list[0][0] callargs_without_rc = ( callargs[0], callargs[2], callargs[3], callargs[4]) self.assertEqual(callargs_without_rc, ('startCommand', commandID, remote_command, args)) self.assertIsInstance(callargs[1], pb.RemoteCommand) self.assertEqual(callargs[1].impl, RCInstance) @defer.inlineCallbacks def test_do_keepalive(self): conn = pb.Connection(self.master, self.worker, self.mind) yield conn._do_keepalive() self.mind.callRemote.assert_called_with('print', message="keepalive") def test_remoteShutdown(self): self.mind.callRemote.return_value = defer.succeed(None) conn = pb.Connection(self.master, self.worker, self.mind) # note that we do not test the "old way", as it is now *very* old. 
conn.remoteShutdown() self.mind.callRemote.assert_called_with('shutdown') def test_remoteStartBuild(self): conn = pb.Connection(self.master, self.worker, self.mind) builders = {'builder': mock.Mock()} self.mind.callRemote.return_value = defer.succeed(builders) conn = pb.Connection(self.master, self.worker, self.mind) conn.remoteSetBuilderList(builders) conn.remoteStartBuild('builder') builders['builder'].callRemote.assert_called_with('startBuild') @defer.inlineCallbacks def test_startStopKeepaliveTimer(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.startKeepaliveTimer() self.mind.callRemote.assert_not_called() self.reactor.pump([10] * 361) self.mind.callRemote.assert_has_calls([ mock.call('print', message="keepalive") ]) self.reactor.pump([10] * 361) self.mind.callRemote.assert_has_calls([ mock.call('print', message="keepalive"), mock.call('print', message="keepalive"), ]) conn.stopKeepaliveTimer() yield conn.waitShutdown() def test_perspective_shutdown(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.perspective_shutdown() conn.worker.shutdownRequested.assert_called_with() conn.worker.messageReceivedFromWorker.assert_called_with() def test_perspective_keepalive(self): conn = pb.Connection(self.master, self.worker, self.mind) conn.perspective_keepalive() conn.worker.messageReceivedFromWorker.assert_called_with() class Test_wrapRemoteException(unittest.TestCase): def test_raises_NoSuchMethod(self): def f(): with pb._wrapRemoteException(): raise twisted_pb.RemoteError( 'twisted.spread.flavors.NoSuchMethod', None, None) with self.assertRaises(pb._NoSuchMethod): f() def test_raises_unknown(self): class Error(Exception): pass def f(): with pb._wrapRemoteException(): raise Error() with self.assertRaises(Error): f() def test_raises_RemoteError(self): def f(): with pb._wrapRemoteException(): raise twisted_pb.RemoteError( 'twisted.spread.flavors.ProtocolError', None, None) with self.assertRaises(twisted_pb.RemoteError): f() buildbot-2.6.0/master/buildbot/test/unit/test_www_auth.py000066400000000000000000000237221361162603000236420ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.cred.credentials import UsernamePassword from twisted.cred.error import UnauthorizedLogin from twisted.internet import defer from twisted.trial import unittest from twisted.web.error import Error from twisted.web.guard import BasicCredentialFactory from twisted.web.guard import HTTPAuthSessionWrapper from twisted.web.resource import IResource from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth class AuthResourceMixin: def setUpAuthResource(self): self.master = self.make_master(url='h:/a/b/') self.auth = self.master.config.www['auth'] self.master.www.auth = self.auth self.auth.master = self.master class AuthRootResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.AuthRootResource(self.master) def test_getChild_login(self): glr = mock.Mock(name='glr') self.master.www.auth.getLoginResource = glr child = self.rsrc.getChild(b'login', mock.Mock(name='req')) self.assertIdentical(child, glr()) def test_getChild_logout(self): glr = mock.Mock(name='glr') self.master.www.auth.getLogoutResource = glr child = self.rsrc.getChild(b'logout', mock.Mock(name='req')) self.assertIdentical(child, glr()) class AuthBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.AuthBase() self.master = self.make_master(url='h:/a/b/') self.auth.master = self.master self.req = self.make_request(b'/') @defer.inlineCallbacks def test_maybeAutoLogin(self): self.assertEqual((yield self.auth.maybeAutoLogin(self.req)), None) def test_getLoginResource(self): with self.assertRaises(Error): self.auth.getLoginResource() @defer.inlineCallbacks def test_updateUserInfo(self): self.auth.userInfoProvider = auth.UserInfoProviderBase() self.auth.userInfoProvider.getUserInfo = lambda un: {'info': un} self.req.session.user_info = {'username': 'elvira'} yield self.auth.updateUserInfo(self.req) self.assertEqual(self.req.session.user_info, {'info': 'elvira', 'username': 'elvira'}) def getConfigDict(self): self.assertEqual(auth.getConfigDict(), {'name': 'AuthBase'}) class UseAuthInfoProviderBase(unittest.TestCase): @defer.inlineCallbacks def test_getUserInfo(self): uip = auth.UserInfoProviderBase() self.assertEqual((yield uip.getUserInfo('jess')), {'email': 'jess'}) class NoAuth(unittest.TestCase): def test_exists(self): assert auth.NoAuth class RemoteUserAuth(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.RemoteUserAuth(header=b'HDR') self.make_master() self.request = self.make_request(b'/') @defer.inlineCallbacks def test_maybeAutoLogin(self): self.request.input_headers[b'HDR'] = b'rachel@foo.com' yield self.auth.maybeAutoLogin(self.request) self.assertEqual(self.request.session.user_info, { 'username': 'rachel', 'realm': 'foo.com', 'email': 'rachel'}) @defer.inlineCallbacks def test_maybeAutoLogin_no_header(self): try: yield self.auth.maybeAutoLogin(self.request) except Error as e: self.assertEqual(int(e.status), 403) else: self.fail("403 expected") @defer.inlineCallbacks def test_maybeAutoLogin_mismatched_value(self): self.request.input_headers[b'HDR'] = b'rachel' try: yield self.auth.maybeAutoLogin(self.request) except Error as e: self.assertEqual(int(e.status), 403) else: self.fail("403 expected") 
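# For reference, the RemoteUserAuth behaviour exercised above is normally
# enabled from master.cfg roughly as follows; this is a minimal sketch only,
# and the header name plus the reverse proxy that injects it are
# deployment-specific assumptions, not something these tests configure:
#
#   from buildbot.plugins import util
#   c['www'] = dict(
#       port=8010,
#       auth=util.RemoteUserAuth(header=b'REMOTE_USER'),
#   )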
class AuthRealm(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.auth = auth.RemoteUserAuth(header=b'HDR') self.auth = auth.NoAuth() self.make_master() def test_requestAvatar(self): realm = auth.AuthRealm(self.master, self.auth) itfc, rsrc, logout = realm.requestAvatar("me", None, IResource) self.assertIdentical(itfc, IResource) self.assertIsInstance(rsrc, auth.PreAuthenticatedLoginResource) class TwistedICredAuthBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() # twisted.web makes it difficult to simulate the authentication process, so # this only tests the mechanics of the getLoginResource method. def test_getLoginResource(self): self.auth = auth.TwistedICredAuthBase( credentialFactories=[BasicCredentialFactory("buildbot")], checkers=[InMemoryUsernamePasswordDatabaseDontUse(good=b'guy')]) self.auth.master = self.make_master(url='h:/a/b/') rsrc = self.auth.getLoginResource() self.assertIsInstance(rsrc, HTTPAuthSessionWrapper) class UserPasswordAuth(www.WwwTestMixin, unittest.TestCase): def test_passwordStringToBytes(self): login = {"user_string": "password", "user_bytes": b"password"} correct_login = {b"user_string": b"password", b"user_bytes": b"password"} self.auth = auth.UserPasswordAuth(login) self.assertEqual(self.auth.checkers[0].users, correct_login) login = [("user_string", "password"), ("user_bytes", b"password")] correct_login = {b"user_string": b"password", b"user_bytes": b"password"} self.auth = auth.UserPasswordAuth(login) self.assertEqual(self.auth.checkers[0].users, correct_login) class CustomAuth(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): class MockCustomAuth(auth.CustomAuth): def check_credentials(self, us, ps): return us == 'fellow' and ps == 'correct' def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_callable(self): self.auth = self.MockCustomAuth() cred_good = UsernamePassword('fellow', 'correct') result_good = yield self.auth.checkers[0].requestAvatarId(cred_good) self.assertEqual(result_good, 'fellow') cred_bad = UsernamePassword('bandid', 'incorrect') defer_bad = self.auth.checkers[0].requestAvatarId(cred_bad) yield self.assertFailure(defer_bad, UnauthorizedLogin) class LoginResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() @defer.inlineCallbacks def test_render(self): self.rsrc = auth.LoginResource(self.master) self.rsrc.renderLogin = mock.Mock( spec=self.rsrc.renderLogin, return_value=defer.succeed(b'hi')) yield self.render_resource(self.rsrc, b'/auth/login') self.rsrc.renderLogin.assert_called_with(mock.ANY) class PreAuthenticatedLoginResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.PreAuthenticatedLoginResource(self.master, 'him') @defer.inlineCallbacks def test_render(self): self.auth.maybeAutoLogin = mock.Mock() def updateUserInfo(request): session = request.getSession() session.user_info['email'] = session.user_info['username'] + "@org" session.updateSession(request) self.auth.updateUserInfo = mock.Mock(side_effect=updateUserInfo) res = yield self.render_resource(self.rsrc, b'/auth/login') self.assertEqual(res, {'redirected': b'h:/a/b/#/'}) self.assertFalse(self.auth.maybeAutoLogin.called) self.auth.updateUserInfo.assert_called_with(mock.ANY) self.assertEqual(self.master.session.user_info, {'email': 
'him@org', 'username': 'him'}) class LogoutResource(TestReactorMixin, www.WwwTestMixin, AuthResourceMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.setUpAuthResource() self.rsrc = auth.LogoutResource(self.master) @defer.inlineCallbacks def test_render(self): self.master.session.expire = mock.Mock() res = yield self.render_resource(self.rsrc, b'/auth/logout') self.assertEqual(res, {'redirected': b'h:/a/b/#/'}) self.master.session.expire.assert_called_with() @defer.inlineCallbacks def test_render_with_crlf(self): self.master.session.expire = mock.Mock() res = yield self.render_resource(self.rsrc, b'/auth/logout?redirect=%0d%0abla') # everything after a %0d shall be stripped self.assertEqual(res, {'redirected': b'h:/a/b/#'}) self.master.session.expire.assert_called_with() buildbot-2.6.0/master/buildbot/test/unit/test_www_authz.py000066400000000000000000000242161361162603000240330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import fakedb from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import authz from buildbot.www.authz.endpointmatchers import AnyEndpointMatcher from buildbot.www.authz.endpointmatchers import BranchEndpointMatcher from buildbot.www.authz.endpointmatchers import ForceBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import RebuildBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import StopBuildEndpointMatcher from buildbot.www.authz.endpointmatchers import ViewBuildsEndpointMatcher from buildbot.www.authz.roles import RolesFromDomain from buildbot.www.authz.roles import RolesFromEmails from buildbot.www.authz.roles import RolesFromGroups from buildbot.www.authz.roles import RolesFromOwner class Authz(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() authzcfg = authz.Authz( # simple matcher with '*' glob character stringsMatcher=authz.fnmatchStrMatcher, # stringsMatcher = authz.Authz.reStrMatcher, # if you prefer # regular expressions allowRules=[ # admins can do anything, # defaultDeny=False: if user does not have the admin role, we # continue parsing rules AnyEndpointMatcher(role="admins", defaultDeny=False), # rules for viewing builds, builders, step logs # depending on the sourcestamp or buildername ViewBuildsEndpointMatcher( branch="secretbranch", role="agents"), ViewBuildsEndpointMatcher( project="secretproject", role="agents"), ViewBuildsEndpointMatcher(branch="*", role="*"), ViewBuildsEndpointMatcher(project="*", role="*"), StopBuildEndpointMatcher(role="owner"), RebuildBuildEndpointMatcher(role="owner"), # nine-* groups can do stuff on the nine branch BranchEndpointMatcher(branch="nine", role="nine-*"), # eight-* groups can do stuff on the eight 
branch BranchEndpointMatcher(branch="eight", role="eight-*"), # *-try groups can start "try" builds ForceBuildEndpointMatcher(builder="try", role="*-developers"), # *-mergers groups can start "merge" builds ForceBuildEndpointMatcher(builder="merge", role="*-mergers"), # *-releasers groups can start "release" builds ForceBuildEndpointMatcher( builder="release", role="*-releasers"), ], roleMatchers=[ RolesFromGroups(groupPrefix="buildbot-"), RolesFromEmails(admins=["homer@springfieldplant.com"], agents=["007@mi6.uk"]), RolesFromOwner(role="owner"), RolesFromDomain(admins=["mi7.uk"]) ] ) self.users = dict(homer=dict(email="homer@springfieldplant.com"), bond=dict(email="007@mi6.uk"), moneypenny=dict(email="moneypenny@mi7.uk"), nineuser=dict(email="user@nine.com", groups=["buildbot-nine-mergers", "buildbot-nine-developers"]), eightuser=dict( email="user@eight.com", groups=["buildbot-eight-deverlopers"]) ) self.master = self.make_master(url='h:/a/b/', authz=authzcfg) self.authz = self.master.authz self.master.db.insertTestData([ fakedb.Builder(id=77, name="mybuilder"), fakedb.Master(id=88), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=8822), fakedb.BuildsetProperty(buildsetid=8822, property_name='owner', property_value='["user@nine.com", "force"]'), fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77), fakedb.Build(id=13, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=3), fakedb.Build(id=14, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=4), fakedb.Build(id=15, builderid=77, masterid=88, workerid=13, buildrequestid=82, number=5), ]) def setAllowRules(self, allow_rules): # we should add links to authz and master instances in each new rule for r in allow_rules: r.setAuthz(self.authz) self.authz.allowRules = allow_rules def assertUserAllowed(self, ep, action, options, user): return self.authz.assertUserAllowed(tuple(ep.split("/")), action, options, self.users[user]) @defer.inlineCallbacks def assertUserForbidden(self, ep, action, options, user): try: yield self.authz.assertUserAllowed(tuple(ep.split("/")), action, options, self.users[user]) except authz.Forbidden as err: self.assertIn('need to have role', repr(err)) @defer.inlineCallbacks def test_anyEndpoint(self): yield self.assertUserAllowed("foo/bar", "get", {}, "homer") yield self.assertUserAllowed("foo/bar", "get", {}, "moneypenny") yield self.assertUserForbidden("foo/bar", "get", {}, "bond") @defer.inlineCallbacks def test_stopBuild(self): # admin can always stop yield self.assertUserAllowed("builds/13", "stop", {}, "homer") # owner can always stop yield self.assertUserAllowed("builds/13", "stop", {}, "nineuser") yield self.assertUserAllowed("buildrequests/82", "stop", {}, "nineuser") # not owner cannot stop yield self.assertUserForbidden("builds/13", "stop", {}, "eightuser") yield self.assertUserForbidden("buildrequests/82", "stop", {}, "eightuser") @defer.inlineCallbacks def test_rebuildBuild(self): # admin can rebuild yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") # owner can always rebuild yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") yield self.assertUserAllowed("buildrequests/82", "rebuild", {}, "nineuser") # not owner cannot rebuild yield self.assertUserForbidden("builds/13", "rebuild", {}, "eightuser") yield self.assertUserForbidden("buildrequests/82", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_fnmatchPatternRoleCheck(self): # defaultDeny is True by default so action is denied if no match allow_rules = [ 
AnyEndpointMatcher(role="[a,b]dmin?") ] self.setAllowRules(allow_rules) yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") # check if action is denied with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_regexPatternRoleCheck(self): # change matcher self.authz.match = authz.reStrMatcher # defaultDeny is True by default so action is denied if no match allow_rules = [ AnyEndpointMatcher(role="(admin|agent)s"), ] self.setAllowRules(allow_rules) yield self.assertUserAllowed("builds/13", "rebuild", {}, "homer") yield self.assertUserAllowed("builds/13", "rebuild", {}, "bond") # check if action is denied with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") with self.assertRaisesRegex(authz.Forbidden, '403 you need to have role .+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "eightuser") @defer.inlineCallbacks def test_DefaultDenyFalseContinuesCheck(self): # defaultDeny is True in the last rule so action is denied in the last check allow_rules = [ AnyEndpointMatcher(role="not-exists1", defaultDeny=False), AnyEndpointMatcher(role="not-exists2", defaultDeny=False), AnyEndpointMatcher(role="not-exists3", defaultDeny=True) ] self.setAllowRules(allow_rules) # check if action is denied and last check was exact against not-exist3 with self.assertRaisesRegex(authz.Forbidden, '.+not-exists3.+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") @defer.inlineCallbacks def test_DefaultDenyTrueStopsCheckIfFailed(self): # defaultDeny is True in the first rule so action is denied in the first check allow_rules = [ AnyEndpointMatcher(role="not-exists1", defaultDeny=True), AnyEndpointMatcher(role="not-exists2", defaultDeny=False), AnyEndpointMatcher(role="not-exists3", defaultDeny=False) ] self.setAllowRules(allow_rules) # check if action is denied and last check was exact against not-exist1 with self.assertRaisesRegex(authz.Forbidden, '.+not-exists1.+'): yield self.assertUserAllowed("builds/13", "rebuild", {}, "nineuser") buildbot-2.6.0/master/buildbot/test/unit/test_www_avatar.py000066400000000000000000000065421361162603000241600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth from buildbot.www import avatar class AvatarResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_default(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/') self.assertEqual( res, dict(redirected=avatar.AvatarResource.defaultAvatarUrl)) @defer.inlineCallbacks def test_gravatar(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar.AvatarGravatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce' b'def654fccc4a4d8?d=retro&s=32')) @defer.inlineCallbacks def test_custom(self): class CustomAvatar(avatar.AvatarBase): def getUserAvatar(self, email, size, defaultAvatarUrl): return defer.succeed((b"image/png", email + str(size).encode('utf-8') + defaultAvatarUrl)) master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[CustomAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, b"foo32http://a/b/img/nobody.png") @defer.inlineCallbacks def test_custom_not_found(self): # use gravatar if the custom avatar fail to return a response class CustomAvatar(avatar.AvatarBase): def getUserAvatar(self, email, size, defaultAvatarUrl): return defer.succeed(None) master = self.make_master(url=b'http://a/b/', auth=auth.NoAuth(), avatar_methods=[CustomAvatar(), avatar.AvatarGravatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce' b'def654fccc4a4d8?d=retro&s=32')) buildbot-2.6.0/master/buildbot/test/unit/test_www_config.py000066400000000000000000000107441361162603000241460ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import mock from twisted.internet import defer from twisted.python import log from twisted.python import util from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.www import auth from buildbot.www import config class IndexResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_render(self): _auth = auth.NoAuth() _auth.maybeAutoLogin = mock.Mock() custom_versions = [ ['test compoent', '0.1.2'], ['test component 2', '0.2.1']] master = self.make_master( url='h:/a/b/', auth=_auth, versions=custom_versions) rsrc = config.IndexResource(master, "foo") rsrc.reconfigResource(master.config) rsrc.jinja = mock.Mock() template = mock.Mock() rsrc.jinja.get_template = lambda x: template template.render = lambda configjson, config, custom_templates: configjson vjson = [list(v) for v in rsrc.getEnvironmentVersions()] + custom_versions res = yield self.render_resource(rsrc, b'/') res = json.loads(bytes2unicode(res)) _auth.maybeAutoLogin.assert_called_with(mock.ANY) exp = {"authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": { "name": "NoAuth"}, "user": {"anonymous": True}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None} self.assertEqual(res, exp) master.session.user_info = dict(name="me", email="me@me.org") res = yield self.render_resource(rsrc, b'/') res = json.loads(bytes2unicode(res)) exp = {"authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": {"name": "NoAuth"}, "user": {"email": "me@me.org", "name": "me"}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None} self.assertEqual(res, exp) master = self.make_master( url='h:/a/c/', auth=_auth, versions=custom_versions) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/') res = json.loads(bytes2unicode(res)) exp = {"authz": {}, "titleURL": "http://buildbot.net", "versions": vjson, "title": "Buildbot", "auth": { "name": "NoAuth"}, "user": {"anonymous": True}, "buildbotURL": "h:/a/b/", "multiMaster": False, "port": None} self.assertEqual(res, exp) def test_parseCustomTemplateDir(self): exp = {'views/builds.html': '
<div>\n</div>
'} try: # we make the test work if pyjade is present or note # It is better than just skip if pyjade is not there import pyjade # pylint: disable=import-outside-toplevel [pyjade] exp.update({'plugin/views/plugin.html': '
<div>this is customized</div>
'}) except ImportError: log.msg("Only testing html based template override") template_dir = util.sibpath(__file__, "test_templates_dir") master = self.make_master(url='h:/a/b/') rsrc = config.IndexResource(master, "foo") res = rsrc.parseCustomTemplateDir(template_dir) self.assertEqual(res, exp) def test_CustomTemplateDir(self): master = self.make_master(url='h:/a/b/') rsrc = config.IndexResource(master, "foo") master.config.www['custom_templates_dir'] = 'foo' rsrc.parseCustomTemplateDir = mock.Mock(return_value="returnvalue") rsrc.reconfigResource(master.config) self.assertNotIn('custom_templates_dir', master.config.www) self.assertEqual('returnvalue', rsrc.custom_templates) buildbot-2.6.0/master/buildbot/test/unit/test_www_endpointmatchers.py000066400000000000000000000135761361162603000262560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.schedulers.forcesched import ForceScheduler from buildbot.test.fake import fakedb from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www.authz import endpointmatchers class EndpointBase(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/a/b/') self.db = self.master.db self.matcher = self.makeMatcher() self.matcher.setAuthz(self.master.authz) self.insertData() def makeMatcher(self): raise NotImplementedError() def assertMatch(self, match): self.assertTrue(match is not None) def assertNotMatch(self, match): self.assertTrue(match is None) def insertData(self): self.db.insertTestData([ fakedb.SourceStamp(id=13, branch='secret'), fakedb.Build( id=15, buildrequestid=16, masterid=1, workerid=2, builderid=21), fakedb.BuildRequest(id=16, buildsetid=17), fakedb.Buildset(id=17), fakedb.BuildsetSourceStamp(id=20, buildsetid=17, sourcestampid=13), fakedb.Builder(id=21, name="builder"), ]) class ValidEndpointMixin: @defer.inlineCallbacks def test_invalidPath(self): ret = yield self.matcher.match(("foo", "bar")) self.assertNotMatch(ret) class AnyEndpointMatcher(EndpointBase): def makeMatcher(self): return endpointmatchers.AnyEndpointMatcher(role="foo") @defer.inlineCallbacks def test_nominal(self): ret = yield self.matcher.match(("foo", "bar")) self.assertMatch(ret) class AnyControlEndpointMatcher(EndpointBase): def makeMatcher(self): return endpointmatchers.AnyControlEndpointMatcher(role="foo") @defer.inlineCallbacks def test_default_action(self): ret = yield self.matcher.match(("foo", "bar")) self.assertMatch(ret) @defer.inlineCallbacks def test_get(self): ret = yield self.matcher.match(("foo", "bar"), action="GET") self.assertNotMatch(ret) @defer.inlineCallbacks def test_other_action(self): ret = yield self.matcher.match(("foo", "bar"), action="foo") self.assertMatch(ret) class 
ViewBuildsEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.ViewBuildsEndpointMatcher(branch="secret", role="agent") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15")) self.assertMatch(ret) test_build.skip = "ViewBuildsEndpointMatcher is not implemented yet" class StopBuildEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.StopBuildEndpointMatcher(builder="builder", role="owner") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertMatch(ret) @defer.inlineCallbacks def test_build_no_match(self): self.matcher.builder = "foo" ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_build_no_builder(self): self.matcher.builder = None ret = yield self.matcher.match(("builds", "15"), "stop") self.assertMatch(ret) class ForceBuildEndpointMatcherBranch(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.ForceBuildEndpointMatcher(builder="builder", role="owner") def insertData(self): super().insertData() self.master.allSchedulers = lambda: [ ForceScheduler(name="sched1", builderNames=["builder"])] @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_forcesched(self): ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertMatch(ret) @defer.inlineCallbacks def test_noforcesched(self): ret = yield self.matcher.match(("forceschedulers", "sched2"), "force") self.assertNotMatch(ret) @defer.inlineCallbacks def test_forcesched_builder_no_match(self): self.matcher.builder = "foo" ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertNotMatch(ret) @defer.inlineCallbacks def test_forcesched_nobuilder(self): self.matcher.builder = None ret = yield self.matcher.match(("forceschedulers", "sched1"), "force") self.assertMatch(ret) class EnableSchedulerEndpointMatcher(EndpointBase, ValidEndpointMixin): def makeMatcher(self): return endpointmatchers.EnableSchedulerEndpointMatcher(role="agent") @defer.inlineCallbacks def test_build(self): ret = yield self.matcher.match(("builds", "15"), "stop") self.assertNotMatch(ret) @defer.inlineCallbacks def test_scheduler_enable(self): ret = yield self.matcher.match(("schedulers", "15"), "enable") self.assertMatch(ret) buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_base.py000066400000000000000000000110671361162603000250150ustar00rootroot00000000000000import json from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.www.change_hook import ChangeHookResource from buildbot.www.hooks.base import BaseHookHandler def _prepare_base_change_hook(testcase, **options): return ChangeHookResource(dialects={ 'base': options }, master=fakeMasterForHooks(testcase)) def _prepare_request(payload, headers=None): if headers is None: headers = { b"Content-type": b"application/x-www-form-urlencoded", b"Accept": b"text/plain"} else: headers = {} if b'comments' not in payload: payload[b'comments'] = b'test_www_hook_base submission' # Required field request = FakeRequest() request.uri = b"/change_hook/base" request.method = 
b"POST" request.args = payload request.received_headers.update(headers) return request class TestChangeHookConfiguredWithBase(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_base_change_hook(self) @defer.inlineCallbacks def _check_base_with_change(self, payload): self.request = _prepare_request(payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] def _first_or_nothing(val): if isinstance(val, type([])): val = val[0] return bytes2unicode(val) if payload.get(b'files'): files = json.loads(_first_or_nothing(payload.get(b'files'))) else: files = [] self.assertEqual(change['files'], files) if payload.get(b'properties'): props = json.loads(_first_or_nothing(payload.get(b'properties'))) else: props = {} self.assertEqual(change['properties'], props) self.assertEqual( change['author'], _first_or_nothing(payload.get(b'author', payload.get(b'who')))) for field in ('revision', 'committer', 'comments', 'branch', 'category', 'revlink'): self.assertEqual( change[field], _first_or_nothing(payload.get(field.encode()))) for field in ('repository', 'project'): self.assertEqual( change[field], _first_or_nothing(payload.get(field.encode())) or '') def test_base_with_no_change(self): return self._check_base_with_change({}) def test_base_with_changes(self): self._check_base_with_change({ b'revision': [b'1234badcaca5678'], b'branch': [b'master'], b'comments': [b'Fix foo bar'], b'category': [b'bug'], b'revlink': [b'https://git.myproject.org/commit/1234badcaca5678'], b'repository': [b'myproject'], b'project': [b'myproject'], b'author': [b'me '], b'committer': [b'me '], b'files': [b'["src/main.c", "src/foo.c"]'], b'properties': [b'{"color": "blue", "important": true, "size": 2}'], }) class TestChangeHookConfiguredWithCustomBase(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() class CustomBase(BaseHookHandler): def getChanges(self, request): args = request.args chdict = dict( revision=args.get(b'revision'), repository=args.get(b'_repository') or '', project=args.get(b'project') or '', codebase=args.get(b'codebase')) return ([chdict], None) self.changeHook = _prepare_base_change_hook(self, custom_class=CustomBase) @defer.inlineCallbacks def _check_base_with_change(self, payload): self.request = _prepare_request(payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['repository'], payload.get(b'_repository') or '') def test_base_with_no_change(self): return self._check_base_with_change({b'repository': b'foo'}) buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_bitbucket.py000066400000000000000000000220611361162603000260530ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Copyright Manba Team from twisted.internet.defer import inlineCallbacks from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook from buildbot.www.hooks.bitbucket import _HEADER_EVENT gitJsonPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ { "author": "marcus", "branch": "master", "files": [ { "file": "somefile.py", "type": "modified" } ], "message": "Added some more things to somefile.py", "node": "620ade18607a", "parents": [ "702c70160afc" ], "raw_author": "Marcus Bertrand ", "raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9", "revision": null, "size": -1, "timestamp": "2012-05-30 05:58:56", "utctimestamp": "2012-05-30 03:58:56+00:00" } ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "git", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" mercurialJsonPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ { "author": "marcus", "branch": "master", "files": [ { "file": "somefile.py", "type": "modified" } ], "message": "Added some more things to somefile.py", "node": "620ade18607a", "parents": [ "702c70160afc" ], "raw_author": "Marcus Bertrand ", "raw_node": "620ade18607ac42d872b568bb92acaa9a28620e9", "revision": null, "size": -1, "timestamp": "2012-05-30 05:58:56", "utctimestamp": "2012-05-30 03:58:56+00:00" } ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "hg", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" gitJsonNoCommitsPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "git", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" mercurialJsonNoCommitsPayload = b"""{ "canon_url": "https://bitbucket.org", "commits": [ ], "repository": { "absolute_url": "/marcus/project-x/", "fork": false, "is_private": true, "name": "Project X", "owner": "marcus", "scm": "hg", "slug": "project-x", "website": "https://atlassian.com/" }, "user": "marcus" }""" class TestChangeHookConfiguredWithBitbucketChange(unittest.TestCase, TestReactorMixin): """Unit tests for BitBucket Change Hook """ def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucket': True}, master=fakeMasterForHooks(self)) @inlineCallbacks def testGitWithChange(self): change_dict = {b'payload': [gitJsonPayload]} request = FakeRequest(change_dict) request.received_headers[_HEADER_EVENT] = b"repo:push" request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['files'], ['somefile.py']) self.assertEqual( commit['repository'], 'https://bitbucket.org/marcus/project-x/') self.assertEqual( commit['when_timestamp'], 
1338350336 ) self.assertEqual( commit['author'], 'Marcus Bertrand ') self.assertEqual( commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9') self.assertEqual( commit['comments'], 'Added some more things to somefile.py') self.assertEqual(commit['branch'], 'master') self.assertEqual( commit['revlink'], 'https://bitbucket.org/marcus/project-x/commits/' '620ade18607ac42d872b568bb92acaa9a28620e9' ) self.assertEqual( commit['properties']['event'], 'repo:push') @inlineCallbacks def testGitWithNoCommitsPayload(self): change_dict = {b'payload': [gitJsonNoCommitsPayload]} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'no change found') @inlineCallbacks def testMercurialWithChange(self): change_dict = {b'payload': [mercurialJsonPayload]} request = FakeRequest(change_dict) request.received_headers[_HEADER_EVENT] = b"repo:push" request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['files'], ['somefile.py']) self.assertEqual( commit['repository'], 'https://bitbucket.org/marcus/project-x/') self.assertEqual( commit['when_timestamp'], 1338350336 ) self.assertEqual( commit['author'], 'Marcus Bertrand ') self.assertEqual( commit['revision'], '620ade18607ac42d872b568bb92acaa9a28620e9') self.assertEqual( commit['comments'], 'Added some more things to somefile.py') self.assertEqual(commit['branch'], 'master') self.assertEqual( commit['revlink'], 'https://bitbucket.org/marcus/project-x/commits/' '620ade18607ac42d872b568bb92acaa9a28620e9' ) self.assertEqual( commit['properties']['event'], 'repo:push') @inlineCallbacks def testMercurialWithNoCommitsPayload(self): change_dict = {b'payload': [mercurialJsonNoCommitsPayload]} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'no change found') @inlineCallbacks def testWithNoJson(self): request = FakeRequest() request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b'Error processing changes.') request.setResponseCode.assert_called_with( 500, b'Error processing changes.') self.assertEqual(len(self.flushLoggedErrors()), 1) @inlineCallbacks def testGitWithChangeAndProject(self): change_dict = { b'payload': [gitJsonPayload], b'project': [b'project-name']} request = FakeRequest(change_dict) request.uri = b'/change_hook/bitbucket' request.method = b'POST' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) commit = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(commit['project'], 'project-name') buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_bitbucketcloud.py000066400000000000000000000660271361162603000271140ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Copyright Mamba Team from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www import change_hook from buildbot.www.hooks.bitbucketcloud import _HEADER_EVENT _CT_JSON = b'application/json' pushJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": false, "new": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "old": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba" } } } ] } } """ pullRequestCreatedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "John Smith", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" 
}, "fullName": "CI/py-repo" } } """ pullRequestUpdatedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestRejectedJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestFulfilledJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "pullrequest": { "id": "21", "title": "Branch 1496411680", "link": 
"http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" } } """ deleteTagJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "ownerName": "BUIL", "public": false, "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ deleteBranchJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "ownerName": "CI", "public": false, "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "branch", "name": "branch_1496758965", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ newTagJsonPayload = """ { "actor": { "nickname": "John", "display_name": "John Smith" }, "repository": { "scm": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": { "href": "http://localhost:7990/projects/CI/repos/py-repo" }, "html": { "href": "http://localhost:7990/projects/CI/repos/py-repo" } }, "public": false, "ownerName": "CI", "owner": { "nickname": "CI", "display_name": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": true, "closed": false, "old": null, "new": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } } } ] } 
} """ def _prepare_request(payload, headers=None, change_dict=None): headers = headers or {} request = FakeRequest(change_dict) request.uri = b"/change_hook/bitbucketcloud" request.method = b"POST" if isinstance(payload, str): payload = unicode2bytes(payload) request.content = BytesIO(payload) request.received_headers[b'Content-Type'] = _CT_JSON request.received_headers.update(headers) return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucketcloud': {}}, master=fakeMasterForHooks(self)) def _checkPush(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['revision'], '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['comments'], 'Bitbucket Cloud commit ' '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['revlink'], 'http://localhost:7990/projects/CI/repos/py-repo/commits/' '793d4754230023d85532f9a38dba3290f959beb4') @defer.inlineCallbacks def testHookWithChangeOnPushEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithNonDictOption(self): self.change_hook.dialects = {'bitbucketcloud': True} yield self.testHookWithChangeOnPushEvent() def _checkPullRequest(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['comments'], 'Bitbucket Cloud Pull Request #21') self.assertEqual(change['revlink'], 'http://localhost:7990/projects/' 'CI/repos/py-repo/pull-requests/21') self.assertEqual(change['revision'], 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba') pr_url = change['properties'].get('pullrequesturl') self.assertNotEqual(pr_url, None) self.assertEqual( pr_url, "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21") @defer.inlineCallbacks def testHookWithChangeOnPullRequestCreated(self): request = _prepare_request( pullRequestCreatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:created'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-created') @defer.inlineCallbacks def testHookWithChangeOnPullRequestUpdated(self): request = _prepare_request( pullRequestUpdatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:updated'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-updated') @defer.inlineCallbacks 
def testHookWithChangeOnPullRequestRejected(self): request = _prepare_request( pullRequestRejectedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:rejected'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'pull-rejected') @defer.inlineCallbacks def testHookWithChangeOnPullRequestFulfilled(self): request = _prepare_request( pullRequestFulfilledJsonPayload, headers={_HEADER_EVENT: 'pullrequest:fulfilled'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/master') self.assertEqual(change['category'], 'pull-fulfilled') @defer.inlineCallbacks def _checkCodebase(self, event_type, expected_codebase): payloads = { 'repo:push': pushJsonPayload, 'pullrequest:updated': pullRequestUpdatedJsonPayload} request = _prepare_request( payloads[event_type], headers={_HEADER_EVENT: event_type}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], expected_codebase) @defer.inlineCallbacks def testHookWithCodebaseValueOnPushEvent(self): self.change_hook.dialects = { 'bitbucketcloud': {'codebase': 'super-codebase'}} yield self._checkCodebase('repo:push', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPushEvent(self): self.change_hook.dialects = { 'bitbucketcloud': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('repo:push', 'CI') @defer.inlineCallbacks def testHookWithCodebaseValueOnPullEvent(self): self.change_hook.dialects = { 'bitbucketcloud': {'codebase': 'super-codebase'}} yield self._checkCodebase('pullrequest:updated', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPullEvent(self): self.change_hook.dialects = { 'bitbucketcloud': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('pullrequest:updated', 'CI') @defer.inlineCallbacks def testHookWithUnhandledEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'invented:event'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown event: invented_event") @defer.inlineCallbacks def testHookWithChangeOnCreateTag(self): request = _prepare_request( newTagJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnDeleteTag(self): request = _prepare_request( deleteTagJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] 
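# The codebase tests above show that the 'bitbucketcloud' dialect accepts a
# 'codebase' option that is either a fixed string or a callable taking the
# decoded JSON payload. In a real master.cfg the same configuration would sit
# under the www change hook settings, roughly like this sketch (the port
# number is just an example):
#
#     c['www'] = dict(
#         port=8010,
#         change_hook_dialects={
#             'bitbucketcloud': {
#                 'codebase': lambda payload: payload['repository']['project']['key'],
#             },
#         },
#     )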
self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithChangeOnDeleteBranch(self): request = _prepare_request( deleteBranchJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496758965') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithInvalidContentType(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: b'repo:push'}) request.received_headers[b'Content-Type'] = b'invalid/content' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown content type: invalid/content") buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_bitbucketserver.py000066400000000000000000000701231361162603000273040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Copyright Mamba Team from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www import change_hook from buildbot.www.hooks.bitbucketserver import _HEADER_EVENT _CT_JSON = b'application/json' pushJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": false, "new": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "old": { "type": "branch", "name": "branch_1496411680", "target": { "type": "commit", "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba" } } } ] } } """ pullRequestCreatedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "John Smith", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestUpdatedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": 
null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestRejectedJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "dot 1496311906", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": "branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ pullRequestFulfilledJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "pullrequest": { "id": "21", "title": "Branch 1496411680", "link": "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21", "authorLogin": "Buildbot", "fromRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "authorTimestamp": 0 }, "branch": { "rawNode": "a87e21f7433d8c16ac7be7413483fbb76c72a8ba", "name": 
"branch_1496411680" } }, "toRef": { "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "commit": { "message": null, "date": null, "hash": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "authorTimestamp": 0 }, "branch": { "rawNode": "7aebbb0089c40fce138a6d0b36d2281ea34f37f5", "name": "master" } } }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" } } """ deleteTagJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "ownerName": "BUIL", "public": false, "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ deleteBranchJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "ownerName": "CI", "public": false, "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": false, "closed": true, "old": { "type": "branch", "name": "branch_1496758965", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } }, "new": null } ] } } """ newTagJsonPayload = """ { "actor": { "username": "John", "displayName": "John Smith" }, "repository": { "scmId": "git", "project": { "key": "CI", "name": "Continuous Integration" }, "slug": "py-repo", "links": { "self": [ { "href": "http://localhost:7990/projects/CI/repos/py-repo/browse" } ] }, "public": false, "ownerName": "CI", "owner": { "username": "CI", "displayName": "CI" }, "fullName": "CI/py-repo" }, "push": { "changes": [ { "created": true, "closed": false, "old": null, "new": { "type": "tag", "name": "1.0.0", "target": { "type": "commit", "hash": "793d4754230023d85532f9a38dba3290f959beb4" } } } ] } } """ def _prepare_request(payload, headers=None, change_dict=None): headers = headers or {} request = FakeRequest(change_dict) request.uri = b"/change_hook/bitbucketserver" request.method = b"POST" if isinstance(payload, str): payload = unicode2bytes(payload) request.content = BytesIO(payload) request.received_headers[b'Content-Type'] = _CT_JSON request.received_headers.update(headers) return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.change_hook = change_hook.ChangeHookResource( dialects={'bitbucketserver': {}}, master=fakeMasterForHooks(self)) def _checkPush(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo/') 
self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['revision'], '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['comments'], 'Bitbucket Server commit ' '793d4754230023d85532f9a38dba3290f959beb4') self.assertEqual( change['revlink'], 'http://localhost:7990/projects/CI/repos/py-repo/commits/' '793d4754230023d85532f9a38dba3290f959beb4') @defer.inlineCallbacks def testHookWithChangeOnRefsChangedEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnPushEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'repo:push'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithNonDictOption(self): self.change_hook.dialects = {'bitbucketserver': True} yield self.testHookWithChangeOnPushEvent() def _checkPullRequest(self, change): self.assertEqual( change['repository'], 'http://localhost:7990/projects/CI/repos/py-repo/') self.assertEqual(change['author'], 'John Smith ') self.assertEqual(change['project'], 'Continuous Integration') self.assertEqual(change['comments'], 'Bitbucket Server Pull Request #21') self.assertEqual(change['revlink'], 'http://localhost:7990/projects/' 'CI/repos/py-repo/pull-requests/21') self.assertEqual(change['revision'], 'a87e21f7433d8c16ac7be7413483fbb76c72a8ba') pr_url = change['properties'].get('pullrequesturl') self.assertNotEqual(pr_url, None) self.assertEqual( pr_url, "http://localhost:7990/projects/CI/repos/py-repo/pull-requests/21") @defer.inlineCallbacks def testHookWithChangeOnPullRequestCreated(self): request = _prepare_request( pullRequestCreatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:created'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-created') @defer.inlineCallbacks def testHookWithChangeOnPullRequestUpdated(self): request = _prepare_request( pullRequestUpdatedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:updated'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/pull-requests/21/merge') self.assertEqual(change['category'], 'pull-updated') @defer.inlineCallbacks def testHookWithChangeOnPullRequestRejected(self): request = _prepare_request( pullRequestRejectedJsonPayload, headers={_HEADER_EVENT: 'pullrequest:rejected'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) 
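# _checkPullRequest above verifies that every pull request change carries a
# 'pullrequesturl' property. Builders can read that property back at runtime,
# for example in a step that reports which PR is being built (the step and
# its wiring here are only a sketch):
#
#     from buildbot.plugins import steps, util
#
#     report_pr = steps.ShellCommand(
#         name='show-pr',
#         command=['echo', util.Interpolate('building %(prop:pullrequesturl)s')])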
change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496411680') self.assertEqual(change['category'], 'pull-rejected') @defer.inlineCallbacks def testHookWithChangeOnPullRequestFulfilled(self): request = _prepare_request( pullRequestFulfilledJsonPayload, headers={_HEADER_EVENT: 'pullrequest:fulfilled'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPullRequest(change) self.assertEqual(change['branch'], 'refs/heads/master') self.assertEqual(change['category'], 'pull-fulfilled') @defer.inlineCallbacks def _checkCodebase(self, event_type, expected_codebase): payloads = { 'repo:refs_changed': pushJsonPayload, 'pullrequest:updated': pullRequestUpdatedJsonPayload} request = _prepare_request( payloads[event_type], headers={_HEADER_EVENT: event_type}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], expected_codebase) @defer.inlineCallbacks def testHookWithCodebaseValueOnPushEvent(self): self.change_hook.dialects = { 'bitbucketserver': {'codebase': 'super-codebase'}} yield self._checkCodebase('repo:refs_changed', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPushEvent(self): self.change_hook.dialects = { 'bitbucketserver': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('repo:refs_changed', 'CI') @defer.inlineCallbacks def testHookWithCodebaseValueOnPullEvent(self): self.change_hook.dialects = { 'bitbucketserver': {'codebase': 'super-codebase'}} yield self._checkCodebase('pullrequest:updated', 'super-codebase') @defer.inlineCallbacks def testHookWithCodebaseFunctionOnPullEvent(self): self.change_hook.dialects = { 'bitbucketserver': { 'codebase': lambda payload: payload['repository']['project']['key']}} yield self._checkCodebase('pullrequest:updated', 'CI') @defer.inlineCallbacks def testHookWithUnhandledEvent(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: 'invented:event'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown event: invented_event") @defer.inlineCallbacks def testHookWithChangeOnCreateTag(self): request = _prepare_request( newTagJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'push') @defer.inlineCallbacks def testHookWithChangeOnDeleteTag(self): request = _prepare_request( deleteTagJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/tags/1.0.0') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithChangeOnDeleteBranch(self): request = _prepare_request( 
deleteBranchJsonPayload, headers={_HEADER_EVENT: 'repo:refs_changed'}) yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 1) change = self.change_hook.master.data.updates.changesAdded[0] self._checkPush(change) self.assertEqual(change['branch'], 'refs/heads/branch_1496758965') self.assertEqual(change['category'], 'ref-deleted') @defer.inlineCallbacks def testHookWithInvalidContentType(self): request = _prepare_request( pushJsonPayload, headers={_HEADER_EVENT: b'repo:refs_changed'}) request.received_headers[b'Content-Type'] = b'invalid/content' yield request.test_render(self.change_hook) self.assertEqual(len(self.change_hook.master.data.updates.changesAdded), 0) self.assertEqual(request.written, b"Unknown content type: invalid/content") buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_github.py000066400000000000000000001402741361162603000253700ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import hmac from copy import deepcopy from hashlib import sha1 from io import BytesIO from twisted.internet import defer from twisted.trial import unittest from buildbot.plugins import util from buildbot.secrets.manager import SecretManager from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.util import unicode2bytes from buildbot.www.change_hook import ChangeHookResource from buildbot.www.hooks.github import _HEADER_EVENT from buildbot.www.hooks.github import _HEADER_SIGNATURE from buildbot.www.hooks.github import GitHubEventHandler # Sample GITHUB commit payload from http://help.github.com/post-receive-hooks/ # Added "modified" and "removed", and change email # Added "head_commit" # https://developer.github.com/v3/activity/events/types/#webhook-payload-example-26 gitJsonPayload = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", 
"url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadCiSkipTemplate = """ { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad %(skip)s", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad %(skip)s", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadTag = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] }, { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": 
"http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] } ], "head_commit": { "id": "de8251ff97ee194a289832576287d6f8ad74e3d0", "url": "http://github.com/defunkt/github/commit/de8251ff97ee194a289832576287d6f8ad74e3d0", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "update pricing a tad", "timestamp": "2008-02-15T14:36:34-08:00", "modified": ["modfile"], "removed": ["removedFile"] }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/tags/v1.0.0" } """ gitJsonPayloadNonBranch = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ { "id": "41a212ee83ca127e3c8cf465891ab7216a705f59", "distinct": true, "url": "http://github.com/defunkt/github/commit/41a212ee83ca127e3c8cf465891ab7216a705f59", "author": { "email": "fred@flinstone.org", "name": "Fred Flinstone" }, "committer": { "email": "freddy@flinstone.org", "name": "Freddy Flinstone" }, "message": "okay i give in", "timestamp": "2008-02-15T14:57:17-08:00", "added": ["filepath.rb"] } ], "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/garbage/master" } """ gitJsonPayloadPullRequest = b""" { "action": "opened", "number": 50, "pull_request": { "url": "https://api.github.com/repos/defunkt/github/pulls/50", "html_url": "https://github.com/defunkt/github/pull/50", "number": 50, "state": "open", "title": "Update the README with new information", "user": { "login": "defunkt", "id": 42, "type": "User" }, "body": "This is a pretty simple change that we need to pull into master.", "created_at": "2014-10-10T00:09:50Z", "updated_at": "2014-10-10T00:09:50Z", "closed_at": null, "merged_at": null, "merge_commit_sha": "cd3ff078a350901f91f4c4036be74f91d0b0d5d6", "head": { "label": "defunkt:changes", "ref": "changes", "sha": "05c588ba8cd510ecbe112d020f215facb17817a7", "user": { "login": "defunkt", "id": 42, "type": "User" }, "repo": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": "2014-05-20T22:39:43Z", "updated_at": "2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" } }, "base": { "label": "defunkt:master", "ref": "master", "sha": "69a8b72e2d3d955075d47f03d902929dcaf74034", "user": { "login": "defunkt", "id": 42, "type": "User" }, "repo": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": 
"2014-05-20T22:39:43Z", "updated_at": "2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" } }, "_links": { "self": { "href": "https://api.github.com/repos/defunkt/github/pulls/50" }, "html": { "href": "https://github.com/defunkt/github/pull/50" }, "commits": { "href": "https://api.github.com/repos/defunkt/github/pulls/50/commits" } }, "commits": 1, "additions": 2, "deletions": 0, "changed_files": 1 }, "repository": { "id": 43, "name": "github", "full_name": "defunkt/github", "owner": { "login": "defunkt", "id": 42, "type": "User" }, "html_url": "https://github.com/defunkt/github", "description": "", "url": "https://api.github.com/repos/defunkt/github", "created_at": "2014-05-20T22:39:43Z", "updated_at": "2014-07-25T16:37:51Z", "pushed_at": "2014-10-10T00:09:49Z", "git_url": "git://github.com/defunkt/github.git", "ssh_url": "git@github.com:defunkt/github.git", "clone_url": "https://github.com/defunkt/github.git", "default_branch": "master" }, "sender": { "login": "defunkt", "id": 42, "type": "User" } } """ gitJsonPayloadCommit = { "sha": "de8251ff97ee194a289832576287d6f8ad74e3d0", "commit": { "author": { "name": "defunkt", "email": "fred@flinstone.org", "date": "2017-02-12T14:39:33Z" }, "committer": { "name": "defunkt", "email": "fred@flinstone.org", "date": "2017-02-12T14:51:05Z" }, "message": "black magic", "tree": { }, "url": "...", "comment_count": 0 }, "url": "...", "html_url": "...", "comments_url": "...", "author": {}, "committer": {}, "parents": [], "stats": {}, "files": [] } gitPRproperties = { 'github.head.sha': '05c588ba8cd510ecbe112d020f215facb17817a7', 'github.state': 'open', 'github.base.repo.full_name': 'defunkt/github', 'github.number': 50, 'github.base.ref': 'master', 'github.base.sha': '69a8b72e2d3d955075d47f03d902929dcaf74034', 'github.head.repo.full_name': 'defunkt/github', 'github.merged_at': None, 'github.head.ref': 'changes', 'github.closed_at': None, 'github.title': 'Update the README with new information', 'event': 'pull_request' } gitJsonPayloadEmpty = b""" { "before": "5aef35982fb2d34e9d9d4502f6ede1072793222d", "repository": { "url": "http://github.com/defunkt/github", "html_url": "http://github.com/defunkt/github", "name": "github", "full_name": "defunkt/github", "description": "You're lookin' at it.", "watchers": 5, "forks": 2, "private": 1, "owner": { "email": "fred@flinstone.org", "name": "defunkt" } }, "commits": [ ], "head_commit": { }, "after": "de8251ff97ee194a289832576287d6f8ad74e3d0", "ref": "refs/heads/master" } """ gitJsonPayloadCreateTag = b""" { "ref": "refs/tags/v0.9.15.post1", "before": "0000000000000000000000000000000000000000", "after": "ffe1e9affb2b5399369443194c02068032f9295e", "created": true, "deleted": false, "forced": false, "base_ref": null, "compare": "https://github.com/buildbot/buildbot/compare/v0.9.15.post1", "commits": [ ], "head_commit": { "id": "57df618a4a450410c1dee440c7827ee105f5a226", "tree_id": "f9768673dc968b5c8fcbb15f119ce237b50b3252", "distinct": true, "message": "...", "timestamp": "2018-01-07T16:30:52+01:00", "url": "https://github.com/buildbot/buildbot/commit/...", "author": { "name": "User", "email": "userid@example.com", "username": "userid" }, "committer": { "name": "GitHub", "email": "noreply@github.com", "username": "web-flow" }, "added": [ ], "removed": [ "master/buildbot/newsfragments/bit_length.bugfix", 
"master/buildbot/newsfragments/localworker_umask.bugfix", "master/buildbot/newsfragments/svn-utf8.bugfix" ], "modified": [ ".bbtravis.yml", "circle.yml", "master/docs/relnotes/index.rst" ] }, "repository": { "html_url": "https://github.com/buildbot/buildbot", "name": "buildbot", "full_name": "buildbot" }, "pusher": { "name": "userid", "email": "userid@example.com" }, "organization": { "login": "buildbot", "url": "https://api.github.com/orgs/buildbot", "description": "Continous integration and delivery framework" }, "sender": { "login": "userid", "gravatar_id": "", "type": "User", "site_admin": false }, "ref_name": "v0.9.15.post1", "distinct_commits": [ ] }""" _HEADER_CT = b'Content-Type' _CT_ENCODED = b'application/x-www-form-urlencoded' _CT_JSON = b'application/json' def _prepare_github_change_hook(testcase, **params): return ChangeHookResource(dialects={ 'github': params }, master=fakeMasterForHooks(testcase)) def _prepare_request(event, payload, _secret=None, headers=None): if headers is None: headers = dict() request = FakeRequest() request.uri = b"/change_hook/github" request.method = b"GET" request.received_headers = { _HEADER_EVENT: event } assert isinstance(payload, (bytes, list)), \ "payload can only be bytes or list, not {}".format(type(payload)) if isinstance(payload, bytes): request.content = BytesIO(payload) request.received_headers[_HEADER_CT] = _CT_JSON if _secret is not None: signature = hmac.new(unicode2bytes(_secret), msg=unicode2bytes(payload), digestmod=sha1) request.received_headers[_HEADER_SIGNATURE] = \ 'sha1={}'.format(signature.hexdigest()) else: request.args[b'payload'] = payload request.received_headers[_HEADER_CT] = _CT_ENCODED request.received_headers.update(headers) # print request.received_headers return request class TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, strict=False, github_property_whitelist=["github.*"]) self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot'} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://api.github.com', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() def assertDictSubset(self, expected_dict, response_dict): expected = {} for key in expected_dict.keys(): self.assertIn(key, set(response_dict.keys())) expected[key] = response_dict[key] self.assertDictEqual(expected_dict, expected) @defer.inlineCallbacks def test_unknown_event(self): bad_event = b'whatever' self.request = _prepare_request(bad_event, gitJsonPayload) yield self.request.test_render(self.changeHook) expected = b'Unknown event: ' + bad_event self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_unknown_content_type(self): bad_content_type = b'application/x-useful' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_CT: bad_content_type }) yield self.request.test_render(self.changeHook) expected = b'Unknown content type: ' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertIn(expected, self.request.written) @defer.inlineCallbacks def _check_ping(self, payload): self.request = _prepare_request(b'ping', payload) yield self.request.test_render(self.changeHook) 
self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_ping_encoded(self): self._check_ping([b'{}']) def test_ping_json(self): self._check_ping(b'{}') @defer.inlineCallbacks def test_git_with_push_tag(self): self.request = _prepare_request(b'push', gitJsonPayloadTag) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["branch"], "v1.0.0") self.assertEqual(change["category"], "tag") @defer.inlineCallbacks def test_git_with_push_newtag(self): self.request = _prepare_request(b'push', gitJsonPayloadCreateTag) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["author"], "User ") self.assertEqual(change["branch"], "v0.9.15.post1") self.assertEqual(change["category"], "tag") # Test 'base' hook with attributes. We should get a json string # representing a Change object as a dictionary. All values show be set. @defer.inlineCallbacks def _check_git_with_change(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['files'], ['filepath.rb']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203116237) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59') self.assertEqual(change["comments"], "okay i give in") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "41a212ee83ca127e3c8cf465891ab7216a705f59") change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change['files'], ['modfile', 'removedFile']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203114994) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["src"], "git") self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0') self.assertEqual(change["comments"], "update pricing a tad") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "de8251ff97ee194a289832576287d6f8ad74e3d0") self.assertEqual(change["properties"]["event"], "push") def test_git_with_change_encoded(self): self._check_git_with_change([gitJsonPayload]) def test_git_with_change_json(self): self._check_git_with_change(gitJsonPayload) # Test that, even with commits not marked as distinct, the changes get # recorded each time we receive the payload. This is important because # without it, commits can get pushed to a non-scheduled branch, get # recorded and associated with that branch, and then later get pushed to a # scheduled branch and not trigger a build. 
# # For example, if a commit is pushed to a dev branch, it then gets recorded # as a change associated with that dev branch. If that change is later # pushed to master, we still need to trigger a build even though we've seen # the commit before. @defer.inlineCallbacks def testGitWithDistinctFalse(self): self.request = _prepare_request(b'push', [gitJsonPayload.replace(b'"distinct": true,', b'"distinct": false,')]) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['files'], ['filepath.rb']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203116237) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59') self.assertEqual(change["comments"], "okay i give in") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "41a212ee83ca127e3c8cf465891ab7216a705f59") self.assertEqual(change["properties"]["github_distinct"], False) change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change['files'], ['modfile', 'removedFile']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203114994) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["src"], "git") self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0') self.assertEqual(change["comments"], "update pricing a tad") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "de8251ff97ee194a289832576287d6f8ad74e3d0") @defer.inlineCallbacks def testGitWithNoJson(self): self.request = _prepare_request(b'push', b'') yield self.request.test_render(self.changeHook) expected = b"Expecting value: line 1 column 1 (char 0)" self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) @defer.inlineCallbacks def _check_git_with_no_changes(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) expected = b"no change found" self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) def test_git_with_no_changes_encoded(self): self._check_git_with_no_changes([gitJsonPayloadEmpty]) def test_git_with_no_changes_json(self): self._check_git_with_no_changes(gitJsonPayloadEmpty) @defer.inlineCallbacks def _check_git_with_non_branch_changes(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) expected = b"no change found" self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) def test_git_with_non_branch_changes_encoded(self): self._check_git_with_non_branch_changes([gitJsonPayloadNonBranch]) def test_git_with_non_branch_changes_json(self): self._check_git_with_non_branch_changes(gitJsonPayloadNonBranch) @defer.inlineCallbacks def _check_git_with_pull(self, payload): self.request = 
_prepare_request('pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "https://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1412899790) self.assertEqual(change["author"], "defunkt") self.assertEqual(change["revision"], '05c588ba8cd510ecbe112d020f215facb17817a7') self.assertEqual(change["comments"], "GitHub Pull Request #50 (1 commit)\n" "Update the README with new information\n" "This is a pretty simple change that we need to pull into master.") self.assertEqual(change["branch"], "refs/pull/50/merge") self.assertEqual(change["revlink"], "https://github.com/defunkt/github/pull/50") self.assertEqual(change['properties']['basename'], "master") self.assertDictSubset(gitPRproperties, change["properties"]) def test_git_with_pull_encoded(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self._check_git_with_pull([gitJsonPayloadPullRequest]) def test_git_with_pull_json(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self._check_git_with_pull(gitJsonPayloadPullRequest) @defer.inlineCallbacks def _check_git_push_with_skip_message(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_git_push_with_skip_message(self): gitJsonPayloadCiSkips = [ unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ci skip]'}), unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[skip ci]'}), unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ ci skip ]'}), ] for payload in gitJsonPayloadCiSkips: self._check_git_push_with_skip_message(payload) @defer.inlineCallbacks def _check_git_pull_request_with_skip_message(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_git_pull_request_with_skip_message(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' commit = deepcopy(gitJsonPayloadCommit) msgs = ( 'black magic [ci skip]', 'black magic [skip ci]', 'black magic [ ci skip ]', ) for msg in msgs: commit['commit']['message'] = msg self._http.expect('get', api_endpoint, content_json=commit) self._check_git_pull_request_with_skip_message( gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithGitChangeCustomPullrequestRef( unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, strict=False, github_property_whitelist=["github.*"], pullrequest_ref="head") self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot'} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://api.github.com', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_git_pull_request_with_custom_ref(self): commit = deepcopy([gitJsonPayloadPullRequest]) 
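        # Note: with the default configuration (see _check_git_with_pull in the
        # class above) the reported branch is the mergeable ref
        # "refs/pull/<n>/merge"; pullrequest_ref="head", as set in setUp here,
        # switches it to the pull request's own head ref, which is what the
        # change["branch"] assertion below verifies.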
api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self.request = _prepare_request('pull_request', commit) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["branch"], "refs/pull/50/head") class TestChangeHookConfiguredWithCustomSkips(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, strict=False, skips=[r'\[ *bb *skip *\]']) self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot'} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://api.github.com', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_push_with_skip_message(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_push_with_skip_message(self): gitJsonPayloadCiSkips = [ unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[bb skip]'}), unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': '[ bb skip ]'}), ] for payload in gitJsonPayloadCiSkips: self._check_push_with_skip_message(payload) @defer.inlineCallbacks def _check_push_no_ci_skip(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) def test_push_no_ci_skip(self): # user overrode the skip pattern already, # so the default patterns should not work. 
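        # Rough sketch of how the custom pattern from setUp behaves on its own
        # (illustration only, using the standard `re` module):
        #
        #   import re
        #   skip_re = re.compile(r'\[ *bb *skip *\]')
        #   skip_re.search('fix typo [bb skip]')      # match -> change suppressed
        #   skip_re.search('fix typo [ bb  skip ]')   # extra spaces still match
        #   skip_re.search('fix typo [ci skip]')      # None: default marker no longer applies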
payload = gitJsonPayloadCiSkipTemplate % {'skip': '[ci skip]'} payload = unicode2bytes(payload) self._check_push_no_ci_skip(payload) @defer.inlineCallbacks def _check_pull_request_with_skip_message(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) def test_pull_request_with_skip_message(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' commit = deepcopy(gitJsonPayloadCommit) msgs = ( 'black magic [bb skip]', 'black magic [ bb skip ]', ) for msg in msgs: commit['commit']['message'] = msg self._http.expect('get', api_endpoint, content_json=commit) self._check_pull_request_with_skip_message( gitJsonPayloadPullRequest) @defer.inlineCallbacks def _check_pull_request_no_skip(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request_no_skip(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' commit = deepcopy(gitJsonPayloadCommit) commit['commit']['message'] = 'black magic [skip bb]' # pattern not matched self._http.expect('get', api_endpoint, content_json=commit) self._check_pull_request_no_skip(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithAuth(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() _token = '7e076f41-b73a-4045-a817' self.changeHook = _prepare_github_change_hook( self, strict=False, token=_token) self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot', 'Authorization': 'token ' + _token} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://api.github.com', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self._check_pull_request(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithCustomApiRoot(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, strict=False, github_api_endpoint='https://black.magic.io') self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot'} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://black.magic.io', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' 
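        # The pull_request handler fetches the head commit from the configured
        # API root to read its message (e.g. for skip markers), so the expected
        # GET below also exercises the custom github_api_endpoint
        # ('https://black.magic.io') configured in setUp.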
self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self._check_pull_request(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithCustomApiRootWithAuth(unittest.TestCase, TestReactorMixin): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() _token = '7e076f41-b73a-4045-a817' self.changeHook = _prepare_github_change_hook( self, strict=False, github_api_endpoint='https://black.magic.io', token=_token) self.master = self.changeHook.master fake_headers = {'User-Agent': 'Buildbot', 'Authorization': 'token ' + _token} self._http = yield fakehttpclientservice.HTTPClientService.getFakeService( self.master, self, 'https://black.magic.io', headers=fake_headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def _check_pull_request(self, payload): self.request = _prepare_request(b'pull_request', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) def test_pull_request(self): api_endpoint = '/repos/defunkt/github/commits/05c588ba8cd510ecbe112d020f215facb17817a7' self._http.expect('get', api_endpoint, content_json=gitJsonPayloadCommit) self._check_pull_request(gitJsonPayloadPullRequest) class TestChangeHookConfiguredWithStrict(unittest.TestCase, TestReactorMixin): _SECRET = 'somethingreallysecret' def setUp(self): self.setUpTestReactor() fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"secret_key": self._SECRET}) secretService = SecretManager() secretService.services = [fakeStorageService] self.changeHook = _prepare_github_change_hook(self, strict=True, secret=util.Secret("secret_key")) self.changeHook.master.addService(secretService) @defer.inlineCallbacks def test_signature_ok(self): self.request = _prepare_request(b'push', gitJsonPayload, _secret=self._SECRET) yield self.request.test_render(self.changeHook) # Can it somehow be merged w/ the same code above in a different class? 
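        # The signature validated here is the one _prepare_request computed:
        # an HMAC-SHA1 of the raw payload keyed with the shared secret, roughly
        #
        #   digest = hmac.new(unicode2bytes(self._SECRET), msg=gitJsonPayload,
        #                     digestmod=sha1).hexdigest()
        #   header = 'sha1=' + digest
        #
        # The hook recomputes the same HMAC and compares it with this header.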
self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['files'], ['filepath.rb']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203116237) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["revision"], '41a212ee83ca127e3c8cf465891ab7216a705f59') self.assertEqual(change["comments"], "okay i give in") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "41a212ee83ca127e3c8cf465891ab7216a705f59") change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change['files'], ['modfile', 'removedFile']) self.assertEqual(change["repository"], "http://github.com/defunkt/github") self.assertEqual(change["when_timestamp"], 1203114994) self.assertEqual(change["author"], "Fred Flinstone ") self.assertEqual(change["committer"], "Freddy Flinstone ") self.assertEqual(change["src"], "git") self.assertEqual(change["revision"], 'de8251ff97ee194a289832576287d6f8ad74e3d0') self.assertEqual(change["comments"], "update pricing a tad") self.assertEqual(change["branch"], "master") self.assertEqual(change["revlink"], "http://github.com/defunkt/github/commit/" "de8251ff97ee194a289832576287d6f8ad74e3d0") @defer.inlineCallbacks def test_unknown_hash(self): bad_hash_type = b'blah' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_hash_type + b'=doesnotmatter' }) yield self.request.test_render(self.changeHook) expected = b'Unknown hash type: ' + bad_hash_type self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_signature_nok(self): bad_signature = b'sha1=wrongstuff' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_signature }) yield self.request.test_render(self.changeHook) expected = b'Hash mismatch' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_missing_secret(self): # override the value assigned in setUp self.changeHook = _prepare_github_change_hook(self, strict=True) self.request = _prepare_request(b'push', gitJsonPayload) yield self.request.test_render(self.changeHook) expected = b'Strict mode is requested while no secret is provided' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_wrong_signature_format(self): bad_signature = b'hash=value=something' self.request = _prepare_request(b'push', gitJsonPayload, headers={ _HEADER_SIGNATURE: bad_signature }) yield self.request.test_render(self.changeHook) expected = b'Wrong signature format: ' + bad_signature self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) @defer.inlineCallbacks def test_signature_missing(self): self.request = _prepare_request(b'push', gitJsonPayload) yield self.request.test_render(self.changeHook) expected = b'Request has no required signature' self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertEqual(self.request.written, expected) class TestChangeHookConfiguredWithCodebaseValue(unittest.TestCase, 
TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook(self, codebase='foobar') @defer.inlineCallbacks def _check_git_with_change(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], 'foobar') def test_git_with_change_encoded(self): return self._check_git_with_change([gitJsonPayload]) def test_git_with_change_json(self): return self._check_git_with_change(gitJsonPayload) def _codebase_function(payload): return 'foobar-' + payload['repository']['name'] class TestChangeHookConfiguredWithCodebaseFunction(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = _prepare_github_change_hook( self, codebase=_codebase_function) @defer.inlineCallbacks def _check_git_with_change(self, payload): self.request = _prepare_request(b'push', payload) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change['codebase'], 'foobar-github') def test_git_with_change_encoded(self): return self._check_git_with_change([gitJsonPayload]) def test_git_with_change_json(self): return self._check_git_with_change(gitJsonPayload) class TestChangeHookConfiguredWithCustomEventHandler(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() class CustomGitHubEventHandler(GitHubEventHandler): def handle_ping(self, _, __): self.master.hook_called = True return [], None self.changeHook = _prepare_github_change_hook( self, **{'class': CustomGitHubEventHandler}) @defer.inlineCallbacks def test_ping(self): self.request = _prepare_request(b'ping', b'{}') yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertTrue(self.changeHook.master.hook_called) buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_gitlab.py000066400000000000000000001272171361162603000253520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.plugins import util from buildbot.secrets.manager import SecretManager from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.fake.web import FakeRequest from buildbot.test.fake.web import fakeMasterForHooks from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook from buildbot.www.hooks.gitlab import _HEADER_EVENT from buildbot.www.hooks.gitlab import _HEADER_GITLAB_TOKEN # Sample GITLAB commit payload from https://docs.gitlab.com/ce/user/project/integrations/webhooks.html # Added "modified" and "removed", and change email gitJsonPayload = b""" { "before": "95790bf891e76fee5e1747ab589903a6a1f80f22", "after": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "ref": "refs/heads/master", "user_id": 4, "user_name": "John Smith", "repository": { "name": "Diaspora", "url": "git@localhost:diaspora.git", "description": "", "homepage": "http://localhost/diaspora" }, "commits": [ { "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "message": "Update Catalan translation to e38cb41.", "timestamp": "2011-12-12T14:27:31+02:00", "url": "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "author": { "name": "Jordi Mallach", "email": "jordi@softcatala.org" } }, { "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "message": "fixed readme", "timestamp": "2012-01-03T23:36:29+02:00", "url": "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "author": { "name": "GitLab dev user", "email": "gitlabdev@dv6700.(none)" } } ], "total_commits_count": 2 } """ gitJsonPayloadTag = b""" { "object_kind": "tag_push", "before": "0000000000000000000000000000000000000000", "after": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", "ref": "refs/tags/v1.0.0", "checkout_sha": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", "user_id": 1, "user_name": "John Smith", "repository":{ "name": "Example", "url": "git@localhost:diaspora.git", "description": "", "homepage": "http://example.com/jsmith/example", "git_http_url":"http://example.com/jsmith/example.git", "git_ssh_url":"git@example.com:jsmith/example.git", "visibility_level":0 }, "commits": [ { "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "message": "Update Catalan translation to e38cb41.", "timestamp": "2011-12-12T14:27:31+02:00", "url": "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "author": { "name": "Jordi Mallach", "email": "jordi@softcatala.org" } }, { "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "message": "fixed readme", "timestamp": "2012-01-03T23:36:29+02:00", "url": "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "author": { "name": "GitLab dev user", "email": "gitlabdev@dv6700.(none)" } } ], "total_commits_count": 2 } """ # == Merge requests from a different branch of the same project # GITLAB commit payload from an actual version 10.7.1-ee gitlab instance # chronicling the lives and times of a trivial MR through the operations # open, edit description, add commit, close, and reopen, in that order. # (Tidied with json_pp --json_opt=canonical,pretty and an editor.) 
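# Expectations exercised by the tests further below: the 'open' and 'reopen'
# payloads, and the 'update' payload that carries a new commit (it has an
# extra 'oldrev' field), each produce a change; the description-only 'update'
# and the 'close' payload produce none.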
# FIXME: only show diffs here to keep file smaller and increase clarity gitJsonPayloadMR_open = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "open", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "This to both gitlab gateways!", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "92268bc781b24f0a61b907da062950e9e5252a69", "message" : "Remove the dummy line again", "timestamp" : "2018-05-14T07:54:04-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/92268bc781b24f0a61b907da062950e9e5252a69" }, "last_edited_at" : null, "last_edited_by_id" : null, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "unchecked", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:45:37 -0700", "updated_by_id" : null, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 
239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ gitJsonPayloadMR_editdesc = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "update", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "92268bc781b24f0a61b907da062950e9e5252a69", "message" : "Remove the dummy line again", "timestamp" : "2018-05-14T07:54:04-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/92268bc781b24f0a61b907da062950e9e5252a69" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:49:55 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", 
"project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ gitJsonPayloadMR_addcommit = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "update", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29931, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 14:49:55 UTC", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "unchecked", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "oldrev" : "92268bc781b24f0a61b907da062950e9e5252a69", "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", 
"url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 14:51:27 UTC", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ gitJsonPayloadMR_close = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "close", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29958, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "closed", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : 
"https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:52:01 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ gitJsonPayloadMR_reopen = b""" { "event_type" : "merge_request", "object_attributes" : { "action" : "reopen", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-15 07:45:37 -0700", "description" : "Edited description.", "head_pipeline_id" : 29958, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10850, "iid" : 6, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "cee8b01dcbaeed89563c2822f7c59a93c813eb6b", "message" : "debian/compat: update to 9", "timestamp" : "2018-05-15T07:51:11-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/cee8b01dcbaeed89563c2822f7c59a93c813eb6b" }, "last_edited_at" : "2018-05-15 07:49:55 -0700", "last_edited_by_id" : 15, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : 0 }, "merge_status" : "can_be_merged", "merge_user_id" : null, "merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : 
"git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 239, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Remove the dummy line again", "total_time_spent" : 0, "updated_at" : "2018-05-15 07:53:27 -0700", "updated_by_id" : 15, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/6", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ # == Merge requests from a fork of the project # (Captured more accurately than above test data) gitJsonPayloadMR_open_forked = b""" { "changes" : { "total_time_spent" : { "current" : 0, "previous" : null } }, "event_type" : "merge_request", "labels" : [], "object_attributes" : { "action" : "open", "assignee_id" : null, "author_id" : 15, "created_at" : "2018-05-19 06:57:12 -0700", "description" : "This is a merge request from a fork of the project.", "head_pipeline_id" : null, "human_time_estimate" : null, "human_total_time_spent" : null, "id" : 10914, "iid" : 7, "last_commit" : { "author" : { "email" : "mmusterman@example.com", "name" : "Max Mustermann" }, "id" : "e46ee239f3d6d41ade4d1e610669dd71ed86ec80", "message" : "Add note to README", "timestamp" : "2018-05-19T06:35:26-07:00", "url" : "https://gitlab.example.com/mmusterman/awesome_project/commit/e46ee239f3d6d41ade4d1e610669dd71ed86ec80" }, "last_edited_at" : null, "last_edited_by_id" : null, "merge_commit_sha" : null, "merge_error" : null, "merge_params" : { "force_remove_source_branch" : "0" }, "merge_status" : "unchecked", "merge_user_id" : null, 
"merge_when_pipeline_succeeds" : false, "milestone_id" : null, "source" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/build/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:build/awesome_project.git", "homepage" : "https://gitlab.example.com/build/awesome_project", "http_url" : "https://gitlab.example.com/build/awesome_project.git", "id" : 2337, "name" : "awesome_project", "namespace" : "build", "path_with_namespace" : "build/awesome_project", "ssh_url" : "git@gitlab.example.com:build/awesome_project.git", "url" : "git@gitlab.example.com:build/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/build/awesome_project" }, "source_branch" : "ms-viewport", "source_project_id" : 2337, "state" : "opened", "target" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "target_branch" : "master", "target_project_id" : 239, "time_estimate" : 0, "title" : "Add note to README", "total_time_spent" : 0, "updated_at" : "2018-05-19 06:57:12 -0700", "updated_by_id" : null, "url" : "https://gitlab.example.com/mmusterman/awesome_project/merge_requests/7", "work_in_progress" : false }, "object_kind" : "merge_request", "project" : { "avatar_url" : null, "ci_config_path" : null, "default_branch" : "master", "description" : "Trivial project for testing build machinery quickly", "git_http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "git_ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "http_url" : "https://gitlab.example.com/mmusterman/awesome_project.git", "id" : 239, "name" : "awesome_project", "namespace" : "mmusterman", "path_with_namespace" : "mmusterman/awesome_project", "ssh_url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git", "visibility_level" : 0, "web_url" : "https://gitlab.example.com/mmusterman/awesome_project" }, "repository" : { "description" : "Trivial project for testing build machinery quickly", "homepage" : "https://gitlab.example.com/mmusterman/awesome_project", "name" : "awesome_project", "url" : "git@gitlab.example.com:mmusterman/awesome_project.git" }, "user" : { "avatar_url" : "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40&d=identicon", "name" : "Max Mustermann", "username" : "mmusterman" } } """ def FakeRequestMR(content): request = FakeRequest(content=content) request.uri = b"/change_hook/gitlab" request.args = {b'codebase': [b'MyCodebase']} request.received_headers[_HEADER_EVENT] = b"Merge Request Hook" request.method = b"POST" return request class 
TestChangeHookConfiguredWithGitChange(unittest.TestCase, TestReactorMixin): def setUp(self): self.setUpTestReactor() self.changeHook = change_hook.ChangeHookResource( dialects={'gitlab': True}, master=fakeMasterForHooks(self)) def check_changes_tag_event(self, r, project='', codebase=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( change["when_timestamp"], 1323692851 ) self.assertEqual(change["branch"], "v1.0.0") def check_changes_mr_event(self, r, project='awesome_project', codebase=None, timestamp=1526309644, source_repo=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "https://gitlab.example.com/mmusterman/awesome_project.git") if source_repo is None: source_repo = "https://gitlab.example.com/mmusterman/awesome_project.git" self.assertEqual(change['properties']["source_repository"], source_repo) self.assertEqual(change['properties']["target_repository"], "https://gitlab.example.com/mmusterman/awesome_project.git") self.assertEqual( change["when_timestamp"], timestamp ) self.assertEqual(change["branch"], "master") self.assertEqual(change['properties']["source_branch"], 'ms-viewport') self.assertEqual(change['properties']["target_branch"], 'master') self.assertEqual(change["category"], "merge_request") self.assertEqual(change.get("project"), project) def check_changes_push_event(self, r, project='diaspora', codebase=None): self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( change["when_timestamp"], 1323692851 ) self.assertEqual( change["author"], "Jordi Mallach ") self.assertEqual( change["revision"], 'b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327') self.assertEqual( change["comments"], "Update Catalan translation to e38cb41.") self.assertEqual(change["branch"], "master") self.assertEqual(change[ "revlink"], "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327") change = self.changeHook.master.data.updates.changesAdded[1] self.assertEqual(change["repository"], "git@localhost:diaspora.git") self.assertEqual( change["when_timestamp"], 1325626589 ) self.assertEqual( change["author"], "GitLab dev user ") self.assertEqual(change["src"], "git") self.assertEqual( change["revision"], 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7') self.assertEqual(change["comments"], "fixed readme") self.assertEqual(change["branch"], "master") self.assertEqual(change[ "revlink"], "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7") # FIXME: should we convert project name to canonical case? # Or should change filter be case insensitive? self.assertEqual(change.get("project").lower(), project.lower()) self.assertEqual(change.get("codebase"), codebase) # Test 'base' hook with attributes. We should get a json string representing # a Change object as a dictionary. All values show be set. 
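    # For reference, outside of the tests the same dialect is enabled through
    # the master's www settings, along the lines of (port and secret name here
    # are only illustrative):
    #
    #   c['www'] = dict(
    #       port=8010,
    #       change_hook_dialects={'gitlab': {'secret': util.Secret('gitlab_hook')}},
    #   )
    #
    # which serves the hook at /change_hook/gitlab, matching request.uri below.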
@defer.inlineCallbacks def testGitWithChange(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res) @defer.inlineCallbacks def testGitWithChange_WithProjectToo(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'project': [b'Diaspora']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res, project="Diaspora") @defer.inlineCallbacks def testGitWithChange_WithCodebaseToo(self): self.request = FakeRequest(content=gitJsonPayload) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_push_event(res, codebase="MyCodebase") @defer.inlineCallbacks def testGitWithChange_WithPushTag(self): self.request = FakeRequest(content=gitJsonPayloadTag) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.method = b"POST" res = yield self.request.test_render(self.changeHook) self.check_changes_tag_event(res, codebase="MyCodebase") @defer.inlineCallbacks def testGitWithNoJson(self): self.request = FakeRequest() self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) self.assertIn(b"Error loading JSON:", self.request.written) self.request.setResponseCode.assert_called_with(400, mock.ANY) @defer.inlineCallbacks def test_event_property(self): self.request = FakeRequest(content=gitJsonPayload) self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["properties"]["event"], "Push Hook") self.assertEqual(change["category"], "Push Hook") @defer.inlineCallbacks def testGitWithChange_WithMR_open(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_open) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase") change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") @defer.inlineCallbacks def testGitWithChange_WithMR_editdesc(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_editdesc) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def testGitWithChange_WithMR_addcommit(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_addcommit) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase", timestamp=1526395871) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") 
@defer.inlineCallbacks def testGitWithChange_WithMR_close(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_close) yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def testGitWithChange_WithMR_reopen(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_reopen) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event(res, codebase="MyCodebase", timestamp=1526395871) change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") @defer.inlineCallbacks def testGitWithChange_WithMR_open_forked(self): self.request = FakeRequestMR(content=gitJsonPayloadMR_open_forked) res = yield self.request.test_render(self.changeHook) self.check_changes_mr_event( res, codebase="MyCodebase", timestamp=1526736926, source_repo="https://gitlab.example.com/build/awesome_project.git") change = self.changeHook.master.data.updates.changesAdded[0] self.assertEqual(change["category"], "merge_request") class TestChangeHookConfiguredWithSecret(unittest.TestCase, TestReactorMixin): _SECRET = 'thesecret' def setUp(self): self.setUpTestReactor() self.master = fakeMasterForHooks(self) fakeStorageService = FakeSecretStorage() fakeStorageService.reconfigService(secretdict={"secret_key": self._SECRET}) self.secretService = SecretManager() self.secretService.services = [fakeStorageService] self.master.addService(self.secretService) self.changeHook = change_hook.ChangeHookResource( dialects={'gitlab': {'secret': util.Secret("secret_key")}}, master=self.master) @defer.inlineCallbacks def test_missing_secret(self): self.request = FakeRequest(content=gitJsonPayloadTag) self.request.uri = b"/change_hook/gitlab" self.request.args = {b'codebase': [b'MyCodebase']} self.request.method = b"POST" self.request.received_headers[_HEADER_EVENT] = b"Push Hook" yield self.request.test_render(self.changeHook) expected = b'Invalid secret' self.assertEqual(self.request.written, expected) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0) @defer.inlineCallbacks def test_valid_secret(self): self.request = FakeRequest(content=gitJsonPayload) self.request.received_headers[_HEADER_GITLAB_TOKEN] = self._SECRET self.request.received_headers[_HEADER_EVENT] = b"Push Hook" self.request.uri = b"/change_hook/gitlab" self.request.method = b"POST" yield self.request.test_render(self.changeHook) self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 2) buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_gitorious.py000066400000000000000000000103701361162603000261230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.test.fake.web import FakeRequest
from buildbot.test.fake.web import fakeMasterForHooks
from buildbot.test.util.misc import TestReactorMixin
from buildbot.www import change_hook

# Sample Gitorious commit payload
# source: http://gitorious.org/gitorious/pages/WebHooks
gitJsonPayload = b"""
{
  "after": "df5744f7bc8663b39717f87742dc94f52ccbf4dd",
  "before": "b4ca2d38e756695133cbd0e03d078804e1dc6610",
  "commits": [
    {
      "author": {
        "email": "jason@nospam.org",
        "name": "jason"
      },
      "committed_at": "2012-01-10T11:02:27-07:00",
      "id": "df5744f7bc8663b39717f87742dc94f52ccbf4dd",
      "message": "added a place to put the docstring for Book",
      "timestamp": "2012-01-10T11:02:27-07:00",
      "url": "http://gitorious.org/q/mainline/commit/df5744f7bc8663b39717f87742dc94f52ccbf4dd"
    }
  ],
  "project": {
    "description": "a webapp to organize your ebook collectsion.",
    "name": "q"
  },
  "pushed_at": "2012-01-10T11:09:25-07:00",
  "pushed_by": "jason",
  "ref": "new_look",
  "repository": {
    "clones": 4,
    "description": "",
    "name": "mainline",
    "owner": {
      "name": "jason"
    },
    "url": "http://gitorious.org/q/mainline"
  }
}
"""


class TestChangeHookConfiguredWithGitChange(unittest.TestCase,
                                            TestReactorMixin):

    def setUp(self):
        self.setUpTestReactor()
        dialects = {'gitorious': True}
        self.changeHook = change_hook.ChangeHookResource(
            dialects=dialects, master=fakeMasterForHooks(self))

    # Test 'base' hook with attributes. We should get a json string
    # representing a Change object as a dictionary. All values should be set.
    @defer.inlineCallbacks
    def testGitWithChange(self):
        changeDict = {b"payload": [gitJsonPayload]}
        self.request = FakeRequest(changeDict)
        self.request.uri = b"/change_hook/gitorious"
        self.request.method = b"POST"
        yield self.request.test_render(self.changeHook)
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 1)
        change = self.changeHook.master.data.updates.changesAdded[0]

        # Gitorious doesn't send changed files
        self.assertEqual(change['files'], [])
        self.assertEqual(change["repository"], "http://gitorious.org/q/mainline")
        self.assertEqual(change["when_timestamp"], 1326218547)
        self.assertEqual(change["author"], "jason <jason@nospam.org>")
        self.assertEqual(change["revision"], 'df5744f7bc8663b39717f87742dc94f52ccbf4dd')
        self.assertEqual(change["comments"], "added a place to put the docstring for Book")
        self.assertEqual(change["branch"], "new_look")
        revlink = ("http://gitorious.org/q/mainline/commit/"
                   "df5744f7bc8663b39717f87742dc94f52ccbf4dd")
        self.assertEqual(change["revlink"], revlink)

    @defer.inlineCallbacks
    def testGitWithNoJson(self):
        self.request = FakeRequest()
        self.request.uri = b"/change_hook/gitorious"
        self.request.method = b"GET"
        yield self.request.test_render(self.changeHook)
        expected = b"Error processing changes."
        self.assertEqual(len(self.changeHook.master.data.updates.changesAdded), 0)
        self.assertEqual(self.request.written, expected)
        self.request.setResponseCode.assert_called_with(500, expected)
        self.assertEqual(len(self.flushLoggedErrors()), 1)
buildbot-2.6.0/master/buildbot/test/unit/test_www_hooks_poller.py000066400000000000000000000120051361162603000253710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot import util from buildbot.changes import base from buildbot.changes.manager import ChangeManager from buildbot.test.fake import fakemaster from buildbot.test.fake.web import FakeRequest from buildbot.test.util.misc import TestReactorMixin from buildbot.www import change_hook class TestPollingChangeHook(TestReactorMixin, unittest.TestCase): class Subclass(base.PollingChangeSource): pollInterval = None called = False def poll(self): self.called = True def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def setUpRequest(self, args, options=True, activate=True): self.request = FakeRequest(args=args) self.request.uri = b"/change_hook/poller" self.request.method = b"GET" www = self.request.site.master.www self.master = master = self.request.site.master = \ fakemaster.make_master(self, wantData=True) master.www = www yield self.master.startService() self.changeHook = change_hook.ChangeHookResource( dialects={'poller': options}, master=master) master.change_svc = ChangeManager() yield master.change_svc.setServiceParent(master) self.changesrc = self.Subclass(21, name=b'example') yield self.changesrc.setServiceParent(master.change_svc) self.otherpoller = self.Subclass(22, name=b"otherpoller") yield self.otherpoller.setServiceParent(master.change_svc) anotherchangesrc = base.ChangeSource(name=b'notapoller') anotherchangesrc.setName("notapoller") yield anotherchangesrc.setServiceParent(master.change_svc) yield self.request.test_render(self.changeHook) yield util.asyncSleep(0.1) def tearDown(self): return self.master.stopService() @defer.inlineCallbacks def test_no_args(self): yield self.setUpRequest({}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, True) @defer.inlineCallbacks def test_no_poller(self): yield self.setUpRequest({b"poller": [b"nosuchpoller"]}) expected = b"Could not find pollers: nosuchpoller" self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_invalid_poller(self): yield self.setUpRequest({b"poller": [b"notapoller"]}) expected = b"Could not find pollers: notapoller" self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_trigger_poll(self): yield self.setUpRequest({b"poller": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_deny(self): yield self.setUpRequest({b"poller": [b"otherpoller"]}, options={b"allowed": [b"example"]}) expected = b"Could not find pollers: otherpoller" 
self.assertEqual(self.request.written, expected) self.request.setResponseCode.assert_called_with(400, expected) self.assertEqual(self.changesrc.called, False) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_allow(self): yield self.setUpRequest({b"poller": [b"example"]}, options={b"allowed": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) @defer.inlineCallbacks def test_allowlist_all(self): yield self.setUpRequest({}, options={b"allowed": [b"example"]}) self.assertEqual(self.request.written, b"no change found") self.assertEqual(self.changesrc.called, True) self.assertEqual(self.otherpoller.called, False) buildbot-2.6.0/master/buildbot/test/unit/test_www_ldapuserinfo.py000066400000000000000000000234601361162603000253730ustar00rootroot00000000000000# coding: utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import sys import types import mock from twisted.internet import defer from twisted.trial import unittest def get_config_parameter(p): params = {'DEFAULT_SERVER_ENCODING': 'utf-8'} return params[p] fake_ldap = types.ModuleType('ldap3') fake_ldap.SEARCH_SCOPE_WHOLE_SUBTREE = 2 fake_ldap.get_config_parameter = get_config_parameter with mock.patch.dict(sys.modules, {'ldap3': fake_ldap}): from buildbot.www import ldapuserinfo class FakeLdap: def __init__(self): def search(base, filterstr='f', scope=None, attributes=None): pass self.search = mock.Mock(spec=search) class CommonTestCase(unittest.TestCase): """Common fixture for all ldapuserinfo tests we completely fake the ldap3 module, so no need to require it to run the unit tests """ def setUp(self): self.ldap = FakeLdap() self.makeUserInfoProvider() self.userInfoProvider.connectLdap = lambda: self.ldap def search(base, filterstr='f', attributes=None): pass self.userInfoProvider.search = mock.Mock(spec=search) def makeUserInfoProvider(self): """To be implemented by subclasses""" raise NotImplementedError def _makeSearchSideEffect(self, attribute_type, ret): ret = [[{'dn': i[0], attribute_type: i[1]} for i in r] for r in ret] self.userInfoProvider.search.side_effect = ret def makeSearchSideEffect(self, ret): return self._makeSearchSideEffect('attributes', ret) def makeRawSearchSideEffect(self, ret): return self._makeSearchSideEffect('raw_attributes', ret) def assertSearchCalledWith(self, exp): got = self.userInfoProvider.search.call_args_list self.assertEqual(len(exp), len(got)) for i, val in enumerate(exp): self.assertEqual(exp[i][0][0], got[i][0][1]) self.assertEqual(exp[i][0][1], got[i][0][2]) self.assertEqual(exp[i][0][2], got[i][1]['attributes']) class LdapUserInfo(CommonTestCase): def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", 
accountBase="accbase", groupBase="groupbase", accountPattern="accpattern", groupMemberPattern="groupMemberPattern", accountFullName="accountFullName", accountEmail="accountEmail", groupName="groupName", avatarPattern="avatar", avatarData="picture", accountExtraFields=["myfield"]) @defer.inlineCallbacks def test_updateUserInfoNoResults(self): self.makeSearchSideEffect([[], [], []]) try: yield self.userInfoProvider.getUserInfo("me") except KeyError as e: self.assertRegex( repr(e), r"KeyError\('ldap search \"accpattern\" returned 0 results',?\)") else: self.fail("should have raised a key error") @defer.inlineCallbacks def test_updateUserInfoNoGroups(self): self.makeSearchSideEffect([[( "cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertSearchCalledWith([ (('accbase', 'accpattern', ['accountEmail', 'accountFullName', 'myfield']), {}), (('groupbase', 'groupMemberPattern', ['groupName']), {}), ]) self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': [], 'username': 'me'}) @defer.inlineCallbacks def test_updateUserInfoGroups(self): self.makeSearchSideEffect([[("cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [("cn", {"groupName": ["group"]}), ("cn", {"groupName": ["group2"]}) ], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': ["group", "group2"], 'username': 'me'}) @defer.inlineCallbacks def test_updateUserInfoGroupsUnicodeDn(self): # In case of non Ascii DN, ldap3 lib returns an UTF-8 str dn = "cn=Sébastien,dc=example,dc=org" # If groupMemberPattern is an str, and dn is not decoded, # the resulting filter will be an str, leading to UnicodeDecodeError # in ldap3.protocol.convert.validate_assertion_value() # So we use an unicode pattern: self.userInfoProvider.groupMemberPattern = '(member=%(dn)s)' self.makeSearchSideEffect([[(dn, {"accountFullName": "me too", "accountEmail": "mee@too"})], [("cn", {"groupName": ["group"]}), ("cn", {"groupName": ["group2"]}) ], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': ["group", "group2"], 'username': 'me'}) @defer.inlineCallbacks def _getUserAvatar(self, mimeTypeAndData): (mimeType, data) = mimeTypeAndData self.makeRawSearchSideEffect([ [("cn", {"picture": [data]})]]) res = yield self.userInfoProvider.getUserAvatar("me", 21, None) self.assertSearchCalledWith([ (('accbase', 'avatar', ['picture']), {}), ]) return res @defer.inlineCallbacks def test_getUserAvatarPNG(self): mimeTypeAndData = ('image/png', b'\x89PNG lljklj') res = yield self._getUserAvatar(mimeTypeAndData) self.assertEqual(res, mimeTypeAndData) @defer.inlineCallbacks def test_getUserAvatarJPEG(self): mimeTypeAndData = ('image/jpeg', b'\xff\xd8\xff lljklj') res = yield self._getUserAvatar(mimeTypeAndData) self.assertEqual(res, mimeTypeAndData) @defer.inlineCallbacks def test_getUserAvatarGIF(self): mimeTypeAndData = ('image/gif', b'GIF8 lljklj') res = yield self._getUserAvatar(mimeTypeAndData) self.assertEqual(res, mimeTypeAndData) @defer.inlineCallbacks def test_getUserAvatarUnknownType(self): mimeTypeAndData = ('', b'unknown image format') res = yield self._getUserAvatar(mimeTypeAndData) self.assertIsNone(res) class LdapUserInfoNoGroups(CommonTestCase): def makeUserInfoProvider(self): self.userInfoProvider = ldapuserinfo.LdapUserInfo( uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", 
accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail", avatarPattern="avatar", avatarData="picture", accountExtraFields=["myfield"]) @defer.inlineCallbacks def test_updateUserInfo(self): self.makeSearchSideEffect([[( "cn", {"accountFullName": "me too", "accountEmail": "mee@too"})], [], []]) res = yield self.userInfoProvider.getUserInfo("me") self.assertSearchCalledWith([ (('accbase', 'accpattern', ['accountEmail', 'accountFullName', 'myfield']), {}), ]) self.assertEqual(res, {'email': 'mee@too', 'full_name': 'me too', 'groups': [], 'username': 'me'}) class Config(unittest.TestCase): def test_missing_group_name(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupMemberPattern="member=%(dn)s", groupBase="grpbase", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") def test_missing_group_base(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupMemberPattern="member=%(dn)s", groupName="group", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") def test_missing_two_params(self): with self.assertRaises(ValueError): ldapuserinfo.LdapUserInfo(groupName="group", uri="ldap://uri", bindUser="user", bindPw="pass", accountBase="accbase", accountPattern="accpattern", accountFullName="accountFullName", accountEmail="accountEmail") buildbot-2.6.0/master/buildbot/test/unit/test_www_oauth.py000066400000000000000000000565021361162603000240230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import json import os import webbrowser import mock import twisted from twisted.internet import defer from twisted.internet import reactor from twisted.internet import threads from twisted.python import failure from twisted.trial import unittest from twisted.web.resource import Resource from twisted.web.server import Site from buildbot.process.properties import Secret from buildbot.secrets.manager import SecretManager from buildbot.test.fake.secrets import FakeSecretStorage from buildbot.test.util import www from buildbot.test.util.config import ConfigErrorsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode try: import requests except ImportError: requests = None if requests: from buildbot.www import oauth2 # pylint: disable=ungrouped-imports class FakeResponse: def __init__(self, _json): self.json = lambda: _json self.content = json.dumps(_json) def raise_for_status(self): pass class OAuth2Auth(TestReactorMixin, www.WwwTestMixin, ConfigErrorsMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() if requests is None: raise unittest.SkipTest("Need to install requests to test oauth2") self.patch(requests, 'request', mock.Mock(spec=requests.request)) self.patch(requests, 'post', mock.Mock(spec=requests.post)) self.patch(requests, 'get', mock.Mock(spec=requests.get)) self.googleAuth = oauth2.GoogleAuth("ggclientID", "clientSECRET") self.githubAuth = oauth2.GitHubAuth("ghclientID", "clientSECRET") self.githubAuth_v4 = oauth2.GitHubAuth( "ghclientID", "clientSECRET", apiVersion=4) self.githubAuth_v4_teams = oauth2.GitHubAuth( "ghclientID", "clientSECRET", apiVersion=4, getTeamsMembership=True) self.githubAuthEnt = oauth2.GitHubAuth( "ghclientID", "clientSECRET", serverURL="https://git.corp.fakecorp.com") self.gitlabAuth = oauth2.GitLabAuth( "https://gitlab.test/", "glclientID", "clientSECRET") self.bitbucketAuth = oauth2.BitbucketAuth("bbclientID", "clientSECRET") for auth in [self.googleAuth, self.githubAuth, self.githubAuth_v4, self.githubAuth_v4_teams, self.githubAuthEnt, self.gitlabAuth, self.bitbucketAuth]: self._master = master = self.make_master(url='h:/a/b/', auth=auth) auth.reconfigAuth(master, master.config) self.githubAuth_secret = oauth2.GitHubAuth( Secret("client-id"), Secret("client-secret"), apiVersion=4) self._master = master = self.make_master(url='h:/a/b/', auth=auth) fake_storage_service = FakeSecretStorage() fake_storage_service.reconfigService(secretdict={"client-id": "secretClientId", "client-secret": "secretClientSecret"}) secret_service = SecretManager() secret_service.services = [fake_storage_service] yield secret_service.setServiceParent(self._master) self.githubAuth_secret.reconfigAuth(master, master.config) @defer.inlineCallbacks def test_getGoogleLoginURL(self): res = yield self.googleAuth.getLoginURL('http://redir') exp = ("https://accounts.google.com/o/oauth2/auth?client_id=ggclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.email+" "https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.profile&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.googleAuth.getLoginURL(None) exp = ("https://accounts.google.com/o/oauth2/auth?client_id=ggclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.email+" 
"https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.profile") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubLoginURL(self): res = yield self.githubAuth.getLoginURL('http://redir') exp = ("https://github.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuth.getLoginURL(None) exp = ("https://github.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubLoginURL_with_secret(self): res = yield self.githubAuth_secret.getLoginURL('http://redir') exp = ("https://github.com/login/oauth/authorize?client_id=secretClientId&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuth_secret.getLoginURL(None) exp = ("https://github.com/login/oauth/authorize?client_id=secretClientId&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGithubELoginURL(self): res = yield self.githubAuthEnt.getLoginURL('http://redir') exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.githubAuthEnt.getLoginURL(None) exp = ("https://git.corp.fakecorp.com/login/oauth/authorize?client_id=ghclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&response_type=code&" "scope=user%3Aemail+read%3Aorg") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getGitLabLoginURL(self): res = yield self.gitlabAuth.getLoginURL('http://redir') exp = ("https://gitlab.test/oauth/authorize" "?client_id=glclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.gitlabAuth.getLoginURL(None) exp = ("https://gitlab.test/oauth/authorize" "?client_id=glclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code") self.assertEqual(res, exp) @defer.inlineCallbacks def test_getBitbucketLoginURL(self): res = yield self.bitbucketAuth.getLoginURL('http://redir') exp = ("https://bitbucket.org/site/oauth2/authorize?" "client_id=bbclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code&" "state=redirect%3Dhttp%253A%252F%252Fredir") self.assertEqual(res, exp) res = yield self.bitbucketAuth.getLoginURL(None) exp = ("https://bitbucket.org/site/oauth2/authorize?" 
"client_id=bbclientID&" "redirect_uri=h%3A%2Fa%2Fb%2Fauth%2Flogin&" "response_type=code") self.assertEqual(res, exp) @defer.inlineCallbacks def test_GoogleVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.googleAuth.get = mock.Mock(side_effect=[dict( name="foo bar", email="bar@foo", picture="http://pic")]) res = yield self.googleAuth.verifyCode("code!") self.assertEqual({'avatar_url': 'http://pic', 'email': 'bar@foo', 'full_name': 'foo bar', 'username': 'bar'}, res) @defer.inlineCallbacks def test_GithubVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.githubAuth.get = mock.Mock(side_effect=[ dict( # /user login="bar", name="foo bar", email="buzz@bar"), [ # /user/emails {'email': 'buzz@bar', 'verified': True, 'primary': False}, {'email': 'bar@foo', 'verified': True, 'primary': True}], [ # /user/orgs dict(login="hello"), dict(login="grp"), ]]) res = yield self.githubAuth.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def test_GithubVerifyCode_v4(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.githubAuth_v4.post = mock.Mock(side_effect=[ { 'data': { 'viewer': { 'organizations': { 'edges': [ { 'node': { 'login': 'hello' } }, { 'node': { 'login': 'grp' } } ] }, 'login': 'bar', 'email': 'bar@foo', 'name': 'foo bar' } } } ]) res = yield self.githubAuth_v4.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def test_GithubVerifyCode_v4_teams(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.githubAuth_v4_teams.post = mock.Mock(side_effect=[ { 'data': { 'viewer': { 'organizations': { 'edges': [ { 'node': { 'login': 'hello' } }, { 'node': { 'login': 'grp' } } ] }, 'login': 'bar', 'email': 'bar@foo', 'name': 'foo bar' } } }, { 'data': { 'hello': { 'teams': { 'edges': [ { 'node': { 'name': 'developers', 'slug': 'develpers' } }, { 'node': { 'name': 'contributors', 'slug': 'contributors' } } ] } }, 'grp': { 'teams': { 'edges': [ { 'node': { 'name': 'developers', 'slug': 'develpers' } }, { 'node': { 'name': 'contributors', 'slug': 'contributors' } }, { 'node': { 'name': 'committers', 'slug': 'committers' } }, { 'node': { 'name': 'Team with spaces and caps', 'slug': 'team-with-spaces-and-caps' } }, ] } }, } } ]) res = yield self.githubAuth_v4_teams.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', 'groups': [ 'hello', 'grp', 'grp/Team with spaces and caps', 'grp/committers', 'grp/contributors', 'grp/developers', 'grp/develpers', 'grp/team-with-spaces-and-caps', 'hello/contributors', 'hello/developers', 'hello/develpers', ], 'full_name': 'foo bar'}, res) def test_GitHubAuthBadApiVersion(self): for bad_api_version in (2, 5, 'a'): with self.assertRaisesConfigError( 'GitHubAuth apiVersion must be 3 or 4 not '): oauth2.GitHubAuth("ghclientID", "clientSECRET", apiVersion=bad_api_version) def test_GitHubAuthRaiseErrorWithApiV3AndGetTeamMembership(self): with self.assertRaisesConfigError( 'Retrieving team membership information using GitHubAuth is only possible using GitHub api v4.'): oauth2.GitHubAuth("ghclientID", "clientSECRET", apiVersion=3, getTeamsMembership=True) @defer.inlineCallbacks def 
test_GitlabVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.gitlabAuth.get = mock.Mock(side_effect=[ { # /user "name": "Foo Bar", "username": "fbar", "id": 5, "avatar_url": "https://avatar/fbar.png", "email": "foo@bar", "twitter": "fb", }, [ # /groups {"id": 10, "name": "Hello", "path": "hello"}, {"id": 20, "name": "Group", "path": "grp"}, ]]) res = yield self.gitlabAuth.verifyCode("code!") self.assertEqual({"full_name": "Foo Bar", "username": "fbar", "email": "foo@bar", "avatar_url": "https://avatar/fbar.png", "groups": ["hello", "grp"]}, res) @defer.inlineCallbacks def test_BitbucketVerifyCode(self): requests.get.side_effect = [] requests.post.side_effect = [ FakeResponse(dict(access_token="TOK3N"))] self.bitbucketAuth.get = mock.Mock(side_effect=[ dict( # /user username="bar", display_name="foo bar"), dict( # /user/emails values=[ {'email': 'buzz@bar', 'is_primary': False}, {'email': 'bar@foo', 'is_primary': True}]), dict( # /teams?role=member values=[ {'username': 'hello'}, {'username': 'grp'}]) ]) res = yield self.bitbucketAuth.verifyCode("code!") self.assertEqual({'email': 'bar@foo', 'username': 'bar', "groups": ["hello", "grp"], 'full_name': 'foo bar'}, res) @defer.inlineCallbacks def test_loginResource(self): class fakeAuth: homeUri = "://me" getLoginURL = mock.Mock(side_effect=lambda x: defer.succeed("://")) verifyCode = mock.Mock( side_effect=lambda code: defer.succeed({"username": "bar"})) acceptToken = mock.Mock( side_effect=lambda token: defer.succeed({"username": "bar"})) userInfoProvider = None rsrc = self.githubAuth.getLoginResource() rsrc.auth = fakeAuth() res = yield self.render_resource(rsrc, b'/') rsrc.auth.getLoginURL.assert_called_once_with(None) rsrc.auth.verifyCode.assert_not_called() self.assertEqual(res, {'redirected': b'://'}) rsrc.auth.getLoginURL.reset_mock() rsrc.auth.verifyCode.reset_mock() res = yield self.render_resource(rsrc, b'/?code=code!') rsrc.auth.getLoginURL.assert_not_called() rsrc.auth.verifyCode.assert_called_once_with(b"code!") self.assertEqual(self.master.session.user_info, {'username': 'bar'}) self.assertEqual(res, {'redirected': b'://me'}) # token not supported anymore res = yield self.render_resource(rsrc, b'/?token=token!') rsrc.auth.getLoginURL.assert_called_once() def test_getConfig(self): self.assertEqual(self.githubAuth.getConfigDict(), {'fa_icon': 'fa-github', 'autologin': False, 'name': 'GitHub', 'oauth2': True}) self.assertEqual(self.googleAuth.getConfigDict(), {'fa_icon': 'fa-google-plus', 'autologin': False, 'name': 'Google', 'oauth2': True}) self.assertEqual(self.gitlabAuth.getConfigDict(), {'fa_icon': 'fa-git', 'autologin': False, 'name': 'GitLab', 'oauth2': True}) self.assertEqual(self.bitbucketAuth.getConfigDict(), {'fa_icon': 'fa-bitbucket', 'autologin': False, 'name': 'Bitbucket', 'oauth2': True}) # unit tests are not very useful to write new oauth support # so following is an e2e test, which opens a browser, and do the oauth # negotiation. 
The browser window close in the end of the test # in order to use this tests, you need to create Github/Google ClientID (see doc on how to do it) # point OAUTHCONF environment variable to a file with following params: # { # "GitHubAuth": { # "CLIENTID": "XX # "CLIENTSECRET": "XX" # }, # "GoogleAuth": { # "CLIENTID": "XX", # "CLIENTSECRET": "XX" # } # "GitLabAuth": { # "INSTANCEURI": "XX", # "CLIENTID": "XX", # "CLIENTSECRET": "XX" # } # } class OAuth2AuthGitHubE2E(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): authClass = "GitHubAuth" def _instantiateAuth(self, cls, config): return cls(config["CLIENTID"], config["CLIENTSECRET"]) def setUp(self): self.setUpTestReactor() if requests is None: raise unittest.SkipTest("Need to install requests to test oauth2") if "OAUTHCONF" not in os.environ: raise unittest.SkipTest( "Need to pass OAUTHCONF path to json file via environ to run this e2e test") with open(os.environ['OAUTHCONF']) as f: jsonData = f.read() config = json.loads(jsonData)[self.authClass] from buildbot.www import oauth2 self.auth = self._instantiateAuth( getattr(oauth2, self.authClass), config) # 5000 has to be hardcoded, has oauth clientids are bound to a fully # classified web site master = self.make_master(url='http://localhost:5000/', auth=self.auth) self.auth.reconfigAuth(master, master.config) def tearDown(self): from twisted.internet.tcp import Server # browsers has the bad habit on not closing the persistent # connections, so we need to hack them away to make trial happy f = failure.Failure(Exception("test end")) for reader in reactor.getReaders(): if isinstance(reader, Server): reader.connectionLost(f) @defer.inlineCallbacks def test_E2E(self): d = defer.Deferred() twisted.web.http._logDateTimeUsers = 1 class HomePage(Resource): isLeaf = True def render_GET(self, request): info = request.getSession().user_info reactor.callLater(0, d.callback, info) return (b"WORKED: " + info + b"") class MySite(Site): def makeSession(self): uid = self._mkuid() session = self.sessions[uid] = self.sessionFactory(self, uid) return session root = Resource() root.putChild(b"", HomePage()) auth = Resource() root.putChild(b'auth', auth) auth.putChild(b'login', self.auth.getLoginResource()) site = MySite(root) listener = reactor.listenTCP(5000, site) def thd(): res = requests.get('http://localhost:5000/auth/login') content = bytes2unicode(res.content) webbrowser.open(content) threads.deferToThread(thd) res = yield d yield listener.stopListening() yield site.stopFactory() self.assertIn("full_name", res) self.assertIn("email", res) self.assertIn("username", res) class OAuth2AuthGoogleE2E(OAuth2AuthGitHubE2E): authClass = "GoogleAuth" class OAuth2AuthGitLabE2E(OAuth2AuthGitHubE2E): authClass = "GitLabAuth" def _instantiateAuth(self, cls, config): return cls(config["INSTANCEURI"], config["CLIENTID"], config["CLIENTSECRET"]) buildbot-2.6.0/master/buildbot/test/unit/test_www_resource.py000066400000000000000000000040441361162603000245240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot.test.util import www
from buildbot.test.util.misc import TestReactorMixin
from buildbot.www import resource


class ResourceSubclass(resource.Resource):
    needsReconfig = True


class Resource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()

    def test_base_url(self):
        master = self.make_master(url=b'h:/a/b/')
        rsrc = resource.Resource(master)
        self.assertEqual(rsrc.base_url, b'h:/a/b/')

    def test_reconfigResource_registration(self):
        master = self.make_master(url=b'h:/a/b/')
        rsrc = ResourceSubclass(master)
        master.www.resourceNeedsReconfigs.assert_called_with(rsrc)


class RedirectResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()

    def test_redirect(self):
        master = self.make_master(url=b'h:/a/b/')
        rsrc = resource.RedirectResource(master, b'foo')
        self.render_resource(rsrc, b'/')
        self.assertEqual(self.request.redirected_to, b'h:/a/b/foo')

    def test_redirect_cr_lf(self):
        master = self.make_master(url=b'h:/a/b/')
        rsrc = resource.RedirectResource(master, b'foo\r\nbar')
        self.render_resource(rsrc, b'/')
        self.assertEqual(self.request.redirected_to, b'h:/a/b/foo')
buildbot-2.6.0/master/buildbot/test/unit/test_www_rest.py000066400000000000000000001132751361162603000236550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# # Copyright Buildbot Team Members import json import re import mock from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import endpoint from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import authz from buildbot.www import rest from buildbot.www.rest import JSONRPC_CODES from buildbot.www.rest import BadRequest class RestRootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): maxVersion = 2 def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_render(self): master = self.make_master(url='h:/a/b/') rsrc = rest.RestRootResource(master) rv = yield self.render_resource(rsrc, b'/') self.assertIn(b'api_versions', rv) def test_versions(self): master = self.make_master(url='h:/a/b/') rsrc = rest.RestRootResource(master) versions = [unicode2bytes('v{}'.format(v)) for v in range(2, self.maxVersion + 1)] versions = [unicode2bytes(v) for v in versions] versions.append(b'latest') self.assertEqual(sorted(rsrc.listNames()), sorted(versions)) def test_versions_limited(self): master = self.make_master(url='h:/a/b/') master.config.www['rest_minimum_version'] = 2 rsrc = rest.RestRootResource(master) versions = [unicode2bytes('v{}'.format(v)) for v in range(2, self.maxVersion + 1)] versions.append(b'latest') self.assertEqual(sorted(rsrc.listNames()), sorted(versions)) class V2RootResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='http://server/path/') self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def assertSimpleError(self, message, responseCode): content = json.dumps({'error': message}) self.assertRequest(content=unicode2bytes(content), responseCode=responseCode) @defer.inlineCallbacks def test_failure(self): self.rsrc.renderRest = mock.Mock( return_value=defer.fail(RuntimeError('oh noes'))) yield self.render_resource(self.rsrc, b'/') self.assertSimpleError('internal error - see logs', 500) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_invalid_http_method(self): yield self.render_resource(self.rsrc, b'/', method=b'PATCH') self.assertSimpleError('invalid HTTP method', 400) def do_check_origin_regexp(self, goods, bads): self.assertEqual(len(self.rsrc.origins), 1) regexp = self.rsrc.origins[0] for good in goods: self.assertTrue( regexp.match(good), "{} should match default origin({}), but its not".format( good, regexp.pattern )) for bad in bads: self.assertFalse( regexp.match(bad), "{} should not match default origin({}), but it is".format( bad, regexp.pattern )) def test_default_origin(self): self.master.config.buildbotURL = 'http://server/path/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server"], ["http://otherserver", "http://otherserver:909"], ) self.master.config.buildbotURL = 'http://server/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server"], ["http://otherserver", "http://otherserver:909"], ) self.master.config.buildbotURL = 'http://server:8080/' self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["http://server:8080"], ["http://otherserver", "http://server:909"], ) self.master.config.buildbotURL = 'https://server:8080/' 
self.rsrc.reconfigResource(self.master.config) self.do_check_origin_regexp( ["https://server:8080"], ["http://server:8080", "https://otherserver:8080"], ) class V2RootResource_CORS(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.master.config.www['allowed_origins'] = [b'h://good'] self.rsrc.reconfigResource(self.master.config) def renderRest(request): request.write(b'ok') return defer.succeed(None) self.rsrc.renderRest = renderRest def assertOk(self, expectHeaders=True, content=b'ok', origin=b'h://good'): hdrs = { b'access-control-allow-origin': [origin], b'access-control-allow-headers': [b'Content-Type'], b'access-control-max-age': [b'3600'], } if expectHeaders else {} self.assertRequest(content=content, responseCode=200, headers=hdrs) def assertNotOk(self, message): content = json.dumps({'error': message}) content = unicode2bytes(content) self.assertRequest(content=content, responseCode=400) @defer.inlineCallbacks def test_cors_no_origin(self): # if the browser doesn't send Origin, there's nothing we can do to # protect the user yield self.render_resource(self.rsrc, b'/') self.assertOk(expectHeaders=False) @defer.inlineCallbacks def test_cors_origin_match(self): yield self.render_resource(self.rsrc, b'/', origin=b'h://good') self.assertOk() @defer.inlineCallbacks def test_cors_origin_match_star(self): self.master.config.www['allowed_origins'] = ['*'] self.rsrc.reconfigResource(self.master.config) yield self.render_resource(self.rsrc, b'/', origin=b'h://good') self.assertOk() @defer.inlineCallbacks def test_cors_origin_patterns(self): self.master.config.www['allowed_origins'] = ['h://*.good', 'hs://*.secure'] self.rsrc.reconfigResource(self.master.config) yield self.render_resource(self.rsrc, b'/', origin=b'h://foo.good') self.assertOk(origin=b'h://foo.good') yield self.render_resource(self.rsrc, b'/', origin=b'hs://x.secure') self.assertOk(origin=b'hs://x.secure') yield self.render_resource(self.rsrc, b'/', origin=b'h://x.secure') self.assertNotOk('invalid origin') @defer.inlineCallbacks def test_cors_origin_mismatch(self): yield self.render_resource(self.rsrc, b'/', origin=b'h://bad') self.assertNotOk('invalid origin') @defer.inlineCallbacks def test_cors_origin_mismatch_post(self): yield self.render_resource(self.rsrc, b'/', method=b'POST', origin=b'h://bad') content = json.dumps({'error': {'message': 'invalid origin'}}) content = unicode2bytes(content) self.assertRequest(content=content, responseCode=400) @defer.inlineCallbacks def test_cors_origin_preflight_match_GET(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'GET') self.assertOk(content=b'') @defer.inlineCallbacks def test_cors_origin_preflight_match_POST(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'POST') self.assertOk(content=b'') @defer.inlineCallbacks def test_cors_origin_preflight_bad_method(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://good', access_control_request_method=b'PATCH') self.assertNotOk(message='invalid method') @defer.inlineCallbacks def test_cors_origin_preflight_bad_origin(self): yield self.render_resource(self.rsrc, b'/', method=b'OPTIONS', origin=b'h://bad', access_control_request_method=b'GET') self.assertNotOk(message='invalid origin') class 
V2RootResource_REST(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') self.master.config.www['debug'] = True self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def allow(*args, **kw): return self.master.www.assertUserAllowed = allow endpoint.TestEndpoint.rtype = mock.MagicMock() endpoint.TestsEndpoint.rtype = mock.MagicMock() endpoint.Test.isCollection = True endpoint.Test.rtype = endpoint.Test def assertRestCollection(self, typeName, items, total=None, contentType=None, orderSignificant=False): self.assertFalse(isinstance(self.request.written, str)) got = {} got['content'] = json.loads(bytes2unicode(self.request.written)) got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode meta = {} if total is not None: meta['total'] = total exp = {} exp['content'] = {typeName: items, 'meta': meta} exp['contentType'] = [contentType or b'text/plain; charset=utf-8'] exp['responseCode'] = 200 # if order is not significant, sort so the comparison works if not orderSignificant: if 'content' in got and typeName in got['content']: got['content'][typeName].sort(key=lambda x: sorted(x.items())) exp['content'][typeName].sort(key=lambda x: sorted(x.items())) if 'meta' in got['content'] and 'links' in got['content']['meta']: got['content']['meta']['links'].sort( key=lambda l: (l['rel'], l['href'])) self.assertEqual(got, exp) def assertRestDetails(self, typeName, item, contentType=None): got = {} got['content'] = json.loads(bytes2unicode(self.request.written)) got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode exp = {} exp['content'] = { typeName: [item], 'meta': {}, } exp['contentType'] = [contentType or b'text/plain; charset=utf-8'] exp['responseCode'] = 200 self.assertEqual(got, exp) def assertRestError(self, responseCode, message): content = json.loads(bytes2unicode(self.request.written)) gotResponseCode = self.request.responseCode self.assertEqual(list(content.keys()), ['error']) self.assertRegex(content['error'], message) self.assertEqual(responseCode, gotResponseCode) @defer.inlineCallbacks def test_not_found(self): yield self.render_resource(self.rsrc, b'/not/found') self.assertRequest( contentJson=dict(error='Invalid path: not/found'), contentType=b'text/plain; charset=utf-8', responseCode=404) @defer.inlineCallbacks def test_invalid_query(self): yield self.render_resource(self.rsrc, b'/test?huh=1') self.assertRequest( contentJson=dict(error="unrecognized query parameter 'huh'"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_raw(self): yield self.render_resource(self.rsrc, b'/rawtest') self.assertRequest( content=b"value", contentType=b'text/test; charset=utf-8', responseCode=200, headers={b"content-disposition": [b'attachment; filename=test.txt']}) @defer.inlineCallbacks def test_api_head(self): get = yield self.render_resource(self.rsrc, b'/test', method=b'GET') head = yield self.render_resource(self.rsrc, b'/test', method=b'HEAD') self.assertEqual(head, b'') self.assertEqual(int(self.request.headers[b'content-length'][0]), len(get)) @defer.inlineCallbacks def test_api_collection(self): yield self.render_resource(self.rsrc, b'/test') self.assertRestCollection(typeName='tests', items=list(endpoint.testData.values()), total=8) @defer.inlineCallbacks def 
do_test_api_collection_pagination(self, query, ids, links): yield self.render_resource(self.rsrc, b'/test' + query) self.assertRestCollection(typeName='tests', items=[v for k, v in endpoint.testData.items() if k in ids], total=8) def test_api_collection_limit(self): return self.do_test_api_collection_pagination(b'?limit=2', [13, 14], { 'self': '%(self)s?limit=2', 'next': '%(self)s?offset=2&limit=2', }) def test_api_collection_offset(self): return self.do_test_api_collection_pagination(b'?offset=2', [15, 16, 17, 18, 19, 20], { 'self': '%(self)s?offset=2', 'first': '%(self)s', }) def test_api_collection_offset_limit(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=2', [18, 19], { 'first': '%(self)s?limit=2', 'prev': '%(self)s?offset=3&limit=2', 'next': '%(self)s?offset=7&limit=2', 'self': '%(self)s?offset=5&limit=2', }) def test_api_collection_limit_at_end(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=3', [18, 19, 20], { 'first': '%(self)s?limit=3', 'prev': '%(self)s?offset=2&limit=3', 'self': '%(self)s?offset=5&limit=3', }) def test_api_collection_limit_past_end(self): return self.do_test_api_collection_pagination(b'?offset=5&limit=20', [18, 19, 20], { 'first': '%(self)s?limit=20', 'prev': '%(self)s?limit=5', 'self': '%(self)s?offset=5&limit=20', }) def test_api_collection_offset_past_end(self): return self.do_test_api_collection_pagination(b'?offset=50&limit=10', [], { 'first': '%(self)s?limit=10', 'prev': '%(self)s?offset=40&limit=10', 'self': '%(self)s?offset=50&limit=10', }) @defer.inlineCallbacks def test_api_collection_invalid_limit(self): yield self.render_resource(self.rsrc, b'/test?limit=foo!') self.assertRequest( contentJson=dict(error="invalid limit"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_offset(self): yield self.render_resource(self.rsrc, b'/test?offset=foo!') self.assertRequest( contentJson=dict(error="invalid offset"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_simple_filter_value(self): yield self.render_resource(self.rsrc, b'/test?success=sorta') self.assertRequest( contentJson=dict(error="invalid filter value for success"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_invalid_filter_value(self): yield self.render_resource(self.rsrc, b'/test?id__lt=fifteen') self.assertRequest( contentJson=dict(error="invalid filter value for id__lt"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_fields(self): yield self.render_resource(self.rsrc, b'/test?field=success&field=info') self.assertRestCollection(typeName='tests', items=[{'success': v['success'], 'info': v['info']} for v in endpoint.testData.values()], total=8) @defer.inlineCallbacks def test_api_collection_invalid_field(self): yield self.render_resource(self.rsrc, b'/test?field=success&field=WTF') self.assertRequest( contentJson=dict(error="no such field 'WTF'"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_collection_simple_filter(self): yield self.render_resource(self.rsrc, b'/test?success=yes') self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if v['success']], total=5) @defer.inlineCallbacks def test_api_collection_list_filter(self): yield self.render_resource(self.rsrc, b'/test?tags__contains=a') 
self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if 'a' in v['tags']], total=2) @defer.inlineCallbacks def test_api_collection_operator_filter(self): yield self.render_resource(self.rsrc, b'/test?info__lt=skipped') self.assertRestCollection(typeName='tests', items=[v for v in endpoint.testData.values() if v['info'] < 'skipped'], total=4) @defer.inlineCallbacks def test_api_collection_order(self): yield self.render_resource(self.rsrc, b'/test?order=info') self.assertRestCollection(typeName='tests', items=sorted(list(endpoint.testData.values()), key=lambda v: v['info']), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_filter_and_order(self): yield self.render_resource(self.rsrc, b'/test?field=info&order=info') self.assertRestCollection(typeName='tests', items=sorted(list([{'info': v['info']} for v in endpoint.testData.values()]), key=lambda v: v['info']), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_order_desc(self): yield self.render_resource(self.rsrc, b'/test?order=-info') self.assertRestCollection(typeName='tests', items=sorted(list(endpoint.testData.values()), key=lambda v: v['info'], reverse=True), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_filter_and_order_desc(self): yield self.render_resource(self.rsrc, b'/test?field=info&order=-info') self.assertRestCollection(typeName='tests', items=sorted(list([{'info': v['info']} for v in endpoint.testData.values()]), key=lambda v: v['info'], reverse=True), total=8, orderSignificant=True) @defer.inlineCallbacks def test_api_collection_order_on_unselected(self): yield self.render_resource(self.rsrc, b'/test?field=id&order=info') self.assertRestError(message="cannot order on un-selected fields", responseCode=400) @defer.inlineCallbacks def test_api_collection_filter_on_unselected(self): yield self.render_resource(self.rsrc, b'/test?field=id&info__gt=xx') self.assertRestError(message="cannot filter on un-selected fields", responseCode=400) @defer.inlineCallbacks def test_api_collection_filter_pagination(self): yield self.render_resource(self.rsrc, b'/test?success=false&limit=2') # note that the limit/offset and total are *after* the filter self.assertRestCollection(typeName='tests', items=sorted([v for v in endpoint.testData.values() if not v['success']], key=lambda v: v['id'])[:2], total=3) @defer.inlineCallbacks def test_api_details(self): yield self.render_resource(self.rsrc, b'/test/13') self.assertRestDetails(typeName='tests', item=endpoint.testData[13]) @defer.inlineCallbacks def test_api_details_none(self): self.maxDiff = None yield self.render_resource(self.rsrc, b'/test/0') self.assertRequest( contentJson={'error': "not found while getting from endpoint for /test/n:testid with arguments" " ResultSpec(**{'filters': [], 'fields': None, 'properties': [], " "'order': None, 'limit': None, 'offset': None}) and {'testid': 0}"}, contentType=b'text/plain; charset=utf-8', responseCode=404) @defer.inlineCallbacks def test_api_details_filter_fails(self): yield self.render_resource(self.rsrc, b'/test/13?success=false') self.assertRequest( contentJson=dict(error="this is not a collection"), contentType=b'text/plain; charset=utf-8', responseCode=400) @defer.inlineCallbacks def test_api_details_fields(self): yield self.render_resource(self.rsrc, b'/test/13?field=info') self.assertRestDetails(typeName='tests', item={'info': endpoint.testData[13]['info']}) @defer.inlineCallbacks def test_api_with_accept(self): # when 
'application/json' is accepted, the result has that type yield self.render_resource(self.rsrc, b'/test/13', accept=b'application/json') self.assertRestDetails(typeName='tests', item=endpoint.testData[13], contentType=b'application/json; charset=utf-8') @defer.inlineCallbacks def test_api_fails(self): yield self.render_resource(self.rsrc, b'/test/fail') self.assertRestError(message=r"RuntimeError\('oh noes',?\)", responseCode=500) self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) def test_decode_result_spec_raise_bad_request_on_bad_property_value(self): expected_props = [None, 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} with self.assertRaises(BadRequest): self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) def test_decode_result_spec_limit(self): expected_limit = 5 self.make_request(b'/test') self.request.args = {b'limit': str(expected_limit)} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.limit, expected_limit) def test_decode_result_spec_order(self): expected_order = ('info',) self.make_request(b'/test') self.request.args = {b'order': expected_order} spec = self.rsrc.decodeResultSpec(self.request, endpoint.Test) self.assertEqual(spec.order, expected_order) def test_decode_result_spec_offset(self): expected_offset = 5 self.make_request(b'/test') self.request.args = {b'offset': str(expected_offset)} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.offset, expected_offset) def test_decode_result_spec_properties(self): expected_props = ['test1', 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestsEndpoint) self.assertEqual(spec.properties[0].values, expected_props) def test_decode_result_spec_not_a_collection_limit(self): def expectRaiseBadRequest(): limit = 5 self.make_request(b'/test') self.request.args = {b'limit': limit} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(rest.BadRequest): expectRaiseBadRequest() def test_decode_result_spec_not_a_collection_order(self): def expectRaiseBadRequest(): order = ('info',) self.make_request(b'/test') self.request.args = {b'order': order} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(rest.BadRequest): expectRaiseBadRequest() def test_decode_result_spec_not_a_collection_offset(self): def expectRaiseBadRequest(): offset = 0 self.make_request(b'/test') self.request.args = {b'offset': offset} self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) with self.assertRaises(rest.BadRequest): expectRaiseBadRequest() def test_decode_result_spec_not_a_collection_properties(self): expected_props = ['test1', 'test2'] self.make_request(b'/test') self.request.args = {b'property': expected_props} spec = self.rsrc.decodeResultSpec(self.request, endpoint.TestEndpoint) self.assertEqual(spec.properties[0].values, expected_props) @defer.inlineCallbacks def test_authz_forbidden(self): def deny(request, ep, action, options): if "test" in ep: raise authz.Forbidden("no no") return None self.master.www.assertUserAllowed = deny yield self.render_resource(self.rsrc, b'/test') self.assertRestAuthError(message=re.compile('no no'), responseCode=403) def assertRestAuthError(self, message, responseCode=400): got = {} got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode content = 
json.loads(bytes2unicode(self.request.written)) if 'error' not in content: self.fail("response does not have proper error form: %r" % (content,)) got['error'] = content['error'] exp = {} exp['contentType'] = [b'text/plain; charset=utf-8'] exp['responseCode'] = responseCode exp['error'] = message # process a regular expression for message, if given if not isinstance(message, str): if message.match(got['error']): exp['error'] = got['error'] else: exp['error'] = "MATCHING: %s" % (message.pattern,) self.assertEqual(got, exp) class V2RootResource_JSONRPC2(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/') def allow(*args, **kw): return self.master.www.assertUserAllowed = allow self.master.data._scanModule(endpoint) self.rsrc = rest.V2RootResource(self.master) self.rsrc.reconfigResource(self.master.config) def assertJsonRpcError(self, message, responseCode=400, jsonrpccode=None): got = {} got['contentType'] = self.request.headers[b'content-type'] got['responseCode'] = self.request.responseCode content = json.loads(bytes2unicode(self.request.written)) if ('error' not in content or sorted(content['error'].keys()) != ['code', 'message']): self.fail("response does not have proper error form: %r" % (content,)) got['error'] = content['error'] exp = {} exp['contentType'] = [b'application/json'] exp['responseCode'] = responseCode exp['error'] = {'code': jsonrpccode, 'message': message} # process a regular expression for message, if given if not isinstance(message, str): if message.match(got['error']['message']): exp['error']['message'] = got['error']['message'] else: exp['error']['message'] = "MATCHING: %s" % (message.pattern,) self.assertEqual(got, exp) @defer.inlineCallbacks def test_invalid_path(self): yield self.render_control_resource(self.rsrc, b'/not/found') self.assertJsonRpcError( message='Invalid path: not/found', jsonrpccode=JSONRPC_CODES['invalid_request'], responseCode=404) @defer.inlineCallbacks def test_invalid_action(self): yield self.render_control_resource(self.rsrc, b'/test', action='nosuch') self.assertJsonRpcError( message='action: nosuch is not supported', jsonrpccode=JSONRPC_CODES['method_not_found'], responseCode=501) @defer.inlineCallbacks def test_invalid_json(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson="{abc") self.assertJsonRpcError( message=re.compile('^JSON parse error'), jsonrpccode=JSONRPC_CODES['parse_error']) @defer.inlineCallbacks def test_invalid_content_type(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson='{"jsonrpc": "2.0", "method": "foo",' '"id":"abcdef", "params": {}}', content_type='application/x-www-form-urlencoded') self.assertJsonRpcError( message=re.compile('Invalid content-type'), jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def test_list_request(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson="[1,2]") self.assertJsonRpcError( message="JSONRPC batch requests are not supported", jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def test_bad_req_type(self): yield self.render_control_resource(self.rsrc, b'/test', requestJson='"a string?!"') self.assertJsonRpcError( message="JSONRPC root object must be an object", jsonrpccode=JSONRPC_CODES['invalid_request']) @defer.inlineCallbacks def do_test_invalid_req(self, requestJson, message): yield self.render_control_resource(self.rsrc, b'/test', requestJson=requestJson) self.assertJsonRpcError( 
message=message, jsonrpccode=JSONRPC_CODES['invalid_request']) def test_bad_req_jsonrpc_missing(self): return self.do_test_invalid_req( '{"method": "foo", "id":"abcdef", "params": {}}', "missing key 'jsonrpc'") def test_bad_req_jsonrpc_type(self): return self.do_test_invalid_req( '{"jsonrpc": 13, "method": "foo", "id":"abcdef", "params": {}}', "'jsonrpc' must be a string") def test_bad_req_jsonrpc_value(self): return self.do_test_invalid_req( '{"jsonrpc": "3.0", "method": "foo", "id":"abcdef", "params": {}}', "only JSONRPC 2.0 is supported") def test_bad_req_method_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "id":"abcdef", "params": {}}', "missing key 'method'") def test_bad_req_method_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": 999, "id":"abcdef", "params": {}}', "'method' must be a string") def test_bad_req_id_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "params": {}}', "missing key 'id'") def test_bad_req_id_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": {}, "params": {}}', "'id' must be a string, number, or null") def test_bad_req_params_missing(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": "abc"}', "missing key 'params'") def test_bad_req_params_type(self): return self.do_test_invalid_req( '{"jsonrpc": "2.0", "method": "foo", "id": "abc", "params": 999}', "'params' must be an object") @defer.inlineCallbacks def test_valid(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="testy", params={'foo': 3, 'bar': 5}) self.assertRequest( contentJson={ 'id': self.UUID, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'foo': 3, 'bar': 5, 'owner': 'anonymous'}, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_valid_int_id(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="testy", params={'foo': 3, 'bar': 5}, id=1823) self.assertRequest( contentJson={ 'id': 1823, 'jsonrpc': '2.0', 'result': { 'action': 'testy', 'args': {'foo': 3, 'bar': 5, 'owner': 'anonymous', }, 'kwargs': {'testid': 13}, }, }, contentType=b'application/json', responseCode=200) @defer.inlineCallbacks def test_valid_fails(self): yield self.render_control_resource(self.rsrc, b'/test/13', action="fail") self.assertJsonRpcError( message=re.compile('^RuntimeError'), jsonrpccode=JSONRPC_CODES['internal_error'], responseCode=500) # the error gets logged, too: self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) @defer.inlineCallbacks def test_authz_forbidden(self): def deny(request, ep, action, options): if "13" in ep: raise authz.Forbidden("no no") return None self.master.www.assertUserAllowed = deny yield self.render_control_resource(self.rsrc, b'/test/13', action="fail") self.assertJsonRpcError( message=re.compile('no no'), jsonrpccode=JSONRPC_CODES['invalid_request'], responseCode=403) class ContentTypeParser(unittest.TestCase): def test_simple(self): self.assertEqual( rest.ContentTypeParser(b"application/json").gettype(), "application/json") def test_complex(self): self.assertEqual(rest.ContentTypeParser(b"application/json; Charset=UTF-8").gettype(), "application/json") def test_text(self): self.assertEqual( rest.ContentTypeParser(b"text/plain; Charset=UTF-8").gettype(), "text/plain") buildbot-2.6.0/master/buildbot/test/unit/test_www_roles.py000066400000000000000000000074641361162603000240320ustar00rootroot00000000000000# redistribute 
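# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test module): the JSON-RPC 2.0
# body that the V2RootResource control tests above accept.  The endpoint path
# and parameter values are hypothetical; only the required keys matter.
import json

example_control_request = json.dumps({
    "jsonrpc": "2.0",       # must be exactly the string "2.0"
    "method": "testy",      # the control action to invoke on the endpoint
    "id": "abcdef",         # a string, a number, or null
    "params": {"foo": 3},   # must be a JSON object
})
# This body would be POSTed with Content-Type: application/json to an API
# path such as /api/v2/test/13.
# ---------------------------------------------------------------------------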
it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from twisted.trial import unittest

from buildbot.test.util.config import ConfigErrorsMixin
from buildbot.www.authz import roles


class RolesFromGroups(unittest.TestCase):

    def setUp(self):
        self.roles = roles.RolesFromGroups("buildbot-")

    def test_noGroups(self):
        ret = self.roles.getRolesFromUser(dict(
            username="homer"))
        self.assertEqual(ret, [])

    def test_noBuildbotGroups(self):
        ret = self.roles.getRolesFromUser(dict(
            username="homer",
            groups=["employee"]))
        self.assertEqual(ret, [])

    def test_someBuildbotGroups(self):
        ret = self.roles.getRolesFromUser(dict(
            username="homer",
            groups=["employee", "buildbot-maintainer", "buildbot-admin"]))
        self.assertEqual(ret, ["maintainer", "admin"])


class RolesFromEmails(unittest.TestCase):

    def setUp(self):
        self.roles = roles.RolesFromEmails(
            employee=["homer@plant.com", "burns@plant.com"],
            boss=["burns@plant.com"])

    def test_noUser(self):
        ret = self.roles.getRolesFromUser(dict(
            username="lisa", email="lisa@school.com"))
        self.assertEqual(ret, [])

    def test_User1(self):
        ret = self.roles.getRolesFromUser(dict(
            username="homer", email="homer@plant.com"))
        self.assertEqual(ret, ["employee"])

    def test_User2(self):
        ret = self.roles.getRolesFromUser(dict(
            username="burns", email="burns@plant.com"))
        self.assertEqual(sorted(ret), ["boss", "employee"])


class RolesFromOwner(unittest.TestCase):

    def setUp(self):
        self.roles = roles.RolesFromOwner("ownerofbuild")

    def test_noOwner(self):
        ret = self.roles.getRolesFromUser(dict(
            username="lisa", email="lisa@school.com"), None)
        self.assertEqual(ret, [])

    def test_notOwner(self):
        ret = self.roles.getRolesFromUser(dict(
            username="lisa", email="lisa@school.com"), "homer@plant.com")
        self.assertEqual(ret, [])

    def test_owner(self):
        ret = self.roles.getRolesFromUser(dict(
            username="homer", email="homer@plant.com"), "homer@plant.com")
        self.assertEqual(ret, ["ownerofbuild"])


class RolesFromUsername(unittest.TestCase, ConfigErrorsMixin):

    def setUp(self):
        self.roles = roles.RolesFromUsername(roles=["admins"], usernames=["Admin"])
        self.roles2 = roles.RolesFromUsername(
            roles=["developers", "integrators"], usernames=["Alice", "Bob"])

    def test_anonymous(self):
        ret = self.roles.getRolesFromUser(dict(anonymous=True))
        self.assertEqual(ret, [])

    def test_normalUser(self):
        ret = self.roles.getRolesFromUser(dict(username="Alice"))
        self.assertEqual(ret, [])

    def test_admin(self):
        ret = self.roles.getRolesFromUser(dict(username="Admin"))
        self.assertEqual(ret, ["admins"])

    def test_multipleGroups(self):
        ret = self.roles2.getRolesFromUser(dict(username="Bob"))
        self.assertEqual(ret, ["developers", "integrators"])

    def test_badUsernames(self):
        with self.assertRaisesConfigError('Usernames cannot be None'):
            roles.RolesFromUsername(roles=[], usernames=[None])

buildbot-2.6.0/master/buildbot/test/unit/test_www_service.py
# This file is part of Buildbot.
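# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): how the role
# providers tested above are typically wired into a master.cfg authz block.
# The group prefix, the e-mail address and the "admins" rule are hypothetical.
from buildbot.plugins import util

example_authz = util.Authz(
    allowRules=[
        # only users holding the "admins" role may hit control endpoints
        util.AnyControlEndpointMatcher(role="admins"),
    ],
    roleMatchers=[
        # membership in "buildbot-admin" becomes the "admin" role
        util.RolesFromGroups(groupPrefix="buildbot-"),
        # this e-mail address gets the "admins" role directly
        util.RolesFromEmails(admins=["homer@plant.com"]),
    ],
)
# In a real master.cfg:  c['www'] = dict(port=8010, authz=example_authz)
# ---------------------------------------------------------------------------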
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import calendar import datetime import jwt import mock from twisted.cred import strcred from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.internet import defer from twisted.trial import unittest from twisted.web._auth.wrapper import HTTPAuthSessionWrapper from twisted.web.server import Request from buildbot.test.unit import test_www_hooks_base from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth from buildbot.www import change_hook from buildbot.www import resource from buildbot.www import rest from buildbot.www import service class FakeChannel: transport = None def isSecure(self): return False def getPeer(self): return None def getHost(self): return None class NeedsReconfigResource(resource.Resource): needsReconfig = True reconfigs = 0 def reconfigResource(self, config): NeedsReconfigResource.reconfigs += 1 class Test(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() self.master = self.make_master(url='h:/a/b/') self.svc = self.master.www = service.WWWService() yield self.svc.setServiceParent(self.master) def makeConfig(self, **kwargs): w = dict(port=None, auth=auth.NoAuth(), logfileName='l') w.update(kwargs) new_config = mock.Mock() new_config.www = w new_config.buildbotURL = 'h:/' self.master.config = new_config return new_config @defer.inlineCallbacks def test_reconfigService_no_port(self): new_config = self.makeConfig() yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(self.svc.site, None) @defer.inlineCallbacks def test_reconfigService_reconfigResources(self): new_config = self.makeConfig(port=8080) self.patch(rest, 'RestRootResource', NeedsReconfigResource) NeedsReconfigResource.reconfigs = 0 # first time, reconfigResource gets called along with setupSite yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(NeedsReconfigResource.reconfigs, 1) # and the next time, setupSite isn't called, but reconfigResource is yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertEqual(NeedsReconfigResource.reconfigs, 2) @defer.inlineCallbacks def test_reconfigService_port(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, 20) @defer.inlineCallbacks def test_reconfigService_expiration_time(self): new_config = self.makeConfig(port=80, cookie_expiration_time=datetime.timedelta(minutes=1)) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(service.BuildbotSession.expDelay, datetime.timedelta(minutes=1)) 
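# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the kind of 'www'
# configuration dictionary that the reconfig tests above exercise.  The port,
# log file name and expiration delay are hypothetical values.
import datetime

from buildbot.www import auth

example_www_config = dict(
    port=8010,
    auth=auth.NoAuth(),
    logfileName='http.log',
    cookie_expiration_time=datetime.timedelta(weeks=1),
)
# In a real master.cfg this dictionary would be assigned to c['www'].
# ---------------------------------------------------------------------------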
@defer.inlineCallbacks def test_reconfigService_port_changes(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) newer_config = self.makeConfig(port=999) yield self.svc.reconfigServiceWithBuildbotConfig(newer_config) self.assertNotEqual(self.svc.site, None) self.assertNotEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, 999) @defer.inlineCallbacks def test_reconfigService_port_changes_to_none(self): new_config = self.makeConfig(port=20) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) newer_config = self.makeConfig() yield self.svc.reconfigServiceWithBuildbotConfig(newer_config) # (note the site sticks around) self.assertEqual(self.svc.port_service, None) self.assertEqual(self.svc.port, None) def test_setupSite(self): self.svc.setupSite(self.makeConfig()) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() self.assertIsInstance(root.getChildWithDefault(b'api', req), rest.RestRootResource) def test_setupSiteWithProtectedHook(self): checker = InMemoryUsernamePasswordDatabaseDontUse() checker.addUser("guest", "password") self.svc.setupSite(self.makeConfig( change_hook_dialects={'base': True}, change_hook_auth=[checker])) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() self.assertIsInstance(root.getChildWithDefault(b'change_hook', req), HTTPAuthSessionWrapper) @defer.inlineCallbacks def test_setupSiteWithHook(self): new_config = self.makeConfig( change_hook_dialects={'base': True}) self.svc.setupSite(new_config) site = self.svc.site # check that it has the right kind of resources attached to its # root root = site.resource req = mock.Mock() ep = root.getChildWithDefault(b'change_hook', req) self.assertIsInstance(ep, change_hook.ChangeHookResource) # not yet configured self.assertEqual(ep.dialects, {}) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) # now configured self.assertEqual(ep.dialects, {'base': True}) rsrc = self.svc.site.resource.getChildWithDefault(b'change_hook', mock.Mock()) path = b'/change_hook/base' request = test_www_hooks_base._prepare_request({}) self.master.data.updates.addChange = mock.Mock() yield self.render_resource(rsrc, path, request=request) self.master.data.updates.addChange.assert_called() @defer.inlineCallbacks def test_setupSiteWithHookAndAuth(self): fn = self.mktemp() with open(fn, 'w') as f: f.write("user:pass") new_config = self.makeConfig( port=8080, plugins={}, change_hook_dialects={'base': True}, change_hook_auth=[strcred.makeChecker("file:" + fn)]) self.svc.setupSite(new_config) yield self.svc.reconfigServiceWithBuildbotConfig(new_config) rsrc = self.svc.site.resource.getChildWithDefault(b'', mock.Mock()) res = yield self.render_resource(rsrc, b'') self.assertIn(b'{"type": "file"}', res) rsrc = self.svc.site.resource.getChildWithDefault( b'change_hook', mock.Mock()) res = yield self.render_resource(rsrc, b'/change_hook/base') # as UnauthorizedResource is in private namespace, we cannot use # assertIsInstance :-( self.assertIn('UnauthorizedResource', repr(res)) class TestBuildbotSite(unittest.SynchronousTestCase): SECRET = 'secret' def setUp(self): self.site = service.BuildbotSite(None, "logs", 0, 0) self.site.setSessionSecret(self.SECRET) def test_getSession_from_bad_jwt(self): """ if the cookie is bad (maybe from previous version of buildbot), then we should raise KeyError for consumption by caller, 
and log the JWT error """ with self.assertRaises(KeyError): self.site.getSession("xxx") self.flushLoggedErrors(jwt.exceptions.DecodeError) def test_getSession_from_correct_jwt(self): payload = {'user_info': {'some': 'payload'}} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) session = self.site.getSession(uid) self.assertEqual(session.user_info, {'some': 'payload'}) def test_getSession_from_expired_jwt(self): # expired one week ago exp = datetime.datetime.utcnow() - datetime.timedelta(weeks=1) exp = calendar.timegm(datetime.datetime.timetuple(exp)) payload = {'user_info': {'some': 'payload'}, 'exp': exp} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) with self.assertRaises(KeyError): self.site.getSession(uid) def test_getSession_with_no_user_info(self): payload = {'foo': 'bar'} uid = jwt.encode(payload, self.SECRET, algorithm=service.SESSION_SECRET_ALGORITHM) with self.assertRaises(KeyError): self.site.getSession(uid) def test_makeSession(self): session = self.site.makeSession() self.assertEqual(session.user_info, {'anonymous': True}) def test_updateSession(self): session = self.site.makeSession() request = Request(FakeChannel(), False) request.sitepath = [b"bb"] session.updateSession(request) self.assertEqual(len(request.cookies), 1) name, value = request.cookies[0].split(b";")[0].split(b"=") decoded = jwt.decode(value, self.SECRET, algorithms=[service.SESSION_SECRET_ALGORITHM]) self.assertEqual(decoded['user_info'], {'anonymous': True}) self.assertIn('exp', decoded) def test_absentServerHeader(self): request = Request(FakeChannel(), False) self.assertEqual(request.responseHeaders.hasHeader('Server'), False) buildbot-2.6.0/master/buildbot/test/unit/test_www_sse.py000066400000000000000000000117321361162603000234710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
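# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the JWT round trip
# behind the session tests above.  The secret and payload are hypothetical.
import jwt

from buildbot.www.service import SESSION_SECRET_ALGORITHM

secret = 'not-a-real-secret'
token = jwt.encode({'user_info': {'username': 'homer'}}, secret,
                   algorithm=SESSION_SECRET_ALGORITHM)
claims = jwt.decode(token, secret, algorithms=[SESSION_SECRET_ALGORITHM])
assert claims['user_info'] == {'username': 'homer'}
# BuildbotSite.getSession() raises KeyError for anything it cannot decode,
# including expired tokens and payloads without a 'user_info' key.
# ---------------------------------------------------------------------------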
# # Copyright Buildbot Team Members import datetime import json from twisted.trial import unittest from buildbot.test.unit import test_data_changes from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import unicode2bytes from buildbot.www import sse class EventResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = master = self.make_master(url=b'h:/a/b/') self.sse = sse.EventResource(master) def test_simpleapi(self): self.render_resource(self.sse, b'/changes/*/*') self.readUUID(self.request) self.assertReceivesChangeNewMessage(self.request) self.assertEqual(self.request.finished, False) def test_listen(self): self.render_resource(self.sse, b'/listen/changes/*/*') self.readUUID(self.request) self.assertReceivesChangeNewMessage(self.request) self.assertEqual(self.request.finished, False) def test_listen_add_then_close(self): self.render_resource(self.sse, b'/listen') request = self.request self.request = None uuid = self.readUUID(request) self.render_resource(self.sse, b'/add/' + unicode2bytes(uuid) + b"/changes/*/*") self.assertReceivesChangeNewMessage(request) self.assertEqual(self.request.finished, True) self.assertEqual(request.finished, False) request.finish() # fake close connection on client side with self.assertRaises(AssertionError): self.assertReceivesChangeNewMessage(request) def test_listen_add_then_remove(self): self.render_resource(self.sse, b'/listen') request = self.request uuid = self.readUUID(request) self.render_resource(self.sse, b'/add/' + unicode2bytes(uuid) + b"/changes/*/*") self.assertReceivesChangeNewMessage(request) self.assertEqual(request.finished, False) self.render_resource(self.sse, b'/remove/' + unicode2bytes(uuid) + b"/changes/*/*") with self.assertRaises(AssertionError): self.assertReceivesChangeNewMessage(request) def test_listen_add_nouuid(self): self.render_resource(self.sse, b'/listen') request = self.request self.readUUID(request) self.render_resource(self.sse, b'/add/') self.assertEqual(self.request.finished, True) self.assertEqual(self.request.responseCode, 400) self.assertIn(b"need uuid", self.request.written) def test_listen_add_baduuid(self): self.render_resource(self.sse, b'/listen') request = self.request self.readUUID(request) self.render_resource(self.sse, b'/add/foo') self.assertEqual(self.request.finished, True) self.assertEqual(self.request.responseCode, 400) self.assertIn(b"unknown uuid", self.request.written) def readEvent(self, request): kw = {} hasEmptyLine = False for line in request.written.splitlines(): if line.find(b":") > 0: k, v = line.split(b": ", 1) self.assertTrue(k not in kw, k + b" in " + unicode2bytes(str(kw))) kw[k] = v else: self.assertEqual(line, b"") hasEmptyLine = True request.written = b"" self.assertTrue(hasEmptyLine) return kw def readUUID(self, request): kw = self.readEvent(request) self.assertEqual(kw[b"event"], b"handshake") return kw[b"data"] def assertReceivesChangeNewMessage(self, request): self.master.mq.callConsumer( ("changes", "500", "new"), test_data_changes.Change.changeEvent) kw = self.readEvent(request) self.assertEqual(kw[b"event"], b"event") msg = json.loads(bytes2unicode(kw[b"data"])) self.assertEqual(msg["key"], ['changes', '500', 'new']) self.assertEqual(msg["message"], json.loads( json.dumps(test_data_changes.Change.changeEvent, default=self._toJson))) def _toJson(self, obj): if isinstance(obj, 
datetime.datetime): return datetime2epoch(obj) buildbot-2.6.0/master/buildbot/test/unit/test_www_ws.py000066400000000000000000000077121361162603000233330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json from mock import Mock from twisted.trial import unittest from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.util import bytes2unicode from buildbot.www import ws class WsResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() self.master = master = self.make_master(url='h:/a/b/') self.ws = ws.WsResource(master) self.proto = self.ws._factory.buildProtocol("me") self.gotMsg = [] self.proto.sendMessage = Mock(spec=self.proto.sendMessage) def assert_called_with_json(self, obj, expected_json): jsonArg = obj.call_args[0][0] jsonArg = bytes2unicode(jsonArg) actual_json = json.loads(jsonArg) self.assertEqual(actual_json, expected_json) def test_ping(self): self.proto.onMessage(json.dumps(dict(cmd="ping", _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"msg": "pong", "code": 200, "_id": 1}) def test_bad_cmd(self): self.proto.onMessage(json.dumps(dict(cmd="poing", _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"_id": 1, "code": 404, "error": "no such command 'poing'"}) def test_no_cmd(self): self.proto.onMessage(json.dumps(dict(_id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"_id": None, "code": 400, "error": "no 'cmd' in websocket frame"}) def test_no_id(self): self.proto.onMessage(json.dumps(dict(cmd="ping")), False) self.assert_called_with_json(self.proto.sendMessage, {"_id": None, "code": 400, "error": "no '_id' in websocket frame"}) def test_startConsuming(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path="builds/*/*", _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 1}) self.master.mq.verifyMessages = False self.master.mq.callConsumer(("builds", "1", "new"), {"buildid": 1}) self.assert_called_with_json(self.proto.sendMessage, {"k": "builds/1/new", "m": {"buildid": 1}}) def test_startConsumingBadPath(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path={}, _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"_id": 1, "code": 400, "error": "invalid path format '{}'"}) def test_stopConsumingNotRegistered(self): self.proto.onMessage( json.dumps(dict(cmd="stopConsuming", path="builds/*/*", _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"_id": 1, "code": 400, "error": "path was not consumed \'builds/*/*\'"}) def test_stopConsuming(self): self.proto.onMessage( json.dumps(dict(cmd="startConsuming", path="builds/*/*", _id=1)), False) self.assert_called_with_json(self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 1}) 
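# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the frames exchanged
# with WsResource in the tests above.  Every client frame needs 'cmd' and
# '_id'; the path value here is only an example.
import json

start_consuming = json.dumps({"cmd": "startConsuming", "path": "builds/*/*", "_id": 1})
# server acknowledgement:  {"msg": "OK", "code": 200, "_id": 1}
# pushed event afterwards: {"k": "builds/1/new", "m": {"buildid": 1}}
stop_consuming = json.dumps({"cmd": "stopConsuming", "path": "builds/*/*", "_id": 2})
# ---------------------------------------------------------------------------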
self.proto.onMessage( json.dumps(dict(cmd="stopConsuming", path="builds/*/*", _id=2)), False) self.assert_called_with_json(self.proto.sendMessage, {"msg": "OK", "code": 200, "_id": 2}) buildbot-2.6.0/master/buildbot/test/util/000077500000000000000000000000001361162603000203545ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/util/__init__.py000066400000000000000000000000001361162603000224530ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/test/util/changesource.py000066400000000000000000000070531361162603000234010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from buildbot.test.fake import fakemaster class ChangeSourceMixin: """ This class is used for testing change sources, and handles a few things: - starting and stopping a ChangeSource service - a fake master with a data API implementation """ changesource = None started = False DUMMY_CHANGESOURCE_ID = 20 OTHER_MASTER_ID = 93 DEFAULT_NAME = "ChangeSource" def setUpChangeSource(self): "Set up the mixin - returns a deferred." self.master = fakemaster.make_master(self, wantDb=True, wantData=True) assert not hasattr(self.master, 'addChange') # just checking.. return defer.succeed(None) @defer.inlineCallbacks def tearDownChangeSource(self): "Tear down the mixin - returns a deferred." 
if not self.started: return if self.changesource.running: yield defer.maybeDeferred(self.changesource.stopService) yield self.changesource.disownServiceParent() return def attachChangeSource(self, cs): "Set up a change source for testing; sets its .master attribute" self.changesource = cs # FIXME some changesource does not have master property yet but # mailchangesource has :-/ try: self.changesource.master = self.master except AttributeError: self.changesource.setServiceParent(self.master) # configure the service to let secret manager render the secrets d = self.changesource.configureService() d.addErrback(lambda _: None) # also, now that changesources are ClusteredServices, setting up # the clock here helps in the unit tests that check that behavior self.changesource.clock = task.Clock() def startChangeSource(self): "start the change source as a service" self.started = True return self.changesource.startService() @defer.inlineCallbacks def stopChangeSource(self): "stop the change source again; returns a deferred" yield self.changesource.stopService() self.started = False def setChangeSourceToMaster(self, otherMaster): # some tests build the CS late, so for those tests we will require that # they use the default name in order to run tests that require master # assignments if self.changesource is not None: name = self.changesource.name else: name = self.DEFAULT_NAME self.master.data.updates.changesourceIds[ name] = self.DUMMY_CHANGESOURCE_ID if otherMaster: self.master.data.updates.changesourceMasters[ self.DUMMY_CHANGESOURCE_ID] = otherMaster else: del self.master.data.updates.changesourceMasters[ self.DUMMY_CHANGESOURCE_ID] buildbot-2.6.0/master/buildbot/test/util/config.py000066400000000000000000000044371361162603000222030ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
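# ---------------------------------------------------------------------------
# Hypothetical sketch (not part of the original module) of a test case built
# on ChangeSourceMixin; the GitPoller and its repository URL are illustrative
# stand-ins for whatever change source is under test.
from twisted.internet import defer
from twisted.trial import unittest

from buildbot.changes.gitpoller import GitPoller
from buildbot.test.util import changesource


class ExampleChangeSourceTest(changesource.ChangeSourceMixin, unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        yield self.setUpChangeSource()
        self.attachChangeSource(GitPoller('git://example.org/repo.git'))

    def tearDown(self):
        return self.tearDownChangeSource()

    @defer.inlineCallbacks
    def test_start_and_stop(self):
        yield self.startChangeSource()
        self.assertTrue(self.changesource.running)
        yield self.stopChangeSource()
# ---------------------------------------------------------------------------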
# # Copyright Buildbot Team Members from buildbot import config class _AssertRaisesConfigErrorContext: def __init__(self, substr_or_re, case): self.substr_or_re = substr_or_re self.case = case def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): if exc_type is None: self.case.fail("ConfigErrors not raised") if not issubclass(exc_type, config.ConfigErrors): self.case.fail("ConfigErrors not raised, instead got {0}".format( exc_type.__name__)) self.case.assertConfigError(exc_value, self.substr_or_re) return True class ConfigErrorsMixin: def assertConfigError(self, errors, substr_or_re): if len(errors.errors) > 1: self.fail("too many errors: %s" % (errors.errors,)) elif not errors.errors: self.fail("expected error did not occur") else: curr_error = errors.errors[0] if isinstance(substr_or_re, str): if substr_or_re not in curr_error: self.fail("non-matching error: %s, " "expected: %s" % (curr_error, substr_or_re)) else: if not substr_or_re.search(curr_error): self.fail("non-matching error: %s" % (curr_error,)) def assertRaisesConfigError(self, substr_or_re, fn=None): context = _AssertRaisesConfigErrorContext(substr_or_re, self) if fn is None: return context with context: fn() def assertNoConfigErrors(self, errors): self.assertEqual(errors.errors, []) buildbot-2.6.0/master/buildbot/test/util/configurators.py000066400000000000000000000051671361162603000236240ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.config import MasterConfig class ConfiguratorMixin: """ Support for testing configurators. 
@ivar configurator: the configurator under test @ivar config_dict: the config dict that the configurator is modifying """ def setUp(self): self.config_dict = {} def setupConfigurator(self, *args, **kwargs): self.configurator = self.ConfiguratorClass(*args, **kwargs) return self.configurator.configure(self.config_dict) def expectWorker(self, name, klass): if 'workers' in self.config_dict and 'slaves' in self.config_dict: self.fail("both 'workers' and 'slaves' are in the config dict!") for worker in self.config_dict.get('workers', []) + self.config_dict.get('slaves', []): if isinstance(worker, klass) and worker.name == name: return worker self.fail("expected a worker named {} of class {}".format(name, klass)) def expectScheduler(self, name, klass): for scheduler in self.config_dict['schedulers']: if scheduler.name == name and isinstance(scheduler, klass): return scheduler self.fail("expected a scheduler named {} of class {}".format(name, klass)) def expectBuilder(self, name): for builder in self.config_dict['builders']: if builder.name == name: return builder self.fail("expected a builder named {}".format(name)) def expectBuilderHasSteps(self, name, step_classes): builder = self.expectBuilder(name) for step_class in step_classes: found = [ step for step in builder.factory.steps if step.factory == step_class ] if not found: self.fail("expected a buildstep of {!r} in {}".format(step_class, name)) def expectNoConfigError(self): config = MasterConfig() config.loadFromDict(self.config_dict, "test") buildbot-2.6.0/master/buildbot/test/util/connector_component.py000066400000000000000000000044011361162603000250010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import types from twisted.internet import defer from buildbot.db import model from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util.misc import TestReactorMixin class FakeDBConnector: pass class ConnectorComponentMixin(TestReactorMixin, db.RealDatabaseMixin): """ Implements a mock DBConnector object, replete with a thread pool and a DB model. This includes a RealDatabaseMixin, so subclasses should not instantiate that class directly. The connector appears at C{self.db}, and the component should be attached to it as an attribute. 
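# ---------------------------------------------------------------------------
# Hypothetical sketch (not part of the original module) of a configurator
# test using ConfiguratorMixin; ExampleConfigurator is a made-up minimal
# configurator, not a Buildbot class.
from twisted.trial import unittest

from buildbot.configurators import ConfiguratorBase
from buildbot.test.util import configurators


class ExampleConfigurator(ConfiguratorBase):
    """toy configurator that only guarantees a 'workers' list exists"""

    def configure(self, config_dict):
        config_dict.setdefault('workers', [])


class ExampleConfiguratorTest(configurators.ConfiguratorMixin,
                              unittest.SynchronousTestCase):
    ConfiguratorClass = ExampleConfigurator

    def test_configure(self):
        self.setupConfigurator()
        self.assertEqual(self.config_dict, {'workers': []})
# ---------------------------------------------------------------------------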
@ivar db: fake database connector @ivar db.pool: DB thread pool @ivar db.model: DB model """ @defer.inlineCallbacks def setUpConnectorComponent(self, table_names=None, basedir='basedir', dialect_name='sqlite'): self.setUpTestReactor() """Set up C{self.db}, using the given db_url and basedir.""" if table_names is None: table_names = [] yield self.setUpRealDatabase(table_names=table_names, basedir=basedir) self.db = FakeDBConnector() self.db.pool = self.db_pool self.db.master = fakemaster.make_master(self) self.db.model = model.Model(self.db) self.db._engine = types.SimpleNamespace(dialect=types.SimpleNamespace(name=dialect_name)) @defer.inlineCallbacks def tearDownConnectorComponent(self): yield self.tearDownRealDatabase() # break some reference loops, just for fun del self.db.pool del self.db.model del self.db buildbot-2.6.0/master/buildbot/test/util/db.py000066400000000000000000000263361361162603000213250ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from sqlalchemy.schema import MetaData from twisted.internet import defer from twisted.internet import reactor from twisted.python import log from twisted.trial import unittest from buildbot.db import enginestrategy from buildbot.db import model from buildbot.db import pool from buildbot.db.connector import DBConnector from buildbot.util.sautils import sa_version from buildbot.util.sautils import withoutSqliteForeignKeys def skip_for_dialect(dialect): """Decorator to skip a test for a particular SQLAlchemy dialect.""" def dec(fn): def wrap(self, *args, **kwargs): if self.db_engine.dialect.name == dialect: raise unittest.SkipTest( "Not supported on dialect '%s'" % dialect) return fn(self, *args, **kwargs) return wrap return dec class RealDatabaseMixin: """ A class that sets up a real database for testing. This sets self.db_url to the URL for the database. By default, it specifies an in-memory SQLite database, but if the BUILDBOT_TEST_DB_URL environment variable is set, it will use the specified database, being careful to clean out *all* tables in the database before and after the tests are run - so each test starts with a clean database. @ivar db_pool: a (real) DBThreadPool instance that can be used as desired @ivar db_url: the DB URL used to run these tests @ivar db_engine: the engine created for the test database Note that this class uses the production database model. A re-implementation would be virtually identical and just require extra work to keep synchronized. Similarly, this class uses the production DB thread pool. This achieves a few things: - affords more thorough tests for the pool - avoids repetitive implementation - cooperates better at runtime with thread-sensitive DBAPI's Finally, it duplicates initialization performed in db.connector.DBConnector.setup(). 
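# ---------------------------------------------------------------------------
# Hypothetical sketch (not part of the original module) of a connector
# component test; the 'changes' component and the table list are illustrative.
from twisted.internet import defer
from twisted.trial import unittest

from buildbot.db import changes
from buildbot.test.util import connector_component


class ExampleComponentTest(connector_component.ConnectorComponentMixin,
                           unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        yield self.setUpConnectorComponent(
            table_names=['changes', 'change_files', 'change_properties',
                         'sourcestamps', 'patches'])
        self.db.changes = changes.ChangesConnectorComponent(self.db)

    def tearDown(self):
        return self.tearDownConnectorComponent()
# ---------------------------------------------------------------------------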
Never call that method in tests that use RealDatabaseMixin, use RealDatabaseWithConnectorMixin. """ def __thd_clean_database(self, conn): # In general it's nearly impossible to do "bullet proof" database # cleanup with SQLAlchemy that will work on a range of databases # and they configurations. # # Following approaches were considered. # # 1. Drop Buildbot Model schema: # # model.Model.metadata.drop_all(bind=conn, checkfirst=True) # # Dropping schema from model is correct and working operation only # if database schema is exactly corresponds to the model schema. # # If it is not (e.g. migration script failed or migration results in # old version of model), then some tables outside model schema may be # present, which may reference tables in the model schema. # In this case either dropping model schema will fail (if database # enforces referential integrity, e.g. PostgreSQL), or # dropping left tables in the code below will fail (if database allows # removing of tables on which other tables have references, # e.g. SQLite). # # 2. Introspect database contents and drop found tables. # # meta = MetaData(bind=conn) # meta.reflect() # meta.drop_all() # # May fail if schema contains reference cycles (and Buildbot schema # has them). Reflection looses metadata about how reference cycles # can be teared up (e.g. use_alter=True). # Introspection may fail if schema has invalid references # (e.g. possible in SQLite). # # 3. What is actually needed here is accurate code for each engine # and each engine configuration that will drop all tables, # indexes, constraints, etc in proper order or in a proper way # (using tables alternation, or DROP TABLE ... CASCADE, etc). # # Conclusion: use approach 2 with manually teared apart known # reference cycles. # pylint: disable=too-many-nested-blocks try: meta = MetaData(bind=conn) # Reflect database contents. May fail, e.g. if table references # non-existent table in SQLite. meta.reflect() # Table.foreign_key_constraints introduced in SQLAlchemy 1.0. if sa_version()[:2] >= (1, 0): # Restore `use_alter` settings to break known reference cycles. # Main goal of this part is to remove SQLAlchemy warning # about reference cycle. # Looks like it's OK to do it only with SQLAlchemy >= 1.0.0, # since it's not issued in SQLAlchemy == 0.8.0 # List of reference links (table_name, ref_table_name) that # should be broken by adding use_alter=True. table_referenced_table_links = [ ('buildsets', 'builds'), ('builds', 'buildrequests')] for table_name, ref_table_name in table_referenced_table_links: if table_name in meta.tables: table = meta.tables[table_name] for fkc in table.foreign_key_constraints: if fkc.referred_table.name == ref_table_name: fkc.use_alter = True # Drop all reflected tables and indices. May fail, e.g. if # SQLAlchemy wouldn't be able to break circular references. 
# Sqlalchemy fk support with sqlite is not yet perfect, so we must deactivate fk during that # operation, even though we made our possible to use use_alter with withoutSqliteForeignKeys(conn.engine, conn): meta.drop_all() except Exception: # sometimes this goes badly wrong; being able to see the schema # can be a big help if conn.engine.dialect.name == 'sqlite': r = conn.execute("select sql from sqlite_master " "where type='table'") log.msg("Current schema:") for row in r.fetchall(): log.msg(row.sql) raise def __thd_create_tables(self, conn, table_names): table_names_set = set(table_names) tables = [t for t in model.Model.metadata.tables.values() if t.name in table_names_set] # Create tables using create_all() method. This way not only tables # and direct indices are created, but also deferred references # (that use use_alter=True in definition). model.Model.metadata.create_all( bind=conn, tables=tables, checkfirst=True) @defer.inlineCallbacks def setUpRealDatabase(self, table_names=None, basedir='basedir', want_pool=True, sqlite_memory=True): """ Set up a database. Ordinarily sets up an engine and a pool and takes care of cleaning out any existing tables in the database. If C{want_pool} is false, then no pool will be created, and the database will not be cleaned. @param table_names: list of names of tables to instantiate @param basedir: (optional) basedir for the engine @param want_pool: (optional) false to not create C{self.db_pool} @param sqlite_memory: (optional) False to avoid using an in-memory db @returns: Deferred """ if table_names is None: table_names = [] self.__want_pool = want_pool default_sqlite = 'sqlite://' self.db_url = os.environ.get('BUILDBOT_TEST_DB_URL', default_sqlite) if not sqlite_memory and self.db_url == default_sqlite: self.db_url = "sqlite:///tmp.sqlite" if not os.path.exists(basedir): os.makedirs(basedir) self.basedir = basedir self.db_engine = enginestrategy.create_engine(self.db_url, basedir=basedir) # if the caller does not want a pool, we're done. if not want_pool: return None self.db_pool = pool.DBThreadPool(self.db_engine, reactor=reactor) log.msg("cleaning database %s" % self.db_url) yield self.db_pool.do(self.__thd_clean_database) yield self.db_pool.do(self.__thd_create_tables, table_names) @defer.inlineCallbacks def tearDownRealDatabase(self): if self.__want_pool: yield self.db_pool.do(self.__thd_clean_database) yield self.db_pool.shutdown() @defer.inlineCallbacks def insertTestData(self, rows): """Insert test data into the database for use during the test. @param rows: be a sequence of L{fakedb.Row} instances. These will be sorted by table dependencies, so order does not matter. @returns: Deferred """ # sort the tables by dependency all_table_names = {row.table for row in rows} ordered_tables = [t for t in model.Model.metadata.sorted_tables if t.name in all_table_names] def thd(conn): # insert into tables -- in order for tbl in ordered_tables: for row in [r for r in rows if r.table == tbl.name]: tbl = model.Model.metadata.tables[row.table] try: tbl.insert(bind=conn).execute(row.values) except Exception: log.msg("while inserting %s - %s" % (row, row.values)) raise yield self.db_pool.do(thd) class RealDatabaseWithConnectorMixin(RealDatabaseMixin): # Same as RealDatabaseMixin, except that a real DBConnector is also setup in a correct way. 
    @defer.inlineCallbacks
    def setUpRealDatabaseWithConnector(self, master, table_names=None, basedir='basedir',
                                       want_pool=True, sqlite_memory=True):
        yield self.setUpRealDatabase(table_names, basedir, want_pool, sqlite_memory)
        master.config.db['db_url'] = self.db_url
        master.db = DBConnector(self.basedir)
        yield master.db.setServiceParent(master)
        master.db.pool = self.db_pool

    def tearDownRealDatabaseWithConnector(self):
        return self.tearDownRealDatabase()


class TestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def assertFailure(self, d, excp):
        exception = None
        try:
            yield d
        except Exception as e:
            exception = e
        self.assertIsInstance(exception, excp)
        self.flushLoggedErrors(excp)

buildbot-2.6.0/master/buildbot/test/util/decorators.py
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

"""
Various decorators for test cases
"""
import os
import sys

from twisted.python import runtime

_FLAKY_ENV_VAR = 'RUN_FLAKY_TESTS'


def todo(message):
    """
    decorator to mark a todo test
    """
    def wrap(func):
        """
        just mark the test
        """
        func.todo = message
        return func
    return wrap


def flaky(bugNumber=None, issueNumber=None, onPlatform=None):
    def wrap(fn):
        if onPlatform is not None and sys.platform != onPlatform:
            return fn
        if os.environ.get(_FLAKY_ENV_VAR):
            return fn
        if bugNumber is not None:
            fn.skip = ("Flaky test (http://trac.buildbot.net/ticket/%d) "
                       "- set $%s to run anyway" % (bugNumber, _FLAKY_ENV_VAR))
        if issueNumber is not None:
            fn.skip = ("Flaky test (https://github.com/buildbot/buildbot/issues/%d) "
                       "- set $%s to run anyway" % (issueNumber, _FLAKY_ENV_VAR))
        return fn
    return wrap


def skipUnlessPlatformIs(platform):
    def closure(test):
        if runtime.platformType != platform:
            test.skip = "not a %s platform" % platform
        return test
    return closure


def skipIfPythonVersionIsLess(min_version_info):
    assert isinstance(min_version_info, tuple)

    def closure(test):
        if sys.version_info < min_version_info:
            test.skip = "requires Python >= {0}".format(min_version_info)
        return test
    return closure

buildbot-2.6.0/master/buildbot/test/util/dirs.py
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
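# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the decorators
# defined above applied to a test case; the issue number is made up.
from twisted.trial import unittest

from buildbot.test.util.decorators import flaky
from buildbot.test.util.decorators import skipUnlessPlatformIs


class ExampleDecoratedTest(unittest.TestCase):

    @flaky(issueNumber=9999)
    def test_sometimes_fails(self):
        pass

    @skipUnlessPlatformIs('posix')
    def test_posix_only(self):
        pass
# ---------------------------------------------------------------------------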
# # Copyright Buildbot Team Members import os import shutil from twisted.internet import defer class DirsMixin: _dirs = None def setUpDirs(self, *dirs): """Make sure C{dirs} exist and are empty, and set them up to be deleted in tearDown.""" self._dirs = map(os.path.abspath, dirs) for dir in self._dirs: if os.path.exists(dir): shutil.rmtree(dir) os.makedirs(dir) # return a deferred to make chaining easier return defer.succeed(None) def tearDownDirs(self): for dir in self._dirs: if os.path.exists(dir): shutil.rmtree(dir) # return a deferred to make chaining easier return defer.succeed(None) buildbot-2.6.0/master/buildbot/test/util/endpoint.py000066400000000000000000000101241361162603000225440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.data import base from buildbot.data import resultspec from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces from buildbot.test.util import validation from buildbot.test.util.misc import TestReactorMixin from buildbot.util import pathmatch class EndpointMixin(TestReactorMixin, interfaces.InterfaceTests): # test mixin for testing Endpoint subclasses # class being tested endpointClass = None # the corresponding resource type - this will be instantiated at # self.data.rtypes[rtype.type] and self.rtype resourceTypeClass = None def setUpEndpoint(self): self.setUpTestReactor() self.master = fakemaster.make_master(self, wantMq=True, wantDb=True, wantData=True) self.db = self.master.db self.mq = self.master.mq self.data = self.master.data self.matcher = pathmatch.Matcher() rtype = self.rtype = self.resourceTypeClass(self.master) setattr(self.data.rtypes, rtype.name, rtype) self.ep = self.endpointClass(rtype, self.master) # this usually fails when a single-element pathPattern does not have a # trailing comma pathPatterns = self.ep.pathPatterns.split() for pp in pathPatterns: if pp == '/': continue if not pp.startswith('/') or pp.endswith('/'): raise AssertionError("invalid pattern %r" % (pp,)) pathPatterns = [tuple(pp.split('/')[1:]) for pp in pathPatterns] for pp in pathPatterns: self.matcher[pp] = self.ep self.pathArgs = [ {arg.split(':', 1)[1] for arg in pp if ':' in arg} for pp in pathPatterns if pp is not None] def tearDownEndpoint(self): pass def validateData(self, object): validation.verifyData(self, self.rtype.entityType, {}, object) # call methods, with extra checks @defer.inlineCallbacks def callGet(self, path, resultSpec=None): self.assertIsInstance(path, tuple) if resultSpec is None: resultSpec = resultspec.ResultSpec() endpoint, kwargs = self.matcher[path] self.assertIdentical(endpoint, self.ep) rv = yield endpoint.get(resultSpec, kwargs) if self.ep.isCollection: self.assertIsInstance(rv, (list, base.ListResult)) else: self.assertIsInstance(rv, (dict, type(None))) return rv def callControl(self, action, 
args, path): self.assertIsInstance(path, tuple) endpoint, kwargs = self.matcher[path] self.assertIdentical(endpoint, self.ep) d = self.ep.control(action, args, kwargs) self.assertIsInstance(d, defer.Deferred) return d # interface tests def test_get_spec(self): @self.assertArgSpecMatches(self.ep.get) def get(self, resultSpec, kwargs): pass def test_control_spec(self): @self.assertArgSpecMatches(self.ep.control) def control(self, action, args, kwargs): pass def test_rootLinkName(self): rootLinkName = self.ep.rootLinkName if not rootLinkName: return try: self.assertEqual(self.matcher[(rootLinkName,)][0], self.ep) except KeyError: self.fail('No match for rootlink: ' + rootLinkName) buildbot-2.6.0/master/buildbot/test/util/fuzz.py000066400000000000000000000024331361162603000217260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import defer from twisted.internet import reactor from twisted.trial import unittest class FuzzTestCase(unittest.TestCase): # run each test case for 10s FUZZ_TIME = 10 @defer.inlineCallbacks def test_fuzz(self): # note that this will loop if do_fuzz doesn't take long enough endTime = reactor.seconds() + self.FUZZ_TIME while reactor.seconds() < endTime: yield self.do_fuzz(endTime) # delete this test case entirely if fuzzing is not enabled if 'BUILDBOT_FUZZ' not in os.environ: del test_fuzz buildbot-2.6.0/master/buildbot/test/util/gpo.py000066400000000000000000000100161361162603000215110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
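# ---------------------------------------------------------------------------
# Hypothetical sketch (not part of the original module) of an endpoint test
# built on EndpointMixin; the masters endpoint is used only as an example
# resource type.
from twisted.internet import defer
from twisted.trial import unittest

from buildbot.data import masters
from buildbot.test.util import endpoint


class ExampleEndpointTest(endpoint.EndpointMixin, unittest.TestCase):
    endpointClass = masters.MasterEndpoint
    resourceTypeClass = masters.Master

    def setUp(self):
        self.setUpEndpoint()

    def tearDown(self):
        self.tearDownEndpoint()

    @defer.inlineCallbacks
    def test_get_missing(self):
        # no masters were inserted into the fake db, so this yields None
        master = yield self.callGet(('masters', 99))
        self.assertIsNone(master)
# ---------------------------------------------------------------------------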
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import utils def _check_env_is_expected(test, expected_env, env): if expected_env is None: return env = env or {} for var, value in expected_env.items(): test.assertEqual(env.get(var), value, 'Expected environment to have %s = %r' % (var, value)) class Expect: _stdout = b"" _stderr = b"" _exit = 0 _path = None _env = None def __init__(self, bin, *args): self._bin = bin self._args = args def stdout(self, stdout): assert(isinstance(stdout, bytes)) self._stdout = stdout return self def stderr(self, stderr): assert(isinstance(stderr, bytes)) self._stderr = stderr return self def exit(self, exit): self._exit = exit return self def path(self, path): self._path = path return self def env(self, env): self._env = env return self def check(self, test, bin, path, args, env): test.assertDictEqual( dict(bin=bin, path=path, args=tuple(args)), dict(bin=self._bin, path=self._path, args=self._args), "unexpected command run") _check_env_is_expected(test, self._env, env) return (self._stdout, self._stderr, self._exit) def __repr__(self): return "" % (self._bin, self._args) class GetProcessOutputMixin: longMessage = True def setUpGetProcessOutput(self): self._gpo_patched = False self._expected_commands = [] self._gpo_expect_env = {} def assertAllCommandsRan(self): self.assertEqual(self._expected_commands, [], "assert all expected commands were run") @defer.inlineCallbacks def patched_getProcessOutput(self, bin, args, env=None, errortoo=False, path=None): stdout, stderr, exit = \ yield self.patched_getProcessOutputAndValue(bin, args, env=env, path=path) if errortoo: return stdout + stderr if stderr: raise IOError("got stderr: %r" % (stderr,)) return stdout def patched_getProcessOutputAndValue(self, bin, args, env=None, path=None): _check_env_is_expected(self, self._gpo_expect_env, env) if not self._expected_commands: self.fail("got command %s %s when no further commands were expected" % (bin, args)) expect = self._expected_commands.pop(0) return defer.succeed(expect.check(self, bin, path, args, env)) def _patch_gpo(self): if not self._gpo_patched: self.patch(utils, "getProcessOutput", self.patched_getProcessOutput) self.patch(utils, "getProcessOutputAndValue", self.patched_getProcessOutputAndValue) self._gpo_patched = True def addGetProcessOutputExpectEnv(self, d): self._gpo_expect_env.update(d) def expectCommands(self, *exp): """ Add to the expected commands, along with their results. Each argument should be an instance of L{Expect}. """ self._patch_gpo() self._expected_commands.extend(exp) buildbot-2.6.0/master/buildbot/test/util/integration.py000066400000000000000000000306471361162603000232630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
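# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): faking an external
# command with GetProcessOutputMixin; the git invocation and its output are
# example values.
from twisted.internet import defer
from twisted.internet import utils
from twisted.trial import unittest

from buildbot.test.util import gpo


class ExampleGPOTest(gpo.GetProcessOutputMixin, unittest.TestCase):

    def setUp(self):
        self.setUpGetProcessOutput()

    @defer.inlineCallbacks
    def test_git_version(self):
        self.expectCommands(
            gpo.Expect('git', '--version').stdout(b'git version 2.24.0\n'))
        out = yield utils.getProcessOutput('git', ['--version'])
        self.assertEqual(out, b'git version 2.24.0\n')
        self.assertAllCommandsRan()
# ---------------------------------------------------------------------------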
# # Copyright Buildbot Team Members import os import re import sys from io import StringIO import mock from twisted.internet import defer from twisted.internet import reactor from twisted.python.filepath import FilePath from twisted.trial import unittest from zope.interface import implementer from buildbot.config import MasterConfig from buildbot.data import resultspec from buildbot.interfaces import IConfigLoader from buildbot.master import BuildMaster from buildbot.plugins import worker from buildbot.process.properties import Interpolate from buildbot.process.results import SUCCESS from buildbot.process.results import statusToString from buildbot.test.util.misc import DebugIntegrationLogsMixin from buildbot.test.util.misc import TestReactorMixin from buildbot.test.util.sandboxed_worker import SandboxedWorker from buildbot.worker.local import LocalWorker try: from buildbot_worker.bot import Worker except ImportError: Worker = None @implementer(IConfigLoader) class DictLoader: def __init__(self, config_dict): self.config_dict = config_dict def loadConfig(self): return MasterConfig.loadFromDict(self.config_dict, '') @defer.inlineCallbacks def getMaster(case, reactor, config_dict): """ Create a started ``BuildMaster`` with the given configuration. """ basedir = FilePath(case.mktemp()) basedir.createDirectory() config_dict['buildbotNetUsageData'] = None master = BuildMaster( basedir.path, reactor=reactor, config_loader=DictLoader(config_dict)) if 'db_url' not in config_dict: config_dict['db_url'] = 'sqlite://' # TODO: Allow BuildMaster to transparently upgrade the database, at least # for tests. master.config.db['db_url'] = config_dict['db_url'] yield master.db.setup(check_version=False) yield master.db.model.upgrade() master.db.setup = lambda: None yield master.startService() case.addCleanup(master.db.pool.shutdown) case.addCleanup(master.stopService) return master class RunFakeMasterTestCase(unittest.TestCase, TestReactorMixin, DebugIntegrationLogsMixin): def setUp(self): self.setUpTestReactor() self.setupDebugIntegrationLogs() def tearDown(self): self.assertFalse(self.master.running, "master is still running!") @defer.inlineCallbacks def getMaster(self, config_dict): self.master = yield getMaster(self, self.reactor, config_dict) return self.master @defer.inlineCallbacks def reconfigMaster(self, config_dict): self.master.config_loader.config_dict = config_dict yield self.master.doReconfig() def createLocalWorker(self, name, **kwargs): workdir = FilePath(self.mktemp()) workdir.createDirectory() return LocalWorker(name, workdir.path, **kwargs) @defer.inlineCallbacks def assertBuildResults(self, build_id, result): dbdict = yield self.master.db.builds.getBuild(build_id) self.assertEqual(result, dbdict['results']) @defer.inlineCallbacks def createBuildrequest(self, master, builder_ids, properties=None): properties = properties.asDict() if properties is not None else None ret = yield master.data.updates.addBuildset( waited_for=False, builderids=builder_ids, sourcestamps=[ {'codebase': '', 'repository': '', 'branch': None, 'revision': None, 'project': ''}, ], properties=properties, ) return ret class RunMasterBase(unittest.TestCase): proto = "null" if Worker is None: skip = "buildbot-worker package is not installed" @defer.inlineCallbacks def setupConfig(self, config_dict, startWorker=True): """ Setup and start a master configured by the function configFunc defined in the test module. @type config_dict: dict @param configFunc: The BuildmasterConfig dictionary. 
""" # mock reactor.stop (which trial *really* doesn't # like test code to call!) stop = mock.create_autospec(reactor.stop) self.patch(reactor, 'stop', stop) if startWorker: if self.proto == 'pb': proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}} workerclass = worker.Worker elif self.proto == 'null': proto = {"null": {}} workerclass = worker.LocalWorker config_dict['workers'] = [workerclass("local1", password=Interpolate("localpw"), missing_timeout=0)] config_dict['protocols'] = proto m = yield getMaster(self, reactor, config_dict) self.master = m self.assertFalse(stop.called, "startService tried to stop the reactor; check logs") if not startWorker: return if self.proto == 'pb': # We find out the worker port automatically workerPort = list(m.pbmanager.dispatchers.values())[ 0].port.getHost().port # create a worker, and attach it to the master, it will be started, and stopped # along with the master worker_dir = FilePath(self.mktemp()) worker_dir.createDirectory() sandboxed_worker_path = os.environ.get( "SANDBOXED_WORKER_PATH", None) if sandboxed_worker_path is None: self.w = Worker( "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path, False) else: self.w = SandboxedWorker( "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path, sandboxed_worker_path) self.addCleanup(self.w.shutdownWorker) elif self.proto == 'null': self.w = None if self.w is not None: yield self.w.setServiceParent(m) @defer.inlineCallbacks def dump(): if not self._passed: dump = StringIO() print(u"FAILED! dumping build db for debug", file=dump) builds = yield self.master.data.get(("builds",)) for build in builds: yield self.printBuild(build, dump, withLogs=True) raise self.failureException(dump.getvalue()) self.addCleanup(dump) @defer.inlineCallbacks def doForceBuild(self, wantSteps=False, wantProperties=False, wantLogs=False, useChange=False, forceParams=None, triggerCallback=None): if forceParams is None: forceParams = {} # force a build, and wait until it is finished d = defer.Deferred() # in order to allow trigger based integration tests # we wait until the first started build is finished self.firstbsid = None def newCallback(_, data): if self.firstbsid is None: self.firstbsid = data['bsid'] newConsumer.stopConsuming() def finishedCallback(_, data): if self.firstbsid == data['bsid']: d.callback(data) newConsumer = yield self.master.mq.startConsuming( newCallback, ('buildsets', None, 'new')) finishedConsumer = yield self.master.mq.startConsuming( finishedCallback, ('buildsets', None, 'complete')) if triggerCallback is not None: yield triggerCallback() elif useChange is False: # use data api to force a build yield self.master.data.control("force", forceParams, ("forceschedulers", "force")) else: # use data api to force a build, via a new change yield self.master.data.updates.addChange(**useChange) # wait until we receive the build finished event buildset = yield d buildrequests = yield self.master.data.get( ('buildrequests',), filters=[resultspec.Filter('buildsetid', 'eq', [buildset['bsid']])]) buildrequest = buildrequests[-1] builds = yield self.master.data.get( ('builds',), filters=[resultspec.Filter('buildrequestid', 'eq', [buildrequest['buildrequestid']])]) # if the build has been retried, there will be several matching builds. 
# We return the last build build = builds[-1] finishedConsumer.stopConsuming() yield self.enrichBuild(build, wantSteps, wantProperties, wantLogs) return build @defer.inlineCallbacks def enrichBuild(self, build, wantSteps=False, wantProperties=False, wantLogs=False): # enrich the build result, with the step results if wantSteps: build["steps"] = yield self.master.data.get(("builds", build['buildid'], "steps")) # enrich the step result, with the logs results if wantLogs: build["steps"] = list(build["steps"]) for step in build["steps"]: step['logs'] = yield self.master.data.get(("steps", step['stepid'], "logs")) step["logs"] = list(step['logs']) for log in step["logs"]: log['contents'] = yield self.master.data.get(("logs", log['logid'], "contents")) if wantProperties: build["properties"] = yield self.master.data.get(("builds", build['buildid'], "properties")) @defer.inlineCallbacks def printBuild(self, build, out=sys.stdout, withLogs=False): # helper for debugging: print a build yield self.enrichBuild(build, wantSteps=True, wantProperties=True, wantLogs=True) print(u"*** BUILD %d *** ==> %s (%s)" % (build['buildid'], build['state_string'], statusToString(build['results'])), file=out) for step in build['steps']: print(u" *** STEP %s *** ==> %s (%s)" % (step['name'], step['state_string'], statusToString(step['results'])), file=out) for url in step['urls']: print(u" url:%s (%s)" % (url['name'], url['url']), file=out) for log in step['logs']: print(u" log:%s (%d)" % (log['name'], log['num_lines']), file=out) if step['results'] != SUCCESS or withLogs: self.printLog(log, out) @defer.inlineCallbacks def checkBuildStepLogExist(self, build, expectedLog, onlyStdout=False, regex=False): yield self.enrichBuild(build, wantSteps=True, wantProperties=True, wantLogs=True) for step in build['steps']: for log in step['logs']: for line in log['contents']['content'].splitlines(): if onlyStdout and line[0] != 'o': continue if regex: if re.search(expectedLog, line): return True else: if expectedLog in line: return True return False def printLog(self, log, out): print(u" " * 8 + "*********** LOG: %s *********" % (log['name'],), file=out) if log['type'] == 's': for line in log['contents']['content'].splitlines(): linetype = line[0] line = line[1:] if linetype == 'h': # cyan line = "\x1b[36m" + line + "\x1b[0m" if linetype == 'e': # red line = "\x1b[31m" + line + "\x1b[0m" print(u" " * 8 + line) else: print(u"" + log['contents']['content'], file=out) print(u" " * 8 + "********************************", file=out) buildbot-2.6.0/master/buildbot/test/util/interfaces.py000066400000000000000000000113141361162603000230510ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
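# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# RunMasterBase helper defined above in integration.py.  The builder name,
# command and configuration are invented for the example; doForceBuild()
# assumes a ForceScheduler named "force" is configured.

from twisted.internet import defer

from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.process.factory import BuildFactory
from buildbot.process.results import SUCCESS
from buildbot.test.util.integration import RunMasterBase


class ExampleRunMasterUsage(RunMasterBase):

    @defer.inlineCallbacks
    def test_force_build(self):
        f = BuildFactory()
        f.addStep(steps.ShellCommand(command='echo hello'))
        config_dict = {
            'schedulers': [schedulers.ForceScheduler(name="force",
                                                     builderNames=["testy"])],
            'builders': [BuilderConfig(name="testy", workernames=["local1"],
                                       factory=f)],
        }
        # starts a real master and a local worker named "local1"
        yield self.setupConfig(config_dict)
        build = yield self.doForceBuild(wantSteps=True, wantLogs=True)
        self.assertEqual(build['results'], SUCCESS)
# ---------------------------------------------------------------------------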
# # Copyright Buildbot Team Members import inspect import pkg_resources import zope.interface.interface from twisted.trial import unittest from zope.interface.interface import Attribute class InterfaceTests: # assertions def assertArgSpecMatches(self, actualMethod, *fakeMethods): """Usage:: @self.assertArgSpecMatches(obj.methodUnderTest) def methodTemplate(self, arg1, arg2): pass or, more useful when you will be faking out C{methodUnderTest}: self.assertArgSpecMatches(obj.methodUnderTest, self.fakeMethod) """ def filter(spec): # the tricky thing here is to align args and defaults, since the # defaults correspond to the *last* n elements of args. To make # things easier, we go in reverse, and keep a separate counter for # the defaults args = spec[0] defaults = list(spec[3] if spec[3] is not None else []) di = -1 for ai in range(len(args) - 1, -1, -1): arg = args[ai] if arg.startswith('_') or (arg == 'self' and ai == 0): del args[ai] if -di <= len(defaults): del defaults[di] di += 1 di -= 1 return (args, spec[1], spec[2], defaults or None) def remove_decorators(func): try: return func.__wrapped__ except AttributeError: return func def filter_argspec(func): return filter( inspect.getfullargspec(remove_decorators(func))) def assert_same_argspec(expected, actual): if expected != actual: msg = "Expected: %s; got: %s" % ( inspect.formatargspec(*expected), inspect.formatargspec(*actual)) self.fail(msg) actual_argspec = filter_argspec(actualMethod) for fakeMethod in fakeMethods: fake_argspec = filter_argspec(fakeMethod) assert_same_argspec(actual_argspec, fake_argspec) def assert_same_argspec_decorator(decorated): expected_argspec = filter_argspec(decorated) assert_same_argspec(expected_argspec, actual_argspec) # The decorated function works as usual. return decorated return assert_same_argspec_decorator def assertInterfacesImplemented(self, cls): "Given a class, assert that the zope.interface.Interfaces are implemented to specification." # see if this version of zope.interface is too old to run these tests zi_vers = pkg_resources.working_set.find( pkg_resources.Requirement.parse('zope.interface')).version if pkg_resources.parse_version(zi_vers) < pkg_resources.parse_version('4.1.1'): raise unittest.SkipTest( "zope.interfaces is too old to run this test") for interface in zope.interface.implementedBy(cls): for attr, template_argspec in interface.namesAndDescriptions(): if not hasattr(cls, attr): msg = "Expected: %r; to implement: %s as specified in %r" % ( cls, attr, interface) self.fail(msg) actual_argspec = getattr(cls, attr) if isinstance(template_argspec, Attribute): continue # else check method signatures while hasattr(actual_argspec, '__wrapped__'): actual_argspec = actual_argspec.__wrapped__ actual_argspec = zope.interface.interface.fromMethod( actual_argspec) if actual_argspec.getSignatureInfo() != template_argspec.getSignatureInfo(): msg = "%s: expected: %s; got: %s" % ( attr, template_argspec.getSignatureString(), actual_argspec.getSignatureString()) self.fail(msg) buildbot-2.6.0/master/buildbot/test/util/logging.py000066400000000000000000000035411361162603000223570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.python import log class LoggingMixin: def setUpLogging(self): self._logEvents = [] log.addObserver(self._logEvents.append) self.addCleanup(log.removeObserver, self._logEvents.append) def logContainsMessage(self, regexp): r = re.compile(regexp) for event in self._logEvents: msg = log.textFromEventDict(event) if msg is not None: assert not msg.startswith("Unable to format event"), msg if msg is not None and r.search(msg): return True return False def assertLogged(self, regexp): if not self.logContainsMessage(regexp): self.fail("%r not matched in log output.\n%s " % ( regexp, [log.textFromEventDict(e) for e in self._logEvents])) def assertNotLogged(self, regexp): if self.logContainsMessage(regexp): self.fail("%r matched in log output.\n%s " % ( regexp, [log.textFromEventDict(e) for e in self._logEvents])) def assertWasQuiet(self): self.assertEqual([ log.textFromEventDict(event) for event in self._logEvents], []) buildbot-2.6.0/master/buildbot/test/util/migration.py000066400000000000000000000077751361162603000227370ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import migrate import migrate.versioning.api import sqlalchemy as sa from twisted.internet import defer from twisted.python import log from buildbot.db import connector from buildbot.test.fake import fakemaster from buildbot.test.util import db from buildbot.test.util import dirs from buildbot.test.util import querylog from buildbot.test.util.misc import TestReactorMixin from buildbot.util import sautils # test_upgrade vs. migration tests # # test_upgrade is an integration test -- it tests the whole upgrade process, # including the code in model.py. Migrate tests are unit tests, and test a # single db upgrade script. 
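# Typical usage (illustrative sketch, not from the original sources; the
# version numbers and the bodies of the callbacks are invented):
#
#     class TestMigrateExample(MigrateTestMixin, unittest.TestCase):
#
#         def setUp(self):
#             return self.setUpMigrateTest()
#
#         def tearDown(self):
#             return self.tearDownMigrateTest()
#
#         def test_migration(self):
#             def setup_thd(conn):
#                 pass  # create the pre-upgrade tables and rows here
#
#             def verify_thd(engine):
#                 metadata = sa.MetaData()
#                 metadata.bind = engine
#                 # reflect the upgraded tables and assert on them here
#
#             return self.do_test_migration(44, 45, setup_thd, verify_thd)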
class MigrateTestMixin(TestReactorMixin, db.RealDatabaseMixin, dirs.DirsMixin): @defer.inlineCallbacks def setUpMigrateTest(self): self.setUpTestReactor() self.basedir = os.path.abspath("basedir") self.setUpDirs('basedir') yield self.setUpRealDatabase() master = fakemaster.make_master(self) self.db = connector.DBConnector(self.basedir) yield self.db.setServiceParent(master) self.db.pool = self.db_pool def tearDownMigrateTest(self): self.tearDownDirs() return self.tearDownRealDatabase() @defer.inlineCallbacks def do_test_migration(self, base_version, target_version, setup_thd_cb, verify_thd_cb): def setup_thd(conn): metadata = sa.MetaData() table = sautils.Table( 'migrate_version', metadata, sa.Column('repository_id', sa.String(250), primary_key=True), sa.Column('repository_path', sa.Text), sa.Column('version', sa.Integer), ) table.create(bind=conn) conn.execute(table.insert(), repository_id='Buildbot', repository_path=self.db.model.repo_path, version=base_version) setup_thd_cb(conn) yield self.db.pool.do(setup_thd) def upgrade_thd(engine): with querylog.log_queries(): schema = migrate.versioning.schema.ControlledSchema( engine, self.db.model.repo_path) changeset = schema.changeset(target_version) with sautils.withoutSqliteForeignKeys(engine): for version, change in changeset: log.msg('upgrading to schema version %d' % (version + 1)) schema.runchange(version, change, 1) yield self.db.pool.do_with_engine(upgrade_thd) def check_table_charsets_thd(engine): # charsets are only a problem for MySQL if engine.dialect.name != 'mysql': return dbs = [r[0] for r in engine.execute("show tables")] for tbl in dbs: r = engine.execute("show create table %s" % tbl) create_table = r.fetchone()[1] self.assertIn('DEFAULT CHARSET=utf8', create_table, "table %s does not have the utf8 charset" % tbl) yield self.db.pool.do(check_table_charsets_thd) def verify_thd(engine): with sautils.withoutSqliteForeignKeys(engine): verify_thd_cb(engine) yield self.db.pool.do(verify_thd) buildbot-2.6.0/master/buildbot/test/util/misc.py000066400000000000000000000132051361162603000216620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os import sys from twisted.internet import threads from twisted.python import log from twisted.python import threadpool from twisted.python.compat import NativeStringIO from twisted.trial.unittest import TestCase import buildbot from buildbot.process.buildstep import BuildStep from buildbot.test.fake.reactor import NonThreadPool from buildbot.test.fake.reactor import TestReactor from buildbot.util.eventual import _setReactor class PatcherMixin: """ Mix this in to get a few special-cased patching methods """ def patch_os_uname(self, replacement): # twisted's 'patch' doesn't handle the case where an attribute # doesn't exist.. 
if hasattr(os, 'uname'): self.patch(os, 'uname', replacement) else: def cleanup(): del os.uname self.addCleanup(cleanup) os.uname = replacement class StdoutAssertionsMixin: """ Mix this in to be able to assert on stdout during the test """ def setUpStdoutAssertions(self): self.stdout = NativeStringIO() self.patch(sys, 'stdout', self.stdout) def assertWasQuiet(self): self.assertEqual(self.stdout.getvalue(), '') def assertInStdout(self, exp): self.assertIn(exp, self.stdout.getvalue()) def getStdout(self): return self.stdout.getvalue().strip() class TestReactorMixin: """ Mix this in to get TestReactor as self.reactor which is correctly cleaned up at the end """ def setUpTestReactor(self): self.patch(threadpool, 'ThreadPool', NonThreadPool) self.reactor = TestReactor() _setReactor(self.reactor) def deferToThread(f, *args, **kwargs): return threads.deferToThreadPool(self.reactor, self.reactor.getThreadPool(), f, *args, **kwargs) self.patch(threads, 'deferToThread', deferToThread) # During shutdown sequence we must first stop the reactor and only then # set unset the reactor used for eventually() because any callbacks # that are run during reactor.stop() may use eventually() themselves. self.addCleanup(_setReactor, None) self.addCleanup(self.reactor.stop) class TimeoutableTestCase(TestCase): # The addCleanup in current Twisted does not time out any functions # registered via addCleanups. Until we can depend on fixed Twisted, use # TimeoutableTestCase whenever test failure may cause it to block and not # report anything. def deferRunCleanups(self, ignored, result): self._deferRunCleanupResult = result d = self._run('deferRunCleanupsTimeoutable', result) d.addErrback(self._ebGotMaybeTimeout, result) return d def _ebGotMaybeTimeout(self, failure, result): result.addError(self, failure) def deferRunCleanupsTimeoutable(self): return super().deferRunCleanups(None, self._deferRunCleanupResult) def encodeExecutableAndArgs(executable, args, encoding="utf-8"): """ Encode executable and arguments from unicode to bytes. 
This avoids a deprecation warning when calling reactor.spawnProcess() """ if isinstance(executable, str): executable = executable.encode(encoding) argsBytes = [] for arg in args: if isinstance(arg, str): arg = arg.encode(encoding) argsBytes.append(arg) return (executable, argsBytes) def enable_trace(case, trace_exclusions=None, f=sys.stdout): """This function can be called to enable tracing of the execution """ if trace_exclusions is None: trace_exclusions = [ "twisted", "worker_transition.py", "util/tu", "util/path", "log.py", "/mq/", "/db/", "buildbot/data/", "fake/reactor.py" ] bbbase = os.path.dirname(buildbot.__file__) state = {'indent': 0} def tracefunc(frame, event, arg): if frame.f_code.co_filename.startswith(bbbase): if not any(te in frame.f_code.co_filename for te in trace_exclusions): if event == "call": state['indent'] += 2 print("-" * state['indent'], frame.f_code.co_filename.replace(bbbase, ""), frame.f_code.co_name, frame.f_code.co_varnames, file=f) if event == "return": state['indent'] -= 2 return tracefunc sys.settrace(tracefunc) case.addCleanup(sys.settrace, lambda _a, _b, _c: None) class DebugIntegrationLogsMixin: def setupDebugIntegrationLogs(self): # to ease debugging we display the error logs in the test log origAddCompleteLog = BuildStep.addCompleteLog def addCompleteLog(self, name, _log): if name.endswith("err.text"): log.msg("got error log!", name, _log) return origAddCompleteLog(self, name, _log) self.patch(BuildStep, "addCompleteLog", addCompleteLog) if 'BBTRACE' in os.environ: enable_trace(self) buildbot-2.6.0/master/buildbot/test/util/notifier.py000066400000000000000000000067051361162603000225550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
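# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# TestReactorMixin defined above in misc.py.  The delay value is invented; the
# point is that the fake reactor only advances when told to.

from twisted.trial import unittest

from buildbot.test.util.misc import TestReactorMixin


class ExampleTestReactorUsage(TestReactorMixin, unittest.TestCase):

    def setUp(self):
        self.setUpTestReactor()

    def test_delayed_call(self):
        calls = []
        self.reactor.callLater(10, calls.append, 'fired')
        # nothing has fired yet: the test reactor does not advance on its own
        self.assertEqual(calls, [])
        self.reactor.advance(10)
        self.assertEqual(calls, ['fired'])
# ---------------------------------------------------------------------------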
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.reporters import utils from buildbot.test.fake import fakedb class NotifierTestMixin: @defer.inlineCallbacks def setupBuildResults(self, results, wantPreviousBuild=False): # this testsuite always goes through setupBuildResults so that # the data is sure to be the real data schema known coming from data # api self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Buildset(id=98, results=results, reason="testReason1"), fakedb.Builder(id=80, name='Builder1'), fakedb.BuildRequest(id=11, buildsetid=98, builderid=80), fakedb.Build(id=20, number=0, builderid=80, buildrequestid=11, workerid=13, masterid=92, results=results), fakedb.Step(id=50, buildid=20, number=5, name='make'), fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp(id=234, patchid=99), fakedb.Change(changeid=13, branch='trunk', revision='9283', author='me@foo', repository='svn://...', codebase='cbsvn', project='world-domination', sourcestampid=234), fakedb.Log(id=60, stepid=50, name='stdio', slug='stdio', type='s', num_lines=7), fakedb.LogChunk(logid=60, first_line=0, last_line=1, compressed=0, content='Unicode log with non-ascii (\u00E5\u00E4\u00F6).'), fakedb.Patch(id=99, patch_base64='aGVsbG8sIHdvcmxk', patch_author='him@foo', patch_comment='foo', subdir='/foo', patchlevel=3), ]) for _id in (20,): self.db.insertTestData([ fakedb.BuildProperty( buildid=_id, name="workername", value="wrk"), fakedb.BuildProperty( buildid=_id, name="reason", value="because"), fakedb.BuildProperty( buildid=_id, name="scheduler", value="checkin"), fakedb.BuildProperty( buildid=_id, name="branch", value="master"), ]) res = yield utils.getDetailsForBuildset(self.master, 98, wantProperties=True, wantPreviousBuild=wantPreviousBuild) builds = res['builds'] buildset = res['buildset'] @defer.inlineCallbacks def getChangesForBuild(buildid): assert buildid == 20 ch = yield self.master.db.changes.getChange(13) return [ch] self.master.db.changes.getChangesForBuild = getChangesForBuild return (buildset, builds) buildbot-2.6.0/master/buildbot/test/util/patch_delay.py000066400000000000000000000055061361162603000232110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # Portions of this file include source code of Python 3.7 from # cpython/Lib/unittest/mock.py file. # # It is licensed under PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2. # Copyright (c) 2001-2019 Python Software Foundation. All rights reserved. 
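# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# NotifierTestMixin defined above in notifier.py: it wires a fake master with
# db/mq/data connectors and then uses the fixture.  The assertions are
# invented for the example.

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process.results import SUCCESS
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.test.util.notifier import NotifierTestMixin


class ExampleNotifierFixture(TestReactorMixin, unittest.TestCase, NotifierTestMixin):

    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self, wantData=True, wantDb=True,
                                              wantMq=True)

    @defer.inlineCallbacks
    def test_build_results_fixture(self):
        _, builds = yield self.setupBuildResults(SUCCESS)
        self.assertEqual(len(builds), 1)
        self.assertEqual(builds[0]['results'], SUCCESS)
# ---------------------------------------------------------------------------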
import contextlib import functools import mock from twisted.internet import defer def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) def _importer(target): components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _get_target(target): try: target, attribute = target.rsplit('.', 1) except (TypeError, ValueError): raise TypeError("Need a valid target to patch. You supplied: %r" % (target,)) return _importer(target), attribute class DelayWrapper: def __init__(self): self._deferreds = [] def add_new(self): d = defer.Deferred() self._deferreds.append(d) return d def __len__(self): return len(self._deferreds) def fire(self): deferreds = self._deferreds self._deferreds = [] for d in deferreds: d.callback(None) @contextlib.contextmanager def patchForDelay(target_name): class Default: pass default = Default() target, attribute = _get_target(target_name) original = getattr(target, attribute, default) if original is default: raise Exception('Could not find name {}'.format(target_name)) if not callable(original): raise Exception('{} is not callable'.format(target_name)) delay = DelayWrapper() @functools.wraps(original) @defer.inlineCallbacks def wrapper(*args, **kwargs): yield delay.add_new() return (yield original(*args, **kwargs)) with mock.patch(target_name, new=wrapper): try: yield delay finally: delay.fire() buildbot-2.6.0/master/buildbot/test/util/pbmanager.py000066400000000000000000000040571361162603000226700ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer class PBManagerMixin: def setUpPBChangeSource(self): "Set up a fake self.pbmanager." 
self.registrations = [] self.unregistrations = [] pbm = self.pbmanager = mock.Mock() pbm.register = self._fake_register def _fake_register(self, portstr, username, password, factory): reg = mock.Mock() def unregister(): self.unregistrations.append((portstr, username, password)) return defer.succeed(None) reg.unregister = unregister self.registrations.append((portstr, username, password)) return reg def assertNotRegistered(self): self.assertEqual(self.registrations, []) def assertNotUnregistered(self): self.assertEqual(self.unregistrations, []) def assertRegistered(self, portstr, username, password): for ps, un, pw in self.registrations: if ps == portstr and username == un and pw == password: return self.fail("not registered: %r not in %s" % ((portstr, username, password), self.registrations)) def assertUnregistered(self, portstr, username, password): for ps, un, pw in self.unregistrations: if ps == portstr and username == un and pw == password: return self.fail("still registered") buildbot-2.6.0/master/buildbot/test/util/properties.py000066400000000000000000000017111361162603000231220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from zope.interface import implementer from buildbot.interfaces import IRenderable @implementer(IRenderable) class ConstantRenderable: def __init__(self, value): self.value = value def getRenderingFor(self, props): return self.value buildbot-2.6.0/master/buildbot/test/util/protocols.py000066400000000000000000000047221361162603000227570ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
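# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# patchForDelay() helper defined earlier in patch_delay.py.  'os.getcwd' is
# only a stand-in patch target; any importable callable would do.

import os

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.test.util.patch_delay import patchForDelay


class ExamplePatchForDelayUsage(unittest.TestCase):

    @defer.inlineCallbacks
    def test_delayed_call(self):
        with patchForDelay('os.getcwd') as delay:
            # the patched callable now returns a Deferred that stays pending
            d = defer.maybeDeferred(os.getcwd)
            self.assertEqual(len(delay), 1)
            self.assertFalse(d.called)
            # releasing the delay lets the original function run
            delay.fire()
        cwd = yield d
        self.assertTrue(cwd)
# ---------------------------------------------------------------------------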
# # Copyright Buildbot Team Members from buildbot.test.util import interfaces class ConnectionInterfaceTest(interfaces.InterfaceTests): def setUp(self): # subclasses must set self.conn in this method raise NotImplementedError def test_sig_notifyOnDisconnect(self): @self.assertArgSpecMatches(self.conn.notifyOnDisconnect) def notifyOnDisconnect(self, cb): pass def test_sig_loseConnection(self): @self.assertArgSpecMatches(self.conn.loseConnection) def loseConnection(self): pass def test_sig_remotePrint(self): @self.assertArgSpecMatches(self.conn.remotePrint) def remotePrint(self, message): pass def test_sig_remoteGetWorkerInfo(self): @self.assertArgSpecMatches(self.conn.remoteGetWorkerInfo) def remoteGetWorkerInfo(self): pass def test_sig_remoteSetBuilderList(self): @self.assertArgSpecMatches(self.conn.remoteSetBuilderList) def remoteSetBuilderList(self, builders): pass def test_sig_remoteStartCommand(self): @self.assertArgSpecMatches(self.conn.remoteStartCommand) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): pass def test_sig_remoteShutdown(self): @self.assertArgSpecMatches(self.conn.remoteShutdown) def remoteShutdown(self): pass def test_sig_remoteStartBuild(self): @self.assertArgSpecMatches(self.conn.remoteStartBuild) def remoteStartBuild(self, builderName): pass def test_sig_remoteInterruptCommand(self): @self.assertArgSpecMatches(self.conn.remoteInterruptCommand) def remoteInterruptCommand(builderName, commandId, why): pass buildbot-2.6.0/master/buildbot/test/util/querylog.py000066400000000000000000000065711361162603000226060ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import contextlib import logging from twisted.python import log # These routines provides a way to dump SQLAlchemy SQL commands and their # results into Twisted's log. # Logging wrappers are not re-entrant. class _QueryToTwistedHandler(logging.Handler): def __init__(self, log_query_result=False, record_mode=False): super().__init__() self._log_query_result = log_query_result self.recordMode = record_mode self.records = [] def emit(self, record): if self.recordMode: self.records.append(record.getMessage()) return if record.levelno == logging.DEBUG: if self._log_query_result: log.msg("{name}:{thread}:result: {msg}".format( name=record.name, thread=record.threadName, msg=record.getMessage())) else: log.msg("{name}:{thread}:query: {msg}".format( name=record.name, thread=record.threadName, msg=record.getMessage())) def start_log_queries(log_query_result=False, record_mode=False): handler = _QueryToTwistedHandler( log_query_result=log_query_result, record_mode=record_mode) # In 'sqlalchemy.engine' logging namespace SQLAlchemy outputs SQL queries # on INFO level, and SQL queries results on DEBUG level. 
logger = logging.getLogger('sqlalchemy.engine') # TODO: this is not documented field of logger, so it's probably private. handler.prev_level = logger.level logger.setLevel(logging.DEBUG) logger.addHandler(handler) # Do not propagate SQL echoing into ancestor handlers handler.prev_propagate = logger.propagate logger.propagate = False # Return previous values of settings, so they can be carefully restored # later. return handler def stop_log_queries(handler): assert isinstance(handler, _QueryToTwistedHandler) logger = logging.getLogger('sqlalchemy.engine') logger.removeHandler(handler) # Restore logger settings or set them to reasonable defaults. logger.propagate = handler.prev_propagate logger.setLevel(handler.prev_level) @contextlib.contextmanager def log_queries(): handler = start_log_queries() try: yield finally: stop_log_queries(handler) class SqliteMaxVariableMixin: @contextlib.contextmanager def assertNoMaxVariables(self): handler = start_log_queries(record_mode=True) try: yield finally: stop_log_queries(handler) for line in handler.records: self.assertFalse(line.count("?") > 999, "too much variables in " + line) buildbot-2.6.0/master/buildbot/test/util/reporter.py000066400000000000000000000072301361162603000225720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
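# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# log_queries() context manager defined above in querylog.py, mirroring how
# migration.py wraps its upgrade step.  The in-memory engine and statement are
# invented for the example (SQLAlchemy 1.x style Engine.execute()).

import sqlalchemy as sa

from buildbot.test.util.querylog import log_queries


def _example_dump_queries_to_twisted_log():
    engine = sa.create_engine('sqlite://')
    # every statement executed inside the block is echoed to twisted's log
    with log_queries():
        engine.execute("SELECT 1")
# ---------------------------------------------------------------------------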
# # Copyright Buildbot Team Members from buildbot.test.fake import fakedb class ReporterTestMixin: TEST_PROJECT = 'testProject' TEST_REPO = 'https://example.org/repo' TEST_REVISION = 'd34db33fd43db33f' TEST_CODEBASE = 'cbgerrit' TEST_CHANGE_ID = 'I5bdc2e500d00607af53f0fa4df661aada17f81fc' TEST_BUILDER_NAME = 'Builder0' TEST_PROPS = { 'Stash_branch': 'refs/changes/34/1234/1', 'project': TEST_PROJECT, 'got_revision': TEST_REVISION, 'revision': TEST_REVISION, 'event.change.id': TEST_CHANGE_ID, 'event.change.project': TEST_PROJECT, 'branch': 'refs/pull/34/merge', } THING_URL = 'http://thing.example.com' def insertTestData(self, buildResults, finalResult, insertSS=True): self.db = self.master.db self.db.insertTestData([ fakedb.Master(id=92), fakedb.Worker(id=13, name='wrk'), fakedb.Builder(id=79, name='Builder0'), fakedb.Builder(id=80, name='Builder1'), fakedb.Buildset(id=98, results=finalResult, reason="testReason1"), fakedb.Change(changeid=13, branch='master', revision='9283', author='me@foo', repository=self.TEST_REPO, codebase=self.TEST_CODEBASE, project='world-domination', sourcestampid=234), ]) if insertSS: self.db.insertTestData([ fakedb.BuildsetSourceStamp(buildsetid=98, sourcestampid=234), fakedb.SourceStamp( id=234, project=self.TEST_PROJECT, revision=self.TEST_REVISION, repository=self.TEST_REPO, codebase=self.TEST_CODEBASE) ]) for i, results in enumerate(buildResults): self.db.insertTestData([ fakedb.BuildRequest( id=11 + i, buildsetid=98, builderid=79 + i), fakedb.Build(id=20 + i, number=i, builderid=79 + i, buildrequestid=11 + i, workerid=13, masterid=92, results=results, state_string="buildText"), fakedb.Step(id=50 + i, buildid=20 + i, number=5, name='make'), fakedb.Log(id=60 + i, stepid=50 + i, name='stdio', slug='stdio', type='s', num_lines=7), fakedb.LogChunk(logid=60 + i, first_line=0, last_line=1, compressed=0, content='Unicode log with non-ascii (\u00E5\u00E4\u00F6).'), fakedb.BuildProperty( buildid=20 + i, name="workername", value="wrk"), fakedb.BuildProperty( buildid=20 + i, name="reason", value="because"), fakedb.BuildProperty( buildid=20 + i, name="buildername", value="Builder0"), ]) for k, v in self.TEST_PROPS.items(): self.db.insertTestData([ fakedb.BuildProperty(buildid=20 + i, name=k, value=v) ]) buildbot-2.6.0/master/buildbot/test/util/sandboxed_worker.py000066400000000000000000000062761361162603000243010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
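# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original sources) for the
# ReporterTestMixin defined above in reporter.py: insertTestData() populates a
# fake database, and getDetailsForBuildset() then sees buildset 98 with one
# build per entry in buildResults.  The assertions are invented.

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.process.results import SUCCESS
from buildbot.reporters import utils as reporters_utils
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.test.util.reporter import ReporterTestMixin


class ExampleReporterFixture(TestReactorMixin, unittest.TestCase, ReporterTestMixin):

    def setUp(self):
        self.setUpTestReactor()
        self.master = fakemaster.make_master(self, wantData=True, wantDb=True,
                                              wantMq=True)

    @defer.inlineCallbacks
    def test_insert_build_fixture(self):
        self.insertTestData([SUCCESS], SUCCESS)
        res = yield reporters_utils.getDetailsForBuildset(self.master, 98,
                                                          wantProperties=True)
        self.assertEqual(len(res['builds']), 1)
        self.assertEqual(res['builds'][0]['results'], SUCCESS)
# ---------------------------------------------------------------------------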
# # Copyright Buildbot Team Members import subprocess from twisted.internet import defer from twisted.internet import protocol from twisted.internet import reactor from buildbot.util.service import AsyncService class WorkerProcessProtocol(protocol.ProcessProtocol): def __init__(self): self.finished_deferred = defer.Deferred() def outReceived(self, data): print(data) def errReceived(self, data): print(data) def processEnded(self, _): self.finished_deferred.callback(None) def waitForFinish(self): return self.finished_deferred class SandboxedWorker(AsyncService): def __init__(self, masterhost, port, name, passwd, workerdir, sandboxed_worker_path): self.masterhost = masterhost self.port = port self.workername = name self.workerpasswd = passwd self.workerdir = workerdir self.sandboxed_worker_path = sandboxed_worker_path self.worker = None def startService(self): # Note that we create the worker with sync API # We don't really care as we are in tests res = subprocess.run([self.sandboxed_worker_path, "create-worker", '-q', self.workerdir, self.masterhost + ":" + str(self.port), self.workername, self.workerpasswd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False) if res.returncode != 0: # we do care about finding out why it failed though raise RuntimeError("\n".join([ "Unable to create worker!", res.stdout.decode(), res.stderr.decode() ])) self.processprotocol = processProtocol = WorkerProcessProtocol() # we need to spawn the worker asynchronously though args = [self.sandboxed_worker_path, 'start', '--nodaemon', self.workerdir] self.process = reactor.spawnProcess(processProtocol, self.sandboxed_worker_path, args=args) self.worker = self.master.workers.getWorkerByName(self.workername) return super().startService() @defer.inlineCallbacks def shutdownWorker(self): if self.worker is None: return # on windows, we killing a process does not work well. # we use the graceful shutdown feature of buildbot-worker instead to kill the worker # but we must do that before the master is stopping. yield self.worker.shutdown() # wait for process to disappear yield self.processprotocol.waitForFinish() buildbot-2.6.0/master/buildbot/test/util/scheduler.py000066400000000000000000000233161361162603000227110ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.schedulers import base from buildbot.test.fake import fakedb from buildbot.test.fake import fakemaster from buildbot.test.util import interfaces class SchedulerMixin(interfaces.InterfaceTests): """ This class fakes out enough of a master and the various relevant database connectors to test schedulers. All of the database methods have identical signatures to the real database connectors, but for ease of testing always return an already-fired Deferred, meaning that there is no need to wait for events to complete. 
This class is tightly coupled with the various L{buildbot.test.fake.fakedb} module. All instance variables are only available after C{attachScheduler} has been called. @ivar sched: scheduler instance @ivar master: the fake master @ivar db: the fake db (same as C{self.master.db}, but shorter) """ OTHER_MASTER_ID = 93 def setUpScheduler(self): self.master = fakemaster.make_master(self, wantDb=True, wantMq=True, wantData=True) def tearDownScheduler(self): pass def attachScheduler(self, scheduler, objectid, schedulerid, overrideBuildsetMethods=False, createBuilderDB=False): """Set up a scheduler with a fake master and db; sets self.sched, and sets the master's basedir to the absolute path of 'basedir' in the test directory. If C{overrideBuildsetMethods} is true, then all of the addBuildsetForXxx methods are overridden to simply append the method name and arguments to self.addBuildsetCalls. These overridden methods return buildsets starting with 500 and buildrequest IDs starting with 100. For C{addBuildsetForSourceStamp}, this also overrides DB API methods C{addSourceStamp} and C{addSourceStampSet}, and uses that information to generate C{addBuildsetForSourceStamp} results. @returns: scheduler """ scheduler.objectid = objectid # set up a fake master db = self.db = self.master.db self.mq = self.master.mq scheduler.setServiceParent(self.master) rows = [fakedb.Object(id=objectid, name=scheduler.name, class_name='SomeScheduler'), fakedb.Scheduler(id=schedulerid, name=scheduler.name), ] if createBuilderDB is True: rows.extend([fakedb.Builder(name=bname) for bname in scheduler.builderNames]) db.insertTestData(rows) if overrideBuildsetMethods: for method in ( 'addBuildsetForSourceStampsWithDefaults', 'addBuildsetForChanges', 'addBuildsetForSourceStamps'): actual = getattr(scheduler, method) fake = getattr(self, 'fake_%s' % method) self.assertArgSpecMatches(actual, fake) setattr(scheduler, method, fake) self.addBuildsetCalls = [] self._bsidGenerator = iter(range(500, 999)) self._bridGenerator = iter(range(100, 999)) # temporarily override the sourcestamp and sourcestampset methods self.addedSourceStamps = [] self.addedSourceStampSets = [] def fake_addSourceStamp(**kwargs): self.assertEqual(kwargs['sourcestampsetid'], 400 + len(self.addedSourceStampSets) - 1) self.addedSourceStamps.append(kwargs) return defer.succeed(300 + len(self.addedSourceStamps) - 1) self.db.sourcestamps.addSourceStamp = fake_addSourceStamp def fake_addSourceStampSet(): self.addedSourceStampSets.append([]) return defer.succeed(400 + len(self.addedSourceStampSets) - 1) self.db.sourcestamps.addSourceStampSet = fake_addSourceStampSet # patch methods to detect a failure to upcall the activate and # deactivate methods .. 
unless we're testing BaseScheduler def patch(meth): oldMethod = getattr(scheduler, meth) @defer.inlineCallbacks def newMethod(): self._parentMethodCalled = False rv = yield defer.maybeDeferred(oldMethod) self.assertTrue(self._parentMethodCalled, "'%s' did not call its parent" % meth) return rv setattr(scheduler, meth, newMethod) oldParent = getattr(base.BaseScheduler, meth) def newParent(self_): self._parentMethodCalled = True return oldParent(self_) self.patch(base.BaseScheduler, meth, newParent) if scheduler.__class__.activate != base.BaseScheduler.activate: patch('activate') if scheduler.__class__.deactivate != base.BaseScheduler.deactivate: patch('deactivate') self.sched = scheduler return scheduler @defer.inlineCallbacks def setSchedulerToMaster(self, otherMaster): sched_id = yield self.master.data.updates.findSchedulerId(self.sched.name) if otherMaster: self.master.data.updates.schedulerMasters[sched_id] = otherMaster else: del self.master.data.updates.schedulerMasters[sched_id] class FakeChange: who = '' files = [] comments = '' isdir = 0 links = None revision = None when = None branch = None category = None revlink = '' properties = {} repository = '' project = '' codebase = '' def makeFakeChange(self, **kwargs): """Utility method to make a fake Change object with the given attributes""" ch = self.FakeChange() ch.__dict__.update(kwargs) return ch @defer.inlineCallbacks def _addBuildsetReturnValue(self, builderNames): if builderNames is None: builderNames = self.sched.builderNames builderids = [] builders = yield self.db.builders.getBuilders() for builderName in builderNames: for bldrDict in builders: if builderName == bldrDict["name"]: builderids.append(bldrDict["id"]) break assert len(builderids) == len(builderNames) bsid = next(self._bsidGenerator) brids = dict(zip(builderids, self._bridGenerator)) return (bsid, brids) def fake_addBuildsetForSourceStampsWithDefaults(self, reason, sourcestamps=None, waited_for=False, properties=None, builderNames=None, **kw): properties = properties.asDict() if properties is not None else None self.assertIsInstance(sourcestamps, list) def sourceStampKey(sourceStamp): return sourceStamp.get("codebase") sourcestamps = sorted(sourcestamps, key=sourceStampKey) self.addBuildsetCalls.append(('addBuildsetForSourceStampsWithDefaults', dict(reason=reason, sourcestamps=sourcestamps, waited_for=waited_for, properties=properties, builderNames=builderNames))) return self._addBuildsetReturnValue(builderNames) def fake_addBuildsetForChanges(self, waited_for=False, reason='', external_idstring=None, changeids=None, builderNames=None, properties=None, **kw): if changeids is None: changeids = [] properties = properties.asDict() if properties is not None else None self.addBuildsetCalls.append(('addBuildsetForChanges', dict(waited_for=waited_for, reason=reason, external_idstring=external_idstring, changeids=changeids, properties=properties, builderNames=builderNames, ))) return self._addBuildsetReturnValue(builderNames) def fake_addBuildsetForSourceStamps(self, waited_for=False, sourcestamps=None, reason='', external_idstring=None, properties=None, builderNames=None, **kw): if sourcestamps is None: sourcestamps = [] properties = properties.asDict() if properties is not None else None self.assertIsInstance(sourcestamps, list) sourcestamps.sort() self.addBuildsetCalls.append(('addBuildsetForSourceStamps', dict(reason=reason, external_idstring=external_idstring, properties=properties, builderNames=builderNames, sourcestamps=sourcestamps))) return 
self._addBuildsetReturnValue(builderNames) buildbot-2.6.0/master/buildbot/test/util/sourcesteps.py000066400000000000000000000037421361162603000233130ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from buildbot.test.util import steps class SourceStepMixin(steps.BuildStepMixin): """ Support for testing source steps. Aside from the capabilities of L{BuildStepMixin}, this adds: - fake sourcestamps The following instance variables are available after C{setupSourceStep}, in addition to those made available by L{BuildStepMixin}: @ivar sourcestamp: fake SourceStamp for the build """ def setUpSourceStep(self): return super().setUpBuildStep() def tearDownSourceStep(self): return super().tearDownBuildStep() # utilities def setupStep(self, step, args=None, patch=None, **kwargs): """ Set up C{step} for testing. This calls L{BuildStepMixin}'s C{setupStep} and then does setup specific to a Source step. """ step = super().setupStep(step, **kwargs) if args is None: args = {} ss = self.sourcestamp = mock.Mock(name="sourcestamp") ss.ssid = 9123 ss.branch = args.get('branch', None) ss.revision = args.get('revision', None) ss.project = '' ss.repository = '' ss.patch = patch ss.patch_info = None ss.changes = [] self.build.getSourceStamp = lambda x=None: ss return step buildbot-2.6.0/master/buildbot/test/util/steps.py000066400000000000000000000403131361162603000220650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import mock from twisted.internet import defer from twisted.internet import task from twisted.python import log from twisted.python.reflect import namedModule from buildbot import interfaces from buildbot.process import buildstep from buildbot.process import remotecommand as real_remotecommand from buildbot.process.results import EXCEPTION from buildbot.test.fake import fakebuild from buildbot.test.fake import fakemaster from buildbot.test.fake import logfile from buildbot.test.fake import remotecommand from buildbot.test.fake import worker from buildbot.util import bytes2unicode def _dict_diff(d1, d2): """ Given two dictionaries describe their difference For nested dictionaries, key-paths are concatenated with the '.' 
operator @return The list of keys missing in d1, the list of keys missing in d2, and the differences in any nested keys """ d1_keys = set(d1.keys()) d2_keys = set(d2.keys()) both = d1_keys & d2_keys missing_in_d1 = [] missing_in_d2 = [] different = [] for k in both: if isinstance(d1[k], dict) and isinstance(d2[k], dict): missing_in_v1, missing_in_v2, different_in_v = _dict_diff( d1[k], d2[k]) missing_in_d1.extend(['{0}.{1}'.format(k, m) for m in missing_in_v1]) missing_in_d2.extend(['{0}.{1}'.format(k, m) for m in missing_in_v2]) for child_k, left, right in different_in_v: different.append(('{0}.{1}'.format(k, child_k), left, right)) continue if d1[k] != d2[k]: different.append((k, d1[k], d2[k])) missing_in_d1.extend(d2_keys - both) missing_in_d2.extend(d1_keys - both) return missing_in_d1, missing_in_d2, different def _describe_cmd_difference(exp, command): if exp.args == command.args: return "" text = "" missing_in_exp, missing_in_cmd, diff = _dict_diff(exp.args, command.args) if missing_in_exp: text += ( 'Keys in cmd missing from expectation: {0}\n'.format(missing_in_exp)) if missing_in_cmd: text += ( 'Keys in expectation missing from command: {0}\n'.format(missing_in_cmd)) if diff: formatted_diff = [ '"{0}": expected {1!r}, got {2!r}'.format(*d) for d in diff] text += ('Key differences between expectation and command: {0}\n'.format( '\n'.join(formatted_diff))) return text class BuildStepMixin: """ Support for testing build steps. This class adds two capabilities: - patch out RemoteCommand with fake versions that check expected commands and produce the appropriate results - surround a step with the mock objects that it needs to execute The following instance variables are available after C{setupStep}: @ivar step: the step under test @ivar build: the fake build containing the step @ivar progress: mock progress object @ivar worker: mock worker object @ivar properties: build properties (L{Properties} instance) """ def setUpBuildStep(self): # make an (admittedly global) reference to this test case so that # the fakes can call back to us remotecommand.FakeRemoteCommand.testcase = self for module in buildstep, real_remotecommand: self.patch(module, 'RemoteCommand', remotecommand.FakeRemoteCommand) self.patch(module, 'RemoteShellCommand', remotecommand.FakeRemoteShellCommand) self.expected_remote_commands = [] def tearDownBuildStep(self): # delete the reference added in setUp del remotecommand.FakeRemoteCommand.testcase # utilities def _getWorkerCommandVersionWrapper(self): originalGetWorkerCommandVersion = self.step.build.getWorkerCommandVersion def getWorkerCommandVersion(cmd, oldversion): return originalGetWorkerCommandVersion(cmd, oldversion) return getWorkerCommandVersion def setupStep(self, step, worker_version=None, worker_env=None, buildFiles=None, wantDefaultWorkdir=True, wantData=True, wantDb=False, wantMq=False): """ Set up C{step} for testing. This begins by using C{step} as a factory to create a I{new} step instance, thereby testing that the factory arguments are handled correctly. It then creates a comfortable environment for the worker to run in, replete with a fake build and a fake worker. As a convenience, it can set the step's workdir with C{'wkdir'}. @param worker_version: worker version to present, as a dictionary mapping command name to version. A command name of '*' will apply for all commands. @param worker_env: environment from the worker at worker startup @param wantData(bool): Set to True to add data API connector to master. Default value: True. 
@param wantDb(bool): Set to True to add database connector to master. Default value: False. @param wantMq(bool): Set to True to add mq connector to master. Default value: False. """ if worker_version is None: worker_version = { '*': '99.99' } if worker_env is None: worker_env = dict() if buildFiles is None: buildFiles = list() factory = interfaces.IBuildStepFactory(step) step = self.step = factory.buildStep() self.master = fakemaster.make_master(self, wantData=wantData, wantDb=wantDb, wantMq=wantMq) # mock out the reactor for updateSummary's debouncing self.debounceClock = task.Clock() self.master.reactor = self.debounceClock # set defaults if wantDefaultWorkdir: step.workdir = step._workdir or 'wkdir' # step.build b = self.build = fakebuild.FakeBuild(master=self.master) b.allFiles = lambda: buildFiles b.master = self.master def getWorkerVersion(cmd, oldversion): if cmd in worker_version: return worker_version[cmd] if '*' in worker_version: return worker_version['*'] return oldversion b.getWorkerCommandVersion = getWorkerVersion b.workerEnvironment = worker_env.copy() step.setBuild(b) # watch for properties being set self.properties = interfaces.IProperties(b) # step.progress step.progress = mock.Mock(name="progress") # step.worker self.worker = step.worker = worker.FakeWorker(self.master) self.worker.attached(None) # step overrides def addLog(name, type='s', logEncoding=None): _log = logfile.FakeLogFile(name, step) self.step.logs[name] = _log return defer.succeed(_log) step.addLog = addLog step.addLog_newStyle = addLog def addHTMLLog(name, html): _log = logfile.FakeLogFile(name, step) html = bytes2unicode(html) _log.addStdout(html) return defer.succeed(None) step.addHTMLLog = addHTMLLog def addCompleteLog(name, text): _log = logfile.FakeLogFile(name, step) self.step.logs[name] = _log _log.addStdout(text) return defer.succeed(None) step.addCompleteLog = addCompleteLog step.logobservers = self.logobservers = {} def addLogObserver(logname, observer): self.logobservers.setdefault(logname, []).append(observer) observer.step = step step.addLogObserver = addLogObserver # add any observers defined in the constructor, before this # monkey-patch for n, o in step._pendingLogObservers: addLogObserver(n, o) # expectations self.exp_result = None self.exp_state_string = None self.exp_properties = {} self.exp_missing_properties = [] self.exp_logfiles = {} self.exp_hidden = False self.exp_exception = None # check that the step's name is not None self.assertNotEqual(step.name, None) return step def expectCommands(self, *exp): """ Add to the expected remote commands, along with their results. Each argument should be an instance of L{Expect}. """ self.expected_remote_commands.extend(exp) def expectOutcome(self, result, state_string=None): """ Expect the given result (from L{buildbot.process.results}) and status text (a list). """ self.exp_result = result if state_string: self.exp_state_string = state_string def expectProperty(self, property, value, source=None): """ Expect the given property to be set when the step is complete. """ self.exp_properties[property] = (value, source) def expectNoProperty(self, property): """ Expect the given property is *not* set when the step is complete """ self.exp_missing_properties.append(property) def expectLogfile(self, logfile, contents): """ Expect a logfile with the given contents """ self.exp_logfiles[logfile] = contents def expectHidden(self, hidden): """ Set whether the step is expected to be hidden. 
""" self.exp_hidden = hidden def expectException(self, exception_class): """ Set whether the step is expected to raise an exception. """ self.exp_exception = exception_class self.expectOutcome(EXCEPTION) @defer.inlineCallbacks def runStep(self): """ Run the step set up with L{setupStep}, and check the results. @returns: Deferred """ self.step.build.getWorkerCommandVersion = self._getWorkerCommandVersionWrapper() self.conn = mock.Mock(name="WorkerForBuilder(connection)") self.step.setupProgress() result = yield self.step.startStep(self.conn) # finish up the debounced updateSummary before checking self.debounceClock.advance(1) if self.expected_remote_commands: log.msg("un-executed remote commands:") for rc in self.expected_remote_commands: log.msg(repr(rc)) raise AssertionError("un-executed remote commands; see logs") # in case of unexpected result, display logs in stdout for # debugging failing tests if result != self.exp_result: log.msg("unexpected result from step; dumping logs") for l in self.step.logs.values(): if l.stdout: log.msg("{0} stdout:\n{1}".format(l.name, l.stdout)) if l.stderr: log.msg("{0} stderr:\n{1}".format(l.name, l.stderr)) raise AssertionError("unexpected result; see logs") if self.exp_state_string: stepStateString = self.master.data.updates.stepStateString stepids = list(stepStateString) assert stepids, "no step state strings were set" self.assertEqual( self.exp_state_string, stepStateString[stepids[0]], "expected state_string {0!r}, got {1!r}".format( self.exp_state_string, stepStateString[stepids[0]])) for pn, (pv, ps) in self.exp_properties.items(): self.assertTrue(self.properties.hasProperty(pn), "missing property '%s'" % pn) self.assertEqual(self.properties.getProperty(pn), pv, "property '%s'" % pn) if ps is not None: self.assertEqual( self.properties.getPropertySource(pn), ps, "property {0!r} source has source {1!r}".format( pn, self.properties.getPropertySource(pn))) for pn in self.exp_missing_properties: self.assertFalse(self.properties.hasProperty(pn), "unexpected property '%s'" % pn) for l, exp in self.exp_logfiles.items(): got = self.step.logs[l].stdout if got != exp: log.msg("Unexpected log output:\n" + got) raise AssertionError("Unexpected log output; see logs") if self.exp_exception: self.assertEqual( len(self.flushLoggedErrors(self.exp_exception)), 1) # XXX TODO: hidden # self.step_status.setHidden.assert_called_once_with(self.exp_hidden) # callbacks from the running step @defer.inlineCallbacks def _validate_expectation(self, exp, command): got = (command.remote_command, command.args) for child_exp in exp.nestedExpectations(): try: yield self._validate_expectation(child_exp, command) exp.expectationPassed(exp) except AssertionError as e: # log this error, as the step may swallow the AssertionError or # otherwise obscure the failure. Trial will see the exception in # the log and print an [ERROR]. This may result in # double-reporting, but that's better than non-reporting! 
log.err() exp.raiseExpectationFailure(child_exp, e) if exp.shouldAssertCommandEqualExpectation(): # handle any incomparable args for arg in exp.incomparable_args: self.assertTrue(arg in got[1], "incomparable arg '%s' not received" % (arg,)) del got[1][arg] # first check any ExpectedRemoteReference instances exp_tup = (exp.remote_command, exp.args) if exp_tup != got: text = _describe_cmd_difference(exp, command) raise AssertionError( "Command contents different from expected; " + text) if exp.shouldRunBehaviors(): # let the Expect object show any behaviors that are required yield exp.runBehaviors(command) @defer.inlineCallbacks def _remotecommand_run(self, command, step, conn, builder_name): self.assertEqual(step, self.step) self.assertEqual(conn, self.conn) got = (command.remote_command, command.args) if not self.expected_remote_commands: self.fail("got command %r when no further commands were expected" % (got,)) exp = self.expected_remote_commands[0] try: yield self._validate_expectation(exp, command) exp.expectationPassed(exp) except AssertionError as e: # log this error, as the step may swallow the AssertionError or # otherwise obscure the failure. Trial will see the exception in # the log and print an [ERROR]. This may result in # double-reporting, but that's better than non-reporting! log.err() exp.raiseExpectationFailure(exp, e) finally: if not exp.shouldKeepMatchingAfter(command): self.expected_remote_commands.pop(0) return command def changeWorkerSystem(self, system): self.worker.worker_system = system if system in ['nt', 'win32']: self.build.path_module = namedModule('ntpath') self.worker.worker_basedir = '\\wrk' else: self.build.path_module = namedModule('posixpath') self.worker.worker_basedir = '/wrk' buildbot-2.6.0/master/buildbot/test/util/tuplematching.py000066400000000000000000000040631361162603000235750ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class TupleMatchingMixin: # a bunch of tuple-matching tests that all call do_test_match # this is used to test this behavior in a few places def do_test_match(self, routingKey, shouldMatch, *tuples): raise NotImplementedError def test_simple_tuple_match(self): return self.do_test_match(('abc',), True, ('abc',)) def test_simple_tuple_no_match(self): return self.do_test_match(('abc',), False, ('def',)) def test_multiple_tuple_match(self): return self.do_test_match(('a', 'b', 'c'), True, ('a', 'b', 'c')) def test_multiple_tuple_match_tuple_prefix(self): return self.do_test_match(('a', 'b', 'c'), False, ('a', 'b')) def test_multiple_tuple_match_tuple_suffix(self): return self.do_test_match(('a', 'b', 'c'), False, ('b', 'c')) def test_multiple_tuple_match_rk_prefix(self): return self.do_test_match(('a', 'b'), False, ('a', 'b', 'c')) def test_multiple_tuple_match_rk_suffix(self): return self.do_test_match(('b', 'c'), False, ('a', 'b', 'c')) def test_None_match(self): return self.do_test_match(('a', 'b', 'c'), True, ('a', None, 'c')) def test_None_match_empty(self): return self.do_test_match(('a', '', 'c'), True, ('a', None, 'c')) def test_None_no_match(self): return self.do_test_match(('a', 'b', 'c'), False, ('a', None, 'x')) buildbot-2.6.0/master/buildbot/test/util/validation.py000066400000000000000000000504701361162603000230660ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # See "Type Validation" in master/docs/developer/tests.rst import datetime import json import re from buildbot.util import UTC from buildbot.util import bytes2unicode # Base class validatorsByName = {} class Validator: name = None hasArgs = False def validate(self, name, object): raise NotImplementedError class __metaclass__(type): def __new__(mcs, name, bases, attrs): cls = type.__new__(mcs, name, bases, attrs) if 'name' in attrs and attrs['name']: assert attrs['name'] not in validatorsByName validatorsByName[attrs['name']] = cls return cls # Basic types class InstanceValidator(Validator): types = () def validate(self, name, object): if not isinstance(object, self.types): yield "{} ({!r}) is not a {}".format( name, object, self.name or repr(self.types)) class IntValidator(InstanceValidator): types = (int,) name = 'integer' class BooleanValidator(InstanceValidator): types = (bool,) name = 'boolean' class StringValidator(InstanceValidator): # strings must be unicode types = (str,) name = 'string' class BinaryValidator(InstanceValidator): types = (bytes,) name = 'bytestring' class StrValidator(InstanceValidator): types = (str,) name = 'str' class DateTimeValidator(Validator): types = (datetime.datetime,) name = 'datetime' def validate(self, name, object): if not isinstance(object, datetime.datetime): yield "{} - {!r} - is not a datetime".format(name, object) elif object.tzinfo != UTC: yield "{} is not a UTC datetime".format(name) class IdentifierValidator(Validator): types = (str,) name = 'identifier' hasArgs = True ident_re = re.compile('^[a-zA-Z\u00a0-\U0010ffff_-][a-zA-Z0-9\u00a0-\U0010ffff_-]*$', flags=re.UNICODE) def __init__(self, len): self.len = len def validate(self, name, object): if not isinstance(object, str): yield "{} - {!r} - is not a unicode string".format(name, object) elif not self.ident_re.match(object): yield "{} - {!r} - is not an identifier".format(name, object) elif not object: yield "{} - identifiers cannot be an empty string".format(name) elif len(object) > self.len: yield "{} - {!r} - is longer than {} characters".format( name, object, self.len) # Miscellaneous class NoneOk: def __init__(self, original): self.original = original def validate(self, name, object): if object is None: return else: for msg in self.original.validate(name, object): yield msg class Any: def validate(self, name, object): return # Compound Types class DictValidator(Validator): name = 'dict' def __init__(self, optionalNames=None, **keys): if optionalNames is None: optionalNames = [] self.optionalNames = set(optionalNames) self.keys = keys self.expectedNames = set(keys.keys()) def validate(self, name, object): # this uses isinstance, allowing dict subclasses as used by the DB API if not isinstance(object, dict): yield "{} ({!r}) is not a dictionary (got type {})".format( name, object, type(object)) return gotNames = set(object.keys()) unexpected = gotNames - self.expectedNames if unexpected: yield "{} has unexpected keys {}".format(name, ", ".join([repr(n) for n in unexpected])) missing = self.expectedNames - self.optionalNames - gotNames if missing: yield "{} is missing keys {}".format(name, ", ".join([repr(n) for n in missing])) for k in gotNames & self.expectedNames: for msg in self.keys[k].validate("{}[{!r}]".format(name, k), object[k]): yield msg class SequenceValidator(Validator): type = None def __init__(self, elementValidator): self.elementValidator = elementValidator def validate(self, name, object): if not isinstance(object, self.type): yield "{} 
({!r}) is not a {}".format(name, object, self.name) return for idx, elt in enumerate(object): for msg in self.elementValidator.validate("{}[{}]".format(name, idx), elt): yield msg class ListValidator(SequenceValidator): type = list name = 'list' class TupleValidator(SequenceValidator): type = tuple name = 'tuple' class StringListValidator(ListValidator): name = 'string-list' def __init__(self): super().__init__(StringValidator()) class SourcedPropertiesValidator(Validator): name = 'sourced-properties' def validate(self, name, object): if not isinstance(object, dict): yield "{} is not sourced properties (not a dict)".format(name) return for k, v in object.items(): if not isinstance(k, str): yield "{} property name {!r} is not unicode".format(name, k) if not isinstance(v, tuple) or len(v) != 2: yield "{} property value for '{}' is not a 2-tuple".format(name, k) return propval, propsrc = v if not isinstance(propsrc, str): yield "{}[{}] source {!r} is not unicode".format(name, k, propsrc) try: json.dumps(propval) except (TypeError, ValueError): yield "{}[{!r}] value is not JSON-able".format(name, k) class JsonValidator(Validator): name = 'json' def validate(self, name, object): try: json.dumps(object) except (TypeError, ValueError): yield "{}[{!r}] value is not JSON-able".format(name, object) class PatchValidator(Validator): name = 'patch' validator = DictValidator( body=NoneOk(BinaryValidator()), level=NoneOk(IntValidator()), subdir=NoneOk(StringValidator()), author=NoneOk(StringValidator()), comment=NoneOk(StringValidator()), ) def validate(self, name, object): for msg in self.validator.validate(name, object): yield msg class MessageValidator(Validator): routingKeyValidator = TupleValidator(StrValidator()) def __init__(self, events, messageValidator): self.events = [bytes2unicode(e) for e in set(events)] self.messageValidator = messageValidator def validate(self, name, routingKey_message): try: routingKey, message = routingKey_message except (TypeError, ValueError) as e: yield "{!r}: not a routing key and message: {}".format(routingKey_message, e) routingKeyBad = False for msg in self.routingKeyValidator.validate("routingKey", routingKey): yield msg routingKeyBad = True if not routingKeyBad: event = routingKey[-1] if event not in self.events: yield "routing key event {!r} is not valid".format(event) for msg in self.messageValidator.validate("{} message".format(routingKey[0]), message): yield msg class Selector(Validator): def __init__(self): self.selectors = [] def add(self, selector, validator): self.selectors.append((selector, validator)) def validate(self, name, arg_object): try: arg, object = arg_object except (TypeError, ValueError) as e: yield "{!r}: not a not data options and data dict: {}".format(arg_object, e) for selector, validator in self.selectors: if selector is None or selector(arg): for msg in validator.validate(name, object): yield msg return yield "no match for selector argument {!r}".format(arg) # Type definitions message = {} dbdict = {} # parse and use a ResourceType class's dataFields into a validator # masters message['masters'] = Selector() message['masters'].add(None, MessageValidator( events=[b'started', b'stopped'], messageValidator=DictValidator( masterid=IntValidator(), name=StringValidator(), active=BooleanValidator(), # last_active is not included ))) dbdict['masterdict'] = DictValidator( id=IntValidator(), name=StringValidator(), active=BooleanValidator(), last_active=DateTimeValidator(), ) # sourcestamp _sourcestamp = dict( ssid=IntValidator(), 
branch=NoneOk(StringValidator()), revision=NoneOk(StringValidator()), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), created_at=DateTimeValidator(), patch=NoneOk(DictValidator( body=NoneOk(BinaryValidator()), level=NoneOk(IntValidator()), subdir=NoneOk(StringValidator()), author=NoneOk(StringValidator()), comment=NoneOk(StringValidator()))), ) message['sourcestamps'] = Selector() message['sourcestamps'].add(None, DictValidator( **_sourcestamp )) dbdict['ssdict'] = DictValidator( ssid=IntValidator(), branch=NoneOk(StringValidator()), revision=NoneOk(StringValidator()), patchid=NoneOk(IntValidator()), patch_body=NoneOk(BinaryValidator()), patch_level=NoneOk(IntValidator()), patch_subdir=NoneOk(StringValidator()), patch_author=NoneOk(StringValidator()), patch_comment=NoneOk(StringValidator()), codebase=StringValidator(), repository=StringValidator(), project=StringValidator(), created_at=DateTimeValidator(), ) # builder message['builders'] = Selector() message['builders'].add(None, MessageValidator( events=[b'started', b'stopped'], messageValidator=DictValidator( builderid=IntValidator(), masterid=IntValidator(), name=StringValidator(), ))) dbdict['builderdict'] = DictValidator( id=IntValidator(), masterids=ListValidator(IntValidator()), name=StringValidator(), description=NoneOk(StringValidator()), tags=ListValidator(StringValidator()), ) # worker dbdict['workerdict'] = DictValidator( id=IntValidator(), name=StringValidator(), configured_on=ListValidator( DictValidator( masterid=IntValidator(), builderid=IntValidator(), ) ), paused=BooleanValidator(), graceful=BooleanValidator(), connected_to=ListValidator(IntValidator()), workerinfo=JsonValidator(), ) # buildset _buildset = dict( bsid=IntValidator(), external_idstring=NoneOk(StringValidator()), reason=StringValidator(), submitted_at=IntValidator(), complete=BooleanValidator(), complete_at=NoneOk(IntValidator()), results=NoneOk(IntValidator()), parent_buildid=NoneOk(IntValidator()), parent_relationship=NoneOk(StringValidator()), ) _buildsetEvents = [b'new', b'complete'] message['buildsets'] = Selector() message['buildsets'].add(lambda k: k[-1] == 'new', MessageValidator( events=_buildsetEvents, messageValidator=DictValidator( scheduler=StringValidator(), # only for 'new' sourcestamps=ListValidator( DictValidator( **_sourcestamp )), **_buildset ))) message['buildsets'].add(None, MessageValidator( events=_buildsetEvents, messageValidator=DictValidator( sourcestamps=ListValidator( DictValidator( **_sourcestamp )), **_buildset ))) dbdict['bsdict'] = DictValidator( bsid=IntValidator(), external_idstring=NoneOk(StringValidator()), reason=StringValidator(), sourcestamps=ListValidator(IntValidator()), submitted_at=DateTimeValidator(), complete=BooleanValidator(), complete_at=NoneOk(DateTimeValidator()), results=NoneOk(IntValidator()), parent_buildid=NoneOk(IntValidator()), parent_relationship=NoneOk(StringValidator()), ) # buildrequest message['buildrequests'] = Selector() message['buildrequests'].add(None, MessageValidator( events=[b'new', b'claimed', b'unclaimed'], messageValidator=DictValidator( # TODO: probably wrong! 
brid=IntValidator(), builderid=IntValidator(), bsid=IntValidator(), buildername=StringValidator(), ))) # change message['changes'] = Selector() message['changes'].add(None, MessageValidator( events=[b'new'], messageValidator=DictValidator( changeid=IntValidator(), parent_changeids=ListValidator(IntValidator()), author=StringValidator(), committer=StringValidator(), files=ListValidator(StringValidator()), comments=StringValidator(), revision=NoneOk(StringValidator()), when_timestamp=IntValidator(), branch=NoneOk(StringValidator()), category=NoneOk(StringValidator()), revlink=NoneOk(StringValidator()), properties=SourcedPropertiesValidator(), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), sourcestamp=DictValidator( **_sourcestamp ), ))) dbdict['chdict'] = DictValidator( changeid=IntValidator(), author=StringValidator(), committer=StringValidator(), files=ListValidator(StringValidator()), comments=StringValidator(), revision=NoneOk(StringValidator()), when_timestamp=DateTimeValidator(), branch=NoneOk(StringValidator()), category=NoneOk(StringValidator()), revlink=NoneOk(StringValidator()), properties=SourcedPropertiesValidator(), repository=StringValidator(), project=StringValidator(), codebase=StringValidator(), sourcestampid=IntValidator(), parent_changeids=ListValidator(IntValidator()), ) # changesources dbdict['changesourcedict'] = DictValidator( id=IntValidator(), name=StringValidator(), masterid=NoneOk(IntValidator()), ) # schedulers dbdict['schedulerdict'] = DictValidator( id=IntValidator(), name=StringValidator(), masterid=NoneOk(IntValidator()), enabled=BooleanValidator(), ) # builds _build = dict( buildid=IntValidator(), number=IntValidator(), builderid=IntValidator(), buildrequestid=IntValidator(), workerid=IntValidator(), masterid=IntValidator(), started_at=IntValidator(), complete=BooleanValidator(), complete_at=NoneOk(IntValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), ) _buildEvents = [b'new', b'complete'] message['builds'] = Selector() message['builds'].add(None, MessageValidator( events=_buildEvents, messageValidator=DictValidator( **_build ))) # As build's properties are fetched at DATA API level, # a distinction shall be made as both are not equal. 
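# For illustration only (a sketch inferred from the validators declared just
# below, not something the tests exercise): a DB-layer build row looks like
#   {'id': 1, 'number': 3, 'builderid': 2, 'buildrequestid': 4,
#    'workerid': 5, 'masterid': 6, 'started_at': <UTC datetime>, ...}
# whereas the Data API form of the same build may additionally carry an
# optional 'properties' entry of sourced properties; the two validators
# below differ only in that extra key.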
# Validates DB layer dbdict['dbbuilddict'] = buildbase = DictValidator( id=IntValidator(), number=IntValidator(), builderid=IntValidator(), buildrequestid=IntValidator(), workerid=IntValidator(), masterid=IntValidator(), started_at=DateTimeValidator(), complete_at=NoneOk(DateTimeValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), ) # Validates DATA API layer dbdict['builddict'] = DictValidator( properties=NoneOk(SourcedPropertiesValidator()), **buildbase.keys) # steps _step = dict( stepid=IntValidator(), number=IntValidator(), name=IdentifierValidator(50), buildid=IntValidator(), started_at=IntValidator(), complete=BooleanValidator(), complete_at=NoneOk(IntValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), urls=ListValidator(StringValidator()), hidden=BooleanValidator(), ) _stepEvents = [b'new', b'complete'] message['steps'] = Selector() message['steps'].add(None, MessageValidator( events=_stepEvents, messageValidator=DictValidator( **_step ))) dbdict['stepdict'] = DictValidator( id=IntValidator(), number=IntValidator(), name=IdentifierValidator(50), buildid=IntValidator(), started_at=DateTimeValidator(), complete_at=NoneOk(DateTimeValidator()), state_string=StringValidator(), results=NoneOk(IntValidator()), urls=ListValidator(StringValidator()), hidden=BooleanValidator(), ) # logs _log = dict( logid=IntValidator(), name=IdentifierValidator(50), stepid=IntValidator(), complete=BooleanValidator(), num_lines=IntValidator(), type=IdentifierValidator(1)) _logEvents = ['new', 'complete', 'appended'] # message['log'] dbdict['logdict'] = DictValidator( id=IntValidator(), stepid=IntValidator(), name=StringValidator(), slug=IdentifierValidator(50), complete=BooleanValidator(), num_lines=IntValidator(), type=IdentifierValidator(1)) # external functions def _verify(testcase, validator, name, object): msgs = list(validator.validate(name, object)) if msgs: msg = "; ".join(msgs) if testcase: testcase.fail(msg) else: raise AssertionError(msg) def verifyMessage(testcase, routingKey, message_): # the validator is a Selector wrapping a MessageValidator, so we need to # pass (arg, (routingKey, message)), where the routing key is the arg # the "type" of the message is identified by last path name # -1 being the event, and -2 the id. validator = message[bytes2unicode(routingKey[-3])] _verify(testcase, validator, '', (routingKey, (routingKey, message_))) def verifyDbDict(testcase, type, value): _verify(testcase, dbdict[type], type, value) def verifyData(testcase, entityType, options, value): _verify(testcase, entityType, entityType.name, value) def verifyType(testcase, name, value, validator): _verify(testcase, validator, name, value) buildbot-2.6.0/master/buildbot/test/util/warnings.py000066400000000000000000000076541361162603000225720ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members # Utility functions for catching Python warnings. # Twisted's TestCase already gathers produced warnings # (see t.t.u.T.flushWarnings()), however Twisted's implementation doesn't # allow fine-grained control over caught warnings. # This implementation uses context wrapper style to specify interesting # block of code to catch warnings, which allows to easily specify which # exactly statements should generate warnings and which shouldn't. # Also this implementation allows nested checks. import contextlib import re import warnings @contextlib.contextmanager def _recordWarnings(category, output): assert isinstance(output, list) unrelated_warns = [] with warnings.catch_warnings(record=True) as all_warns: # Cause all warnings of the provided category to always be # triggered. warnings.simplefilter("always", category) yield # Filter warnings. for w in all_warns: if isinstance(w.message, category): output.append(w) else: unrelated_warns.append(w) # Re-raise unrelated warnings. for w in unrelated_warns: warnings.warn_explicit(w.message, w.category, w.filename, w.lineno) @contextlib.contextmanager def assertProducesWarnings(filter_category, num_warnings=None, messages_patterns=None, message_pattern=None): if messages_patterns is not None: assert message_pattern is None assert num_warnings is None num_warnings = len(messages_patterns) else: assert num_warnings is not None or message_pattern is not None warns = [] with _recordWarnings(filter_category, warns): yield if num_warnings is not None: assert len(warns) == num_warnings, \ "Number of occurred warnings is not correct. " \ "Expected {num} warnings, received {num_received}:\n" \ "{warns}".format( num=num_warnings, num_received=len(warns), warns="\n".join(map(str, warns))) num_warnings = len(warns) if messages_patterns is None and message_pattern is not None: messages_patterns = [message_pattern] * num_warnings if messages_patterns is not None: for w, pattern in zip(warns, messages_patterns): # TODO: Maybe don't use regexp, but use simple substring check? assert re.search(pattern, str(w.message)), \ "Warning pattern doesn't match. Expected pattern:\n" \ "{pattern}\n" \ "Received message:\n" \ "{message}\n" \ "All gathered warnings:\n" \ "{warns}".format(pattern=pattern, message=w.message, warns="\n".join(map(str, warns))) @contextlib.contextmanager def assertProducesWarning(filter_category, message_pattern=None): with assertProducesWarnings(filter_category, num_warnings=1, message_pattern=message_pattern): yield @contextlib.contextmanager def assertNotProducesWarnings(filter_category): with assertProducesWarnings(filter_category, 0): yield @contextlib.contextmanager def ignoreWarning(category): with _recordWarnings(category, []): yield buildbot-2.6.0/master/buildbot/test/util/www.py000066400000000000000000000171271361162603000215620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import pkg_resources from urllib.parse import parse_qs from urllib.parse import unquote as urlunquote from uuid import uuid1 import mock from twisted.internet import defer from twisted.python.compat import NativeStringIO from twisted.web import server from buildbot.test.fake import fakemaster from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import authz class FakeSession: def __init__(self): self.user_info = {"anonymous": True} def updateSession(self, request): pass class FakeRequest: written = b'' finished = False redirected_to = None rendered_resource = None failure = None method = b'GET' path = b'/req.path' responseCode = 200 def __init__(self, path=None): self.headers = {} self.input_headers = {} self.prepath = [] x = path.split(b'?', 1) if len(x) == 1: self.path = path self.args = {} else: path, argstring = x self.path = path self.args = parse_qs(argstring, 1) self.uri = self.path self.postpath = [] for p in path[1:].split(b'/'): path = urlunquote(bytes2unicode(p)) self.postpath.append(unicode2bytes(path)) self.deferred = defer.Deferred() def write(self, data): self.written = self.written + data def redirect(self, url): self.redirected_to = url def render(self, rsrc): rendered_resource = rsrc self.deferred.callback(rendered_resource) def finish(self): self.finished = True if self.redirected_to is not None: self.deferred.callback(dict(redirected=self.redirected_to)) else: self.deferred.callback(self.written) def setResponseCode(self, code, text=None): # twisted > 16 started to assert this assert isinstance(code, int) self.responseCode = code self.responseText = text def setHeader(self, hdr, value): assert isinstance(hdr, bytes) assert isinstance(value, bytes) self.headers.setdefault(hdr, []).append(value) def getHeader(self, key): assert isinstance(key, bytes) return self.input_headers.get(key) def processingFailed(self, f): self.deferred.errback(f) def notifyFinish(self): d = defer.Deferred() @self.deferred.addBoth def finished(res): d.callback(res) return res return d def getSession(self): return self.session class RequiresWwwMixin: # mix this into a TestCase to skip if buildbot-www is not installed if not list(pkg_resources.iter_entry_points('buildbot.www', 'base')): if 'BUILDBOT_TEST_REQUIRE_WWW' in os.environ: raise RuntimeError('$BUILDBOT_TEST_REQUIRE_WWW is set but ' 'buildbot-www is not installed') skip = 'buildbot-www not installed' class WwwTestMixin(RequiresWwwMixin): UUID = str(uuid1()) def make_master(self, url=None, **kwargs): master = fakemaster.make_master(self, wantData=True) self.master = master master.www = mock.Mock() # to handle the resourceNeedsReconfigs call master.www.getUserInfos = lambda _: getattr( self.master.session, "user_info", {"anonymous": True}) cfg = dict(port=None, auth=auth.NoAuth(), authz=authz.Authz()) cfg.update(kwargs) master.config.www = cfg if url is not None: master.config.buildbotURL = url self.master.session = FakeSession() self.master.authz = cfg["authz"] self.master.authz.setMaster(self.master) return master def make_request(self, path=None, method=b'GET'): self.request = FakeRequest(path) self.request.session = self.master.session self.request.method = method return self.request def 
render_resource(self, rsrc, path=b'/', accept=None, method=b'GET', origin=None, access_control_request_method=None, extraHeaders=None, request=None): if not request: request = self.make_request(path, method=method) if accept: request.input_headers[b'accept'] = accept if origin: request.input_headers[b'origin'] = origin if access_control_request_method: request.input_headers[b'access-control-request-method'] = \ access_control_request_method if extraHeaders is not None: request.input_headers.update(extraHeaders) rv = rsrc.render(request) if rv != server.NOT_DONE_YET: if rv is not None: request.write(rv) request.finish() return request.deferred @defer.inlineCallbacks def render_control_resource(self, rsrc, path=b'/', params=None, requestJson=None, action="notfound", id=None, content_type=b'application/json'): # pass *either* a request or postpath if params is None: params = {} id = id or self.UUID request = self.make_request(path) request.method = b"POST" request.content = NativeStringIO(requestJson or json.dumps( {"jsonrpc": "2.0", "method": action, "params": params, "id": id})) request.input_headers = {b'content-type': content_type} rv = rsrc.render(request) if rv == server.NOT_DONE_YET: rv = yield request.deferred res = json.loads(bytes2unicode(rv)) self.assertIn("jsonrpc", res) self.assertEqual(res["jsonrpc"], "2.0") if not requestJson: # requestJson is used for invalid requests, so don't expect ID self.assertIn("id", res) self.assertEqual(res["id"], id) def assertRequest(self, content=None, contentJson=None, contentType=None, responseCode=None, contentDisposition=None, headers=None): if headers is None: headers = {} got, exp = {}, {} if content is not None: got['content'] = self.request.written exp['content'] = content if contentJson is not None: got['contentJson'] = json.loads( bytes2unicode(self.request.written)) exp['contentJson'] = contentJson if contentType is not None: got['contentType'] = self.request.headers[b'content-type'] exp['contentType'] = [contentType] if responseCode is not None: got['responseCode'] = str(self.request.responseCode) exp['responseCode'] = str(responseCode) for header, value in headers.items(): got[header] = self.request.headers.get(header) exp[header] = value self.assertEqual(got, exp) buildbot-2.6.0/master/buildbot/util/000077500000000000000000000000001361162603000173755ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/util/__init__.py000066400000000000000000000341261361162603000215140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import calendar import datetime import itertools import json import locale import re import sys import textwrap import time from builtins import bytes from urllib.parse import urlsplit from urllib.parse import urlunsplit import dateutil.tz from twisted.python import reflect from twisted.python.deprecate import deprecatedModuleAttribute from twisted.python.versions import Version from zope.interface import implementer from buildbot.interfaces import IConfigured from buildbot.util.giturlparse import giturlparse from buildbot.util.misc import deferredLocked from ._notifier import Notifier def naturalSort(array): array = array[:] def try_int(s): try: return int(s) except ValueError: return s def key_func(item): return [try_int(s) for s in re.split(r'(\d+)', item)] # prepend integer keys to each element, sort them, then strip the keys keyed_array = sorted([(key_func(i), i) for i in array]) array = [i[1] for i in keyed_array] return array def flattened_iterator(l, types=(list, tuple)): """ Generator for a list/tuple that potentially contains nested/lists/tuples of arbitrary nesting that returns every individual non-list/tuple element. In other words, [(5, 6, [8, 3]), 2, [2, 1, (3, 4)]] will yield 5, 6, 8, 3, 2, 2, 1, 3, 4 This is safe to call on something not a list/tuple - the original input is yielded. """ if not isinstance(l, types): yield l return for element in l: for sub_element in flattened_iterator(element, types): yield sub_element def flatten(l, types=(list, )): """ Given a list/tuple that potentially contains nested lists/tuples of arbitrary nesting, flatten into a single dimension. In other words, turn [(5, 6, [8, 3]), 2, [2, 1, (3, 4)]] into [5, 6, 8, 3, 2, 2, 1, 3, 4] This is safe to call on something not a list/tuple - the original input is returned as a list """ # For backwards compatibility, this returned a list, not an iterable. # Changing to return an iterable could break things. if not isinstance(l, types): return l return list(flattened_iterator(l, types)) def now(_reactor=None): if _reactor and hasattr(_reactor, "seconds"): return _reactor.seconds() return time.time() def formatInterval(eta): eta_parts = [] if eta > 3600: eta_parts.append("%d hrs" % (eta / 3600)) eta %= 3600 if eta > 60: eta_parts.append("%d mins" % (eta / 60)) eta %= 60 eta_parts.append("%d secs" % eta) return ", ".join(eta_parts) def fuzzyInterval(seconds): """ Convert time interval specified in seconds into fuzzy, human-readable form """ if seconds <= 1: return "a moment" if seconds < 20: return "{:d} seconds".format(seconds) if seconds < 55: return "{:d} seconds".format(round(seconds / 10.) * 10) minutes = round(seconds / 60.) if minutes == 1: return "a minute" if minutes < 20: return "{:d} minutes".format(minutes) if minutes < 55: return "{:d} minutes".format(round(minutes / 10.) * 10) hours = round(minutes / 60.) 
if hours == 1: return "an hour" if hours < 24: return "{:d} hours".format(hours) days = (hours + 6) // 24 if days == 1: return "a day" if days < 30: return "{:d} days".format(days) months = int((days + 10) / 30.5) if months == 1: return "a month" if months < 12: return "{} months".format(months) years = round(days / 365.25) if years == 1: return "a year" return "{} years".format(years) @implementer(IConfigured) class ComparableMixin: compare_attrs = () class _None: pass def __hash__(self): compare_attrs = [] reflect.accumulateClassList( self.__class__, 'compare_attrs', compare_attrs) alist = [self.__class__] + \ [getattr(self, name, self._None) for name in compare_attrs] return hash(tuple(map(str, alist))) def _cmp_common(self, them): if type(self) != type(them): return (False, None, None) if self.__class__ != them.__class__: return (False, None, None) compare_attrs = [] reflect.accumulateClassList( self.__class__, 'compare_attrs', compare_attrs) self_list = [getattr(self, name, self._None) for name in compare_attrs] them_list = [getattr(them, name, self._None) for name in compare_attrs] return (True, self_list, them_list) def __eq__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return False return self_list == them_list def __ne__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return True return self_list != them_list def __lt__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return False return self_list < them_list def __le__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return False return self_list <= them_list def __gt__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return False return self_list > them_list def __ge__(self, them): (isComparable, self_list, them_list) = self._cmp_common(them) if not isComparable: return False return self_list >= them_list def getConfigDict(self): compare_attrs = [] reflect.accumulateClassList( self.__class__, 'compare_attrs', compare_attrs) return {k: getattr(self, k) for k in compare_attrs if hasattr(self, k) and k not in ("passwd", "password")} def diffSets(old, new): if not isinstance(old, set): old = set(old) if not isinstance(new, set): new = set(new) return old - new, new - old # Remove potentially harmful characters from builder name if it is to be # used as the build dir. badchars_map = bytes.maketrans(b"\t !#$%&'()*+,./:;<=>?@[\\]^{|}~", b"______________________________") def safeTranslate(s): if isinstance(s, str): s = s.encode('utf8') return s.translate(badchars_map) def none_or_str(x): if x is not None and not isinstance(x, str): return str(x) return x def unicode2bytes(x, encoding='utf-8', errors='strict'): if isinstance(x, str): x = x.encode(encoding, errors) return x def bytes2unicode(x, encoding='utf-8', errors='strict'): if isinstance(x, (str, type(None))): return x return str(x, encoding, errors) _hush_pyflakes = [json] deprecatedModuleAttribute( Version("buildbot", 0, 9, 4), message="Use json from the standard library instead.", moduleName="buildbot.util", name="json", ) def toJson(obj): if isinstance(obj, datetime.datetime): return datetime2epoch(obj) # changes and schedulers consider None to be a legitimate name for a branch, # which makes default function keyword arguments hard to handle. This value # is always false. 
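# An illustrative sketch (hypothetical helpers, not part of this module) of
# the sentinel-default pattern this enables:
#
#   def changes_for(branch=NotABranch):
#       if branch is NotABranch:      # caller passed nothing at all
#           return all_changes()
#       return changes_on(branch)     # branch may legitimately be None
#
# The class below is immediately replaced by a single instance, so the
# module-level name NotABranch is that falsy sentinel object.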
class NotABranch: def __bool__(self): return False NotABranch = NotABranch() # time-handling methods # this used to be a custom class; now it's just an instance of dateutil's class UTC = dateutil.tz.tzutc() def epoch2datetime(epoch): """Convert a UNIX epoch time to a datetime object, in the UTC timezone""" if epoch is not None: return datetime.datetime.fromtimestamp(epoch, tz=UTC) return None def datetime2epoch(dt): """Convert a non-naive datetime object to a UNIX epoch timestamp""" if dt is not None: return calendar.timegm(dt.utctimetuple()) return None # TODO: maybe "merge" with formatInterval? def human_readable_delta(start, end): """ Return a string of human readable time delta. """ start_date = datetime.datetime.fromtimestamp(start) end_date = datetime.datetime.fromtimestamp(end) delta = end_date - start_date result = [] if delta.days > 0: result.append('%d days' % (delta.days,)) if delta.seconds > 0: hours = int(delta.seconds / 3600) if hours > 0: result.append('%d hours' % (hours,)) minutes = int((delta.seconds - hours * 3600) / 60) if minutes: result.append('%d minutes' % (minutes,)) seconds = delta.seconds % 60 if seconds > 0: result.append('%d seconds' % (seconds,)) if result: return ', '.join(result) return 'super fast' def makeList(input): if isinstance(input, str): return [input] elif input is None: return [] return list(input) def in_reactor(f): """decorate a function by running it with maybeDeferred in a reactor""" def wrap(*args, **kwargs): from twisted.internet import reactor, defer result = [] def _async(): d = defer.maybeDeferred(f, *args, **kwargs) @d.addErrback def eb(f): f.printTraceback(file=sys.stderr) @d.addBoth def do_stop(r): result.append(r) reactor.stop() reactor.callWhenRunning(_async) reactor.run() return result[0] wrap.__doc__ = f.__doc__ wrap.__name__ = f.__name__ wrap._orig = f # for tests return wrap def string2boolean(str): return { b'on': True, b'true': True, b'yes': True, b'1': True, b'off': False, b'false': False, b'no': False, b'0': False, }[str.lower()] def asyncSleep(delay, reactor=None): from twisted.internet import defer from twisted.internet import reactor as internet_reactor if reactor is None: reactor = internet_reactor d = defer.Deferred() reactor.callLater(delay, d.callback, None) return d def check_functional_environment(config): try: locale.getdefaultlocale() except (KeyError, ValueError) as e: config.error("\n".join([ "Your environment has incorrect locale settings. This means python cannot handle strings safely.", " Please check 'LANG', 'LC_CTYPE', 'LC_ALL' and 'LANGUAGE'" " are either unset or set to a valid locale.", str(e) ])) _netloc_url_re = re.compile(r':[^@]*@') def stripUrlPassword(url): parts = list(urlsplit(url)) parts[1] = _netloc_url_re.sub(':xxxx@', parts[1]) return urlunsplit(parts) def join_list(maybeList): if isinstance(maybeList, (list, tuple)): return ' '.join(bytes2unicode(s) for s in maybeList) return bytes2unicode(maybeList) def command_to_string(command): words = command if isinstance(words, (bytes, str)): words = words.split() try: len(words) except (AttributeError, TypeError): # WithProperties and Property don't have __len__ # For old-style classes instances AttributeError raised, # for new-style classes instances - TypeError. 
return None # flatten any nested lists words = flatten(words, (list, tuple)) # strip instances and other detritus (which can happen if a # description is requested before rendering) stringWords = [] for w in words: if isinstance(w, (bytes, str)): # If command was bytes, be gentle in # trying to covert it. w = bytes2unicode(w, errors="replace") stringWords.append(w) words = stringWords if not words: return None if len(words) < 3: rv = "'%s'" % (' '.join(words)) else: rv = "'%s ...'" % (' '.join(words[:2])) return rv def rewrap(text, width=None): """ Rewrap text for output to the console. Removes common indentation and rewraps paragraphs according to the console width. Line feeds between paragraphs preserved. Formatting of paragraphs that starts with additional indentation preserved. """ if width is None: width = 80 # Remove common indentation. text = textwrap.dedent(text) def needs_wrapping(line): # Line always non-empty. return not line[0].isspace() # Split text by lines and group lines that comprise paragraphs. wrapped_text = "" for do_wrap, lines in itertools.groupby(text.splitlines(True), key=needs_wrapping): paragraph = ''.join(lines) if do_wrap: paragraph = textwrap.fill(paragraph, width) wrapped_text += paragraph return wrapped_text def dictionary_merge(a, b): """merges dictionary b into a Like dict.update, but recursive """ for key, value in b.items(): if key in a and isinstance(a[key], dict) and isinstance(value, dict): dictionary_merge(a[key], b[key]) continue a[key] = b[key] return a __all__ = [ 'naturalSort', 'now', 'formatInterval', 'ComparableMixin', 'safeTranslate', 'none_or_str', 'NotABranch', 'deferredLocked', 'UTC', 'diffSets', 'makeList', 'in_reactor', 'string2boolean', 'check_functional_environment', 'human_readable_delta', 'rewrap', 'Notifier', "giturlparse", ] buildbot-2.6.0/master/buildbot/util/_notifier.py000066400000000000000000000027461361162603000217360ustar00rootroot00000000000000# Copyright Buildbot Team Members # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from twisted.internet.defer import Deferred class Notifier: def __init__(self): self._waiters = [] def wait(self): d = Deferred() self._waiters.append(d) return d def notify(self, result): waiters, self._waiters = self._waiters, [] for waiter in waiters: waiter.callback(result) def __bool__(self): return bool(self._waiters) buildbot-2.6.0/master/buildbot/util/bbcollections.py000066400000000000000000000025631361162603000225770ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # this is here for compatibility from collections import defaultdict assert defaultdict class KeyedSets: def __init__(self): self.d = dict() def add(self, key, value): if key not in self.d: self.d[key] = set() self.d[key].add(value) def discard(self, key, value): if key in self.d: self.d[key].discard(value) if not self.d[key]: del self.d[key] def __contains__(self, key): return key in self.d def __getitem__(self, key): return self.d.get(key, set()) def pop(self, key): if key in self.d: return self.d.pop(key) return set() buildbot-2.6.0/master/buildbot/util/codebase.py000066400000000000000000000032571361162603000215230ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer class AbsoluteSourceStampsMixin: # record changes and revisions per codebase _lastCodebases = None @defer.inlineCallbacks def getCodebaseDict(self, codebase): assert self.codebases if self._lastCodebases is None: self._lastCodebases = yield self.getState('lastCodebases', {}) # may fail with KeyError return self._lastCodebases.get(codebase, self.codebases[codebase]) @defer.inlineCallbacks def recordChange(self, change): codebase = yield self.getCodebaseDict(change.codebase) lastChange = codebase.get('lastChange', -1) if change.number > lastChange: self._lastCodebases[change.codebase] = { 'repository': change.repository, 'branch': change.branch, 'revision': change.revision, 'lastChange': change.number } yield self.setState('lastCodebases', self._lastCodebases) buildbot-2.6.0/master/buildbot/util/config.py000066400000000000000000000044051361162603000212170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.cred.checkers import FilePasswordDB from twisted.python.components import registerAdapter from zope.interface import implementer from buildbot.interfaces import IConfigured @implementer(IConfigured) class _DefaultConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return self.value registerAdapter(_DefaultConfigured, object, IConfigured) @implementer(IConfigured) class _ListConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return [IConfigured(e).getConfigDict() for e in self.value] registerAdapter(_ListConfigured, list, IConfigured) @implementer(IConfigured) class _DictConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return {k: IConfigured(v).getConfigDict() for k, v in self.value.items()} registerAdapter(_DictConfigured, dict, IConfigured) @implementer(IConfigured) class _SREPatternConfigured: def __init__(self, value): self.value = value def getConfigDict(self): return dict(name="re", pattern=self.value.pattern) registerAdapter(_SREPatternConfigured, type(re.compile("")), IConfigured) @implementer(IConfigured) class ConfiguredMixin: def getConfigDict(self): return {'name': self.name} @implementer(IConfigured) class _FilePasswordDBConfigured: def __init__(self, value): pass def getConfigDict(self): return {'type': 'file'} registerAdapter(_FilePasswordDBConfigured, FilePasswordDB, IConfigured) buildbot-2.6.0/master/buildbot/util/croniter.py000066400000000000000000000242521361162603000216010ustar00rootroot00000000000000#!/usr/bin/python # Copied from croniter # https://github.com/taichino/croniter # Licensed under MIT license # Pyflakes warnings corrected # -*- coding: utf-8 -*- import re from datetime import datetime from time import mktime from time import time from dateutil.relativedelta import relativedelta search_re = re.compile(r'^([^-]+)-([^-/]+)(/(.*))?$') only_int_re = re.compile(r'^\d+$') any_int_re = re.compile(r'^\d+') star_or_int_re = re.compile(r'^(\d+|\*)$') __all__ = ('croniter',) class croniter: RANGES = ( (0, 59), (0, 23), (1, 31), (1, 12), (0, 6), (0, 59) ) DAYS = ( 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 ) ALPHACONV = ( {}, {}, {}, {'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12}, {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 0}, {} ) LOWMAP = ( {}, {}, {0: 1}, {0: 1}, {7: 0}, {}, ) bad_length = 'Exactly 5 or 6 columns has to be specified for iterator' \ 'expression.' 
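# Illustrative usage (a sketch mirroring the __main__ example at the bottom
# of this module):
#
#   itr = croniter('0 0 1 * *', datetime(2010, 1, 25))
#   itr.get_next(datetime)   # -> midnight on the first of the next month
#
# get_prev() walks backwards in the same way, and both accept either
# float (epoch seconds) or datetime as the return type.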
def __init__(self, expr_format, start_time=time()): if isinstance(start_time, datetime): start_time = mktime(start_time.timetuple()) self.cur = start_time self.exprs = expr_format.split() if len(self.exprs) != 5 and len(self.exprs) != 6: raise ValueError(self.bad_length) expanded = [] for i, expr in enumerate(self.exprs): e_list = expr.split(',') res = [] while e_list: e = e_list.pop() t = re.sub(r'^\*(/.+)$', r'%d-%d\1' % (self.RANGES[i][0], self.RANGES[i][1]), str(e)) m = search_re.search(t) if m: (low, high, step) = m.group(1), m.group(2), m.group(4) or 1 if not any_int_re.search(low): low = self.ALPHACONV[i][low.lower()] if not any_int_re.search(high): high = self.ALPHACONV[i][high.lower()] if (not low or not high or int(low) > int(high) or not only_int_re.search(str(step))): raise ValueError( "[%s] is not acceptable" % expr_format) for j in range(int(low), int(high) + 1): if j % int(step) == 0: e_list.append(j) else: if not star_or_int_re.search(t): t = self.ALPHACONV[i][t.lower()] try: t = int(t) except (ValueError, TypeError): pass if t in self.LOWMAP[i]: t = self.LOWMAP[i][t] if t != '*' and (int(t) < self.RANGES[i][0] or int(t) > self.RANGES[i][1]): raise ValueError( "[%s] is not acceptable, out of range" % expr_format) res.append(t) res.sort() expanded.append( ['*'] if (len(res) == 1 and res[0] == '*') else res) self.expanded = expanded def get_next(self, ret_type=float): return self._get_next(ret_type, is_prev=False) def get_prev(self, ret_type=float): return self._get_next(ret_type, is_prev=True) def _get_next(self, ret_type=float, is_prev=False): expanded = self.expanded[:] if ret_type not in (float, datetime): raise TypeError("Invalid ret_type, only 'float' or 'datetime' " "is acceptable.") if expanded[2][0] != '*' and expanded[4][0] != '*': bak = expanded[4] expanded[4] = ['*'] t1 = self._calc(self.cur, expanded, is_prev) expanded[4] = bak expanded[2] = ['*'] t2 = self._calc(self.cur, expanded, is_prev) if not is_prev: result = t1 if t1 < t2 else t2 else: result = t1 if t1 > t2 else t2 else: result = self._calc(self.cur, expanded, is_prev) self.cur = result if ret_type == datetime: result = datetime.fromtimestamp(result) return result def _calc(self, now, expanded, is_prev): if is_prev: nearest_diff_method = self._get_prev_nearest_diff sign = -1 else: nearest_diff_method = self._get_next_nearest_diff sign = 1 offset = 1 if len(expanded) == 6 else 60 dst = now = datetime.fromtimestamp(now + sign * offset) # BUILDBOT: unused 'day' omitted due to pyflakes warning month, year = dst.month, dst.year current_year = now.year DAYS = self.DAYS def proc_month(d): if expanded[3][0] != '*': diff_month = nearest_diff_method(d.month, expanded[3], 12) days = DAYS[month - 1] if month == 2 and self.is_leap(year): days += 1 reset_day = days if is_prev else 1 if diff_month is not None and diff_month != 0: if is_prev: d += relativedelta(months=diff_month) else: d += relativedelta(months=diff_month, day=reset_day, hour=0, minute=0, second=0) return True, d return False, d def proc_day_of_month(d): if expanded[2][0] != '*': days = DAYS[month - 1] if month == 2 and self.is_leap(year): days += 1 diff_day = nearest_diff_method(d.day, expanded[2], days) if diff_day is not None and diff_day != 0: if is_prev: d += relativedelta(days=diff_day) else: d += relativedelta(days=diff_day, hour=0, minute=0, second=0) return True, d return False, d def proc_day_of_week(d): if expanded[4][0] != '*': diff_day_of_week = nearest_diff_method( d.isoweekday() % 7, expanded[4], 7) if diff_day_of_week is not None and 
diff_day_of_week != 0: if is_prev: d += relativedelta(days=diff_day_of_week) else: d += relativedelta(days=diff_day_of_week, hour=0, minute=0, second=0) return True, d return False, d def proc_hour(d): if expanded[1][0] != '*': diff_hour = nearest_diff_method(d.hour, expanded[1], 24) if diff_hour is not None and diff_hour != 0: if is_prev: d += relativedelta(hours=diff_hour) else: d += relativedelta(hours=diff_hour, minute=0, second=0) return True, d return False, d def proc_minute(d): if expanded[0][0] != '*': diff_min = nearest_diff_method(d.minute, expanded[0], 60) if diff_min is not None and diff_min != 0: if is_prev: d += relativedelta(minutes=diff_min) else: d += relativedelta(minutes=diff_min, second=0) return True, d return False, d def proc_second(d): if len(expanded) == 6: if expanded[5][0] != '*': diff_sec = nearest_diff_method(d.second, expanded[5], 60) if diff_sec is not None and diff_sec != 0: d += relativedelta(seconds=diff_sec) return True, d else: d += relativedelta(second=0) return False, d if is_prev: procs = [proc_second, proc_minute, proc_hour, proc_day_of_week, proc_day_of_month, proc_month] else: procs = [proc_month, proc_day_of_month, proc_day_of_week, proc_hour, proc_minute, proc_second] while abs(year - current_year) <= 1: next = False for proc in procs: (changed, dst) = proc(dst) if changed: next = True break if next: continue return mktime(dst.timetuple()) raise("failed to find prev date") def _get_next_nearest(self, x, to_check): small = [item for item in to_check if item < x] large = [item for item in to_check if item >= x] large.extend(small) return large[0] def _get_prev_nearest(self, x, to_check): small = [item for item in to_check if item <= x] large = [item for item in to_check if item > x] small.reverse() large.reverse() small.extend(large) return small[0] def _get_next_nearest_diff(self, x, to_check, range_val): for i, d in enumerate(to_check): if d >= x: return d - x return to_check[0] - x + range_val def _get_prev_nearest_diff(self, x, to_check, range_val): candidates = to_check[:] candidates.reverse() for d in candidates: if d <= x: return d - x return (candidates[0]) - x - range_val def is_leap(self, year): return year % 400 == 0 or (year % 4 == 0 and year % 100 != 0) if __name__ == '__main__': base = datetime(2010, 1, 25) itr = croniter('0 0 1 * *', base) n1 = itr.get_next(datetime) print(n1) buildbot-2.6.0/master/buildbot/util/debounce.py000066400000000000000000000073721361162603000215440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
#
# Copyright Buildbot Team Members

import functools

from twisted.internet import defer
from twisted.python import log

# debounce phases
PH_IDLE = 0
PH_WAITING = 1
PH_RUNNING = 2
PH_RUNNING_QUEUED = 3


class Debouncer:
    __slots__ = ['phase', 'timer', 'wait', 'function', 'stopped',
                 'completeDeferreds', 'get_reactor']

    def __init__(self, wait, function, get_reactor):
        # time to wait
        self.wait = wait
        # zero-argument callable to invoke
        self.function = function
        # current phase
        self.phase = PH_IDLE
        # Twisted timer for waiting
        self.timer = None
        # true if this instance is stopped
        self.stopped = False
        # deferreds to fire when the call is complete
        self.completeDeferreds = None
        # for tests
        self.get_reactor = get_reactor

    def __call__(self):
        if self.stopped:
            return
        phase = self.phase
        if phase == PH_IDLE:
            self.timer = self.get_reactor().callLater(self.wait, self.invoke)
            self.phase = PH_WAITING
        elif phase == PH_RUNNING:
            self.phase = PH_RUNNING_QUEUED
        else:  # phase == PH_WAITING or phase == PH_RUNNING_QUEUED:
            pass

    def __repr__(self):
        return "<debounced %r, wait=%r, phase=%d>" % (self.function, self.wait, self.phase)

    def invoke(self):
        self.phase = PH_RUNNING
        self.completeDeferreds = []
        d = defer.maybeDeferred(self.function)
        d.addErrback(log.err, 'from debounced function:')

        @d.addCallback
        def retry(_):
            queued = self.phase == PH_RUNNING_QUEUED
            self.phase = PH_IDLE
            while self.completeDeferreds:
                self.completeDeferreds.pop(0).callback(None)
            if queued:
                self.__call__()

    def start(self):
        self.stopped = False

    def stop(self):
        self.stopped = True
        if self.phase == PH_WAITING:
            self.timer.cancel()
            self.invoke()
            # fall through with PH_RUNNING
        if self.phase in (PH_RUNNING, PH_RUNNING_QUEUED):
            d = defer.Deferred()
            self.completeDeferreds.append(d)
            return d
        return defer.succeed(None)


class _Descriptor:

    def __init__(self, fn, wait, attrName, get_reactor):
        self.fn = fn
        self.wait = wait
        self.attrName = attrName
        self.get_reactor = get_reactor

    def __get__(self, instance, cls):
        try:
            db = getattr(instance, self.attrName)
        except AttributeError:
            db = Debouncer(self.wait, functools.partial(self.fn, instance),
                           functools.partial(self.get_reactor, instance))
            setattr(instance, self.attrName, db)
        return db


def _get_reactor_from_master(o):
    return o.master.reactor


def method(wait, get_reactor=_get_reactor_from_master):
    def wrap(fn):
        stateName = "__debounce_" + fn.__name__ + "__"
        return _Descriptor(fn, wait, stateName, get_reactor)
    return wrap
buildbot-2.6.0/master/buildbot/util/deferwaiter.py000066400000000000000000000061321361162603000222520ustar00rootroot00000000000000# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
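# --- Usage sketch: buildbot.util.debounce ------------------------------------
# A hedged illustration of the debounce.method decorator defined above.  The
# Poller and _FakeMaster classes below are hypothetical; the only real
# requirement is that instances expose whatever get_reactor() needs (by
# default, self.master.reactor).
def _debounce_usage_sketch(reactor):
    class _FakeMaster:
        def __init__(self, reactor):
            self.reactor = reactor

    class Poller:
        def __init__(self, reactor):
            self.master = _FakeMaster(reactor)
            self.polls = 0

        @method(wait=5)          # collapse bursts of calls into one, 5s later
        def poll(self):
            self.polls += 1

    p = Poller(reactor)
    p.poll()         # arms the timer (PH_IDLE -> PH_WAITING)
    p.poll()         # coalesced with the already pending call
    p.poll.stop()    # returns a Deferred that fires once any pending call is done
    return p
# ------------------------------------------------------------------------------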
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.util import Notifier class DeferWaiter: """ This class manages a set of Deferred objects and allows waiting for their completion """ def __init__(self): self._waited = set() self._finish_notifier = Notifier() def _finished(self, _, d): self._waited.remove(id(d)) if not self._waited: self._finish_notifier.notify(None) def add(self, d): if not isinstance(d, defer.Deferred): return self._waited.add(id(d)) d.addBoth(self._finished, d) @defer.inlineCallbacks def wait(self): if not self._waited: return yield self._finish_notifier.wait() class RepeatedActionHandler: """ This class handles a repeated action such as submitting keepalive requests. It integrates with DeferWaiter to correctly control shutdown of such process. """ def __init__(self, reactor, waiter, interval, action, start_timer_after_action_completes=False): self._reactor = reactor self._waiter = waiter self._interval = interval self._action = action self._enabled = False self._timer = None self._start_timer_after_action_completes = start_timer_after_action_completes def setInterval(self, interval): self._interval = interval def start(self): if self._enabled: return self._enabled = True self._start_timer() def stop(self): if not self._enabled: return self._enabled = False if self._timer and self._timer.active(): self._timer.cancel() self._timer = None def _start_timer(self): self._timer = self._reactor.callLater(self._interval, self._handle_timeout) @defer.inlineCallbacks def _do_action(self): try: yield self._action() except Exception as e: log.err(e, 'Got exception in RepeatedActionHandler') def _handle_timeout(self): self._waiter.add(self._handle_action()) @defer.inlineCallbacks def _handle_action(self): if self._start_timer_after_action_completes: yield self._do_action() if self._enabled: self._start_timer() if not self._start_timer_after_action_completes: yield self._do_action() buildbot-2.6.0/master/buildbot/util/eventual.py000066400000000000000000000053271361162603000216010ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # copied from foolscap from twisted.internet import defer from twisted.internet import reactor from twisted.python import log class _SimpleCallQueue: _reactor = reactor def __init__(self): self._events = [] self._flushObservers = [] self._timer = None self._in_turn = False def append(self, cb, args, kwargs): self._events.append((cb, args, kwargs)) if not self._timer: self._timer = self._reactor.callLater(0, self._turn) def _turn(self): self._timer = None self._in_turn = True # flush all the messages that are currently in the queue. If anything # gets added to the queue while we're doing this, those events will # be put off until the next turn. 
events, self._events = self._events, [] for cb, args, kwargs in events: try: cb(*args, **kwargs) except Exception: log.err() self._in_turn = False if self._events and not self._timer: self._timer = self._reactor.callLater(0, self._turn) if not self._events: observers, self._flushObservers = self._flushObservers, [] for o in observers: o.callback(None) def flush(self): if not self._events and not self._in_turn: return defer.succeed(None) d = defer.Deferred() self._flushObservers.append(d) return d _theSimpleQueue = _SimpleCallQueue() def eventually(cb, *args, **kwargs): _theSimpleQueue.append(cb, args, kwargs) def fireEventually(value=None): d = defer.Deferred() eventually(d.callback, value) return d def flushEventualQueue(_ignored=None): return _theSimpleQueue.flush() def _setReactor(r=None): # This sets the reactor used to schedule future events to r. If r is None # (the default), the reactor is reset to its default value. # This should only be used for unit tests. if r is None: r = reactor _theSimpleQueue._reactor = r buildbot-2.6.0/master/buildbot/util/git.py000066400000000000000000000302171361162603000205350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
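# --- Usage sketch: buildbot.util.eventual -------------------------------------
# A small, hedged illustration of the helpers defined above: eventually()
# queues a call for a later reactor turn, and flushEventualQueue() lets tests
# wait until everything queued that way has run.  The callbacks and values
# below are arbitrary examples.
def _eventual_usage_sketch():
    collected = []

    eventually(collected.append, 1)   # runs on a later reactor turn
    eventually(collected.append, 2)   # queue order is preserved

    d = flushEventualQueue()          # Deferred fires when the queue drains

    @d.addCallback
    def check(_):
        assert collected == [1, 2]

    return d
# ------------------------------------------------------------------------------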
# # Copyright Buildbot Team Members import re from distutils.version import LooseVersion from twisted.internet import defer from twisted.python import log from buildbot import config from buildbot.process import buildstep from buildbot.process import remotecommand from buildbot.process.properties import Properties RC_SUCCESS = 0 def getSshArgsForKeys(keyPath, knownHostsPath): args = [] if keyPath is not None: args += ['-i', keyPath] if knownHostsPath is not None: args += ['-o', 'UserKnownHostsFile={0}'.format(knownHostsPath)] return args def escapeShellArgIfNeeded(arg): if re.match(r"^[a-zA-Z0-9_-]+$", arg): return arg return '"{0}"'.format(arg) def getSshCommand(keyPath, knownHostsPath): command = ['ssh'] + getSshArgsForKeys(keyPath, knownHostsPath) command = [escapeShellArgIfNeeded(arg) for arg in command] return ' '.join(command) class GitMixin: def setupGit(self, logname=None): if logname is None: logname = 'GitMixin' if self.sshHostKey is not None and self.sshPrivateKey is None: config.error('{}: sshPrivateKey must be provided in order use sshHostKey'.format( logname)) if self.sshKnownHosts is not None and self.sshPrivateKey is None: config.error('{}: sshPrivateKey must be provided in order use sshKnownHosts'.format( logname)) if self.sshHostKey is not None and self.sshKnownHosts is not None: config.error('{}: only one of sshPrivateKey and sshHostKey can be provided'.format( logname)) self.gitInstalled = False self.supportsBranch = False self.supportsProgress = False self.supportsSubmoduleForce = False self.supportsSubmoduleCheckout = False self.supportsSshPrivateKeyAsEnvOption = False self.supportsSshPrivateKeyAsConfigOption = False def parseGitFeatures(self, version_stdout): if 'git' not in version_stdout: return try: version = version_stdout.strip().split(' ')[2] except IndexError: return self.gitInstalled = True if LooseVersion(version) >= LooseVersion("1.6.5"): self.supportsBranch = True if LooseVersion(version) >= LooseVersion("1.7.2"): self.supportsProgress = True if LooseVersion(version) >= LooseVersion("1.7.6"): self.supportsSubmoduleForce = True if LooseVersion(version) >= LooseVersion("1.7.8"): self.supportsSubmoduleCheckout = True if LooseVersion(version) >= LooseVersion("2.3.0"): self.supportsSshPrivateKeyAsEnvOption = True if LooseVersion(version) >= LooseVersion("2.10.0"): self.supportsSshPrivateKeyAsConfigOption = True def adjustCommandParamsForSshPrivateKey(self, command, env, keyPath, sshWrapperPath=None, knownHostsPath=None): ssh_command = getSshCommand(keyPath, knownHostsPath) if self.supportsSshPrivateKeyAsConfigOption: command.append('-c') command.append('core.sshCommand={0}'.format(ssh_command)) elif self.supportsSshPrivateKeyAsEnvOption: env['GIT_SSH_COMMAND'] = ssh_command else: if sshWrapperPath is None: raise Exception('Only SSH wrapper script is supported but path ' 'not given') env['GIT_SSH'] = sshWrapperPath def getSshWrapperScriptContents(keyPath, knownHostsPath=None): ssh_command = getSshCommand(keyPath, knownHostsPath) # note that this works on windows if using git with MINGW embedded. 
return '#!/bin/sh\n{0} "$@"\n'.format(ssh_command) def getSshKnownHostsContents(hostKey): host_name = '*' return '{0} {1}'.format(host_name, hostKey) class GitStepMixin(GitMixin): def setupGitStep(self): self.didDownloadSshPrivateKey = False self.setupGit(logname='Git') if not self.repourl: config.error("Git: must provide repourl.") def _isSshPrivateKeyNeededForGitCommand(self, command): if not command or self.sshPrivateKey is None: return False gitCommandsThatNeedSshKey = [ 'clone', 'submodule', 'fetch', 'push' ] if command[0] in gitCommandsThatNeedSshKey: return True return False def _getSshDataPath(self): # we can't use the workdir for temporary ssh-related files, because # it's needed when cloning repositories and git does not like the # destination directory being non-empty. We have to use separate # temporary directory for that data to ensure the confidentiality of it. # So instead of # '{path}/{to}/{workdir}/.buildbot-ssh-key' we put the key at # '{path}/{to}/.{builder_name}.{workdir}.buildbot/ssh-key'. # basename and dirname interpret the last element being empty for paths # ending with a slash path_module = self.build.path_module workdir = self._getSshDataWorkDir().rstrip('/\\') if path_module.isabs(workdir): parent_path = path_module.dirname(workdir) else: parent_path = path_module.join(self.worker.worker_basedir, path_module.dirname(workdir)) basename = '.{0}.{1}.buildbot'.format(self.build.builder.name, path_module.basename(workdir)) return path_module.join(parent_path, basename) def _getSshPrivateKeyPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-key') def _getSshHostKeyPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-known-hosts') def _getSshWrapperScriptPath(self, ssh_data_path): return self.build.path_module.join(ssh_data_path, 'ssh-wrapper.sh') def _adjustCommandParamsForSshPrivateKey(self, full_command, full_env): ssh_data_path = self._getSshDataPath() key_path = self._getSshPrivateKeyPath(ssh_data_path) ssh_wrapper_path = self._getSshWrapperScriptPath(ssh_data_path) host_key_path = None if self.sshHostKey is not None or self.sshKnownHosts is not None: host_key_path = self._getSshHostKeyPath(ssh_data_path) self.adjustCommandParamsForSshPrivateKey(full_command, full_env, key_path, ssh_wrapper_path, host_key_path) @defer.inlineCallbacks def _dovccmd(self, command, abandonOnFailure=True, collectStdout=False, initialStdin=None): full_command = ['git'] full_env = self.env.copy() if self.env else {} if self.config is not None: for name, value in self.config.items(): full_command.append('-c') full_command.append('%s=%s' % (name, value)) if self._isSshPrivateKeyNeededForGitCommand(command): self._adjustCommandParamsForSshPrivateKey(full_command, full_env) full_command.extend(command) # check for the interruptSignal flag sigtermTime = None interruptSignal = None # If possible prefer to send a SIGTERM to git before we send a SIGKILL. # If we send a SIGKILL, git is prone to leaving around stale lockfiles. # By priming it with a SIGTERM first we can ensure that it has a chance to shut-down gracefully # before getting terminated if not self.workerVersionIsOlderThan("shell", "2.16"): # git should shut-down quickly on SIGTERM. If it doesn't don't let it # stick around for too long because this is on top of any timeout # we have hit. sigtermTime = 1 else: # Since sigtermTime is unavailable try to just use SIGTERM by itself instead of # killing. This should be safe. 
if self.workerVersionIsOlderThan("shell", "2.15"): log.msg( "NOTE: worker does not allow master to specify " "interruptSignal. This may leave a stale lockfile around " "if the command is interrupted/times out\n") else: interruptSignal = 'TERM' cmd = remotecommand.RemoteShellCommand(self.workdir, full_command, env=full_env, logEnviron=self.logEnviron, timeout=self.timeout, sigtermTime=sigtermTime, interruptSignal=interruptSignal, collectStdout=collectStdout, initialStdin=initialStdin) cmd.useLog(self.stdio_log, False) yield self.runCommand(cmd) if abandonOnFailure and cmd.didFail(): log.msg("Source step failed while running command %s" % cmd) raise buildstep.BuildStepFailed() if collectStdout: return cmd.stdout return cmd.rc @defer.inlineCallbacks def checkFeatureSupport(self): stdout = yield self._dovccmd(['--version'], collectStdout=True) self.parseGitFeatures(stdout) return self.gitInstalled @defer.inlineCallbacks def _downloadSshPrivateKeyIfNeeded(self): if self.sshPrivateKey is None: return RC_SUCCESS p = Properties() p.master = self.master private_key = yield p.render(self.sshPrivateKey) host_key = yield p.render(self.sshHostKey) known_hosts_contents = yield p.render(self.sshKnownHosts) # not using self.workdir because it may be changed depending on step # options workdir = self._getSshDataWorkDir() ssh_data_path = self._getSshDataPath() yield self.runMkdir(ssh_data_path) if not self.supportsSshPrivateKeyAsEnvOption: script_path = self._getSshWrapperScriptPath(ssh_data_path) script_contents = getSshWrapperScriptContents( self._getSshPrivateKeyPath(ssh_data_path)) yield self.downloadFileContentToWorker(script_path, script_contents, workdir=workdir, mode=0o700) private_key_path = self._getSshPrivateKeyPath(ssh_data_path) yield self.downloadFileContentToWorker(private_key_path, private_key, workdir=workdir, mode=0o400) if self.sshHostKey is not None or self.sshKnownHosts is not None: known_hosts_path = self._getSshHostKeyPath(ssh_data_path) if self.sshHostKey is not None: known_hosts_contents = getSshKnownHostsContents(host_key) yield self.downloadFileContentToWorker(known_hosts_path, known_hosts_contents, workdir=workdir, mode=0o400) self.didDownloadSshPrivateKey = True return RC_SUCCESS @defer.inlineCallbacks def _removeSshPrivateKeyIfNeeded(self): if not self.didDownloadSshPrivateKey: return RC_SUCCESS yield self.runRmdir(self._getSshDataPath()) return RC_SUCCESS buildbot-2.6.0/master/buildbot/util/giturlparse.py000066400000000000000000000032401361162603000223070ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from collections import namedtuple # The regex is matching more than it should and is not intended to be an url validator. # It is intended to efficiently and reliably extract information from the various examples # that are described in the unit tests. 
_giturlmatcher = re.compile(
    r'(?P<proto>(https?://|ssh://|git://|))'
    r'((?P<user>.*)@)?'
    r'(?P<domain>[^\/:]+)(:((?P<port>[0-9]+)/)?|/)'
    r'((?P<owner>.+)/)?(?P<repo>[^/]+?)(\.git)?$')

GitUrl = namedtuple('GitUrl', ['proto', 'user', 'domain', 'port', 'owner', 'repo'])


def giturlparse(url):
    res = _giturlmatcher.match(url)
    if res is None:
        return None

    port = res.group("port")
    if port is not None:
        port = int(port)

    proto = res.group("proto")
    if proto:
        proto = proto[:-3]
    else:
        proto = 'ssh'  # implicit proto is ssh

    return GitUrl(proto, res.group('user'), res.group("domain"), port,
                  res.group('owner'), res.group('repo'))
buildbot-2.6.0/master/buildbot/util/httpclientservice.py000066400000000000000000000164651361162603000235160ustar00rootroot00000000000000# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

import json as jsonmodule
import textwrap

from twisted.internet import defer
from twisted.web.client import Agent
from twisted.web.client import HTTPConnectionPool
from zope.interface import implementer

from buildbot import config
from buildbot.interfaces import IHttpResponse
from buildbot.util import service
from buildbot.util import toJson
from buildbot.util import unicode2bytes
from buildbot.util.logger import Logger

try:
    import txrequests
except ImportError:
    txrequests = None

try:
    import treq
    implementer(IHttpResponse)(treq.response._Response)
except ImportError:
    treq = None

log = Logger()


@implementer(IHttpResponse)
class TxRequestsResponseWrapper:

    def __init__(self, res):
        self._res = res

    def content(self):
        return defer.succeed(self._res.content)

    def json(self):
        return defer.succeed(self._res.json())

    @property
    def code(self):
        return self._res.status_code


class HTTPClientService(service.SharedService):
    """A SharedService class that can make http requests to remote services.

    I can use either txrequests or treq, depending on what I find installed

    I provide minimal get/post/put/delete API with automatic baseurl joining,
    and json data encoding that is suitable for use from buildbot services.
    """
    TREQ_PROS_AND_CONS = textwrap.dedent("""
        txrequests is based on requests and is probably a bit more mature, but
        it requires threads to run, so has more overhead.
        treq is better integrated in twisted and is more and more feature equivalent

        txrequests is 2.8x slower than treq due to the use of threads.
http://treq.readthedocs.io/en/latest/#feature-parity-w-requests pip install txrequests or pip install treq """) # Those could be in theory be overridden in master.cfg by using # import buildbot.util.httpclientservice.HTTPClientService.PREFER_TREQ = True # We prefer at the moment keeping it simple PREFER_TREQ = False MAX_THREADS = 5 def __init__(self, base_url, auth=None, headers=None, verify=None, debug=False): assert not base_url.endswith( "/"), "baseurl should not end with /: " + base_url super().__init__() self._base_url = base_url self._auth = auth self._headers = headers self._pool = None self._session = None self.verify = verify self.debug = debug def updateHeaders(self, headers): if self._headers is None: self._headers = {} self._headers.update(headers) @staticmethod def checkAvailable(from_module): """Call me at checkConfig time to properly report config error if neither txrequests or treq is installed """ if txrequests is None and treq is None: config.error("neither txrequests nor treq is installed, but {} is requiring it\n\n{}".format( from_module, HTTPClientService.TREQ_PROS_AND_CONS)) def startService(self): # treq only supports basicauth, so we force txrequests if the auth is # something else if self._auth is not None and not isinstance(self._auth, tuple): self.PREFER_TREQ = False if txrequests is not None and not self.PREFER_TREQ: self._session = txrequests.Session() self._doRequest = self._doTxRequest elif treq is None: raise ImportError("{classname} requires either txrequest or treq install." " Users should call {classname}.checkAvailable() during checkConfig()" " to properly alert the user.".format(classname=self.__class__.__name__)) else: self._doRequest = self._doTReq self._pool = HTTPConnectionPool(self.master.reactor) self._pool.maxPersistentPerHost = self.MAX_THREADS self._agent = Agent(self.master.reactor, pool=self._pool) return super().startService() @defer.inlineCallbacks def stopService(self): if self._session: yield self._session.close() if self._pool: yield self._pool.closeCachedConnections() yield super().stopService() def _prepareRequest(self, ep, kwargs): assert ep == "" or ep.startswith("/"), "ep should start with /: " + ep url = self._base_url + ep if self._auth is not None and 'auth' not in kwargs: kwargs['auth'] = self._auth headers = kwargs.get('headers', {}) if self._headers is not None: headers.update(self._headers) kwargs['headers'] = headers # we manually do the json encoding in order to automatically convert timestamps # for txrequests and treq json = kwargs.pop('json', None) if isinstance(json, dict): jsonStr = jsonmodule.dumps(json, default=toJson) jsonBytes = unicode2bytes(jsonStr) kwargs['headers']['Content-Type'] = 'application/json' kwargs['data'] = jsonBytes return url, kwargs @defer.inlineCallbacks def _doTxRequest(self, method, ep, **kwargs): url, kwargs = yield self._prepareRequest(ep, kwargs) if self.debug: log.debug("http {url} {kwargs}", url=url, kwargs=kwargs) def readContent(session, res): # this forces reading of the content inside the thread res.content if self.debug: log.debug("==> {code}: {content}", code=res.status_code, content=res.content) return res # read the whole content in the thread kwargs['background_callback'] = readContent if self.verify is False: kwargs['verify'] = False res = yield self._session.request(method, url, **kwargs) return IHttpResponse(TxRequestsResponseWrapper(res)) @defer.inlineCallbacks def _doTReq(self, method, ep, **kwargs): url, kwargs = yield self._prepareRequest(ep, kwargs) # treq requires header 
values to be an array kwargs['headers'] = {k: [v] for k, v in kwargs['headers'].items()} kwargs['agent'] = self._agent res = yield getattr(treq, method)(url, **kwargs) return IHttpResponse(res) # lets be nice to the auto completers, and don't generate that code def get(self, ep, **kwargs): return self._doRequest('get', ep, **kwargs) def put(self, ep, **kwargs): return self._doRequest('put', ep, **kwargs) def delete(self, ep, **kwargs): return self._doRequest('delete', ep, **kwargs) def post(self, ep, **kwargs): return self._doRequest('post', ep, **kwargs) buildbot-2.6.0/master/buildbot/util/identifiers.py000066400000000000000000000037201361162603000222560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from buildbot import util ident_re = re.compile('^[a-zA-Z\u00a0-\U0010ffff_-][a-zA-Z0-9\u00a0-\U0010ffff_-]*$', flags=re.UNICODE) initial_re = re.compile('^[^a-zA-Z_-]') subsequent_re = re.compile('[^a-zA-Z0-9_-]') trailing_digits_re = re.compile('_([0-9]+)$') def isIdentifier(maxLength, obj): if not isinstance(obj, str): return False elif not ident_re.match(obj): return False elif not obj or len(obj) > maxLength: return False return True def forceIdentifier(maxLength, s): if not isinstance(s, str): raise TypeError("%r cannot be coerced to an identifier" % (str,)) # usually bytes2unicode can handle it s = util.bytes2unicode(s) if isIdentifier(maxLength, s): return s # trim to length and substitute out invalid characters s = s[:maxLength] s = initial_re.sub('_', s) s = subsequent_re.subn('_', s)[0] return s def incrementIdentifier(maxLength, ident): num = 1 mo = trailing_digits_re.search(ident) if mo: ident = ident[:mo.start(1) - 1] num = int(mo.group(1)) num = '_%d' % (num + 1) if len(num) > maxLength: raise ValueError("cannot generate a larger identifier") ident = ident[:maxLength - len(num)] + num return ident buildbot-2.6.0/master/buildbot/util/kubeclientservice.py000066400000000000000000000235441361162603000234650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
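# --- Usage sketch: HTTPClientService ------------------------------------------
# A hedged illustration of the minimal get/post API of HTTPClientService
# above.  The base URL, endpoints and payload are made-up examples, and the
# SharedService.getService() construction idiom is assumed from the rest of
# Buildbot rather than shown in this file.
from twisted.internet import defer as _defer_sketch


@_defer_sketch.inlineCallbacks
def _httpclientservice_usage_sketch(master):
    http = yield HTTPClientService.getService(
        master, 'https://api.example.com', headers={'User-Agent': 'buildbot'})

    res = yield http.get('/status')                    # joined onto the base URL
    body = yield res.json()                            # decoded JSON payload

    yield http.post('/jobs', json={'name': 'demo'})    # dicts are JSON-encoded
    return body
# ------------------------------------------------------------------------------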
# # Copyright Buildbot Team Members import abc import base64 import os import time from twisted.internet import defer from twisted.internet import reactor from twisted.internet.error import ProcessExitedAlready from twisted.internet.protocol import ProcessProtocol from twisted.python.failure import Failure from buildbot import config from buildbot.util import asyncSleep from buildbot.util.httpclientservice import HTTPClientService from buildbot.util.logger import Logger from buildbot.util.service import BuildbotService log = Logger() # this is a BuildbotService, so that it can be started and destroyed. # this is needed to implement kubectl proxy lifecycle class KubeConfigLoaderBase(BuildbotService): name = "KubeConfig" @abc.abstractmethod def getConfig(self): """ @return dictionary with optional params { 'master_url': 'https://kube_master.url', 'namespace': 'default_namespace', 'headers' { 'Authentication': XXX } # todo (quite hard to implement with treq): 'cert': 'optional client certificate used to connect to ssl' 'verify': 'kube master certificate authority to use to connect' } """ def getAuthorization(self): return None def __str__(self): """return unique str for SharedService""" # hash is implemented from ComparableMixin return "{}({})".format(self.__class__.__name__, hash(self)) class KubeHardcodedConfig(KubeConfigLoaderBase): def reconfigService(self, master_url=None, bearerToken=None, basicAuth=None, headers=None, cert=None, verify=None, namespace="default"): self.config = {'master_url': master_url, 'namespace': namespace, 'headers': {}} if headers is not None: self.config['headers'] = headers if basicAuth and bearerToken: raise Exception("set one of basicAuth and bearerToken, not both") self.basicAuth = basicAuth self.bearerToken = bearerToken if cert is not None: self.config['cert'] = cert if verify is not None: self.config['verify'] = verify checkConfig = reconfigService @defer.inlineCallbacks def getAuthorization(self): if self.basicAuth is not None: basicAuth = yield self.renderSecrets(self.basicAuth) authstring = "{user}:{password}".format(**basicAuth).encode('utf-8') encoded = base64.b64encode(authstring) return defer.returnValue("Basic {0}".format(encoded)) if self.bearerToken is not None: bearerToken = yield self.renderSecrets(self.bearerToken) return defer.returnValue("Bearer {0}".format(bearerToken)) return defer.returnValue(None) def getConfig(self): return self.config class KubeCtlProxyConfigLoader(KubeConfigLoaderBase): """ We use kubectl proxy to connect to kube master. Parsing the config and setting up SSL is complex. So for now, we use kubectl proxy to load the config and connect to master. 
This will run the kube proxy as a subprocess, and return configuration with http://localhost:PORT """ kube_ctl_proxy_cmd = ['kubectl', 'proxy'] # for tests override class LocalPP(ProcessProtocol): def __init__(self): self.got_output_deferred = defer.Deferred() self.terminated_deferred = defer.Deferred() self.first_line = b"" def outReceived(self, data): if not self.got_output_deferred.called: self.first_line += data if b"\n" in self.first_line: self.got_output_deferred.callback(self.first_line.split(b"\n")[0]) def errReceived(self, data): if not self.got_output_deferred.called: self.got_output_deferred.errback(Failure(RuntimeError(data))) def processEnded(self, status_object): self.terminated_deferred.callback(None) def checkConfig(self, proxy_port=8001, namespace="default"): self.pp = None self.process = None @defer.inlineCallbacks def ensureSubprocessKilled(self): if self.pp is not None: try: self.process.signalProcess("TERM") except ProcessExitedAlready: pass # oh well yield self.pp.terminated_deferred @defer.inlineCallbacks def reconfigService(self, proxy_port=8001, namespace="default"): self.proxy_port = proxy_port self.namespace = namespace yield self.ensureSubprocessKilled() self.pp = self.LocalPP() self.process = reactor.spawnProcess( self.pp, self.kube_ctl_proxy_cmd[0], self.kube_ctl_proxy_cmd + ["-p", str(self.proxy_port)], env=None) self.kube_proxy_output = yield self.pp.got_output_deferred def stopService(self): return self.ensureSubprocessKilled() def getConfig(self): return { 'master_url': "http://localhost:{}".format(self.proxy_port), 'namespace': self.namespace } class KubeInClusterConfigLoader(KubeConfigLoaderBase): kube_dir = '/var/run/secrets/kubernetes.io/serviceaccount/' kube_namespace_file = os.path.join(kube_dir, 'namespace') kube_token_file = os.path.join(kube_dir, 'token') kube_cert_file = os.path.join(kube_dir, 'ca.crt') def checkConfig(self): if not os.path.exists(self.kube_dir): config.error( "Not in kubernetes cluster (kube_dir not found: {})".format( self.kube_dir)) def reconfigService(self): self.config = {} self.config['master_url'] = os.environ['KUBERNETES_PORT'].replace( 'tcp', 'https') self.config['verify'] = self.kube_cert_file with open(self.kube_token_file, encoding="utf-8") as token_content: token = token_content.read().strip() self.config['headers'] = { 'Authorization': 'Bearer {0}'.format(token) } with open(self.kube_namespace_file, encoding="utf-8") as namespace_content: self.config['namespace'] = namespace_content.read().strip() def getConfig(self): return self.config class KubeError(RuntimeError): def __init__(self, response_json): super().__init__(response_json['message']) self.json = response_json self.reason = response_json.get('reason') class KubeClientService(HTTPClientService): def __init__(self, kube_config=None): self.config = kube_config super().__init__('') self._namespace = None kube_config.setServiceParent(self) @defer.inlineCallbacks def _prepareRequest(self, ep, kwargs): config = self.config.getConfig() self._base_url = config['master_url'] url, req_kwargs = super()._prepareRequest(ep, kwargs) if 'headers' not in req_kwargs: req_kwargs['headers'] = {} if 'headers' in config: req_kwargs['headers'].update(config['headers']) auth = yield self.config.getAuthorization() if auth is not None: req_kwargs['headers']['Authorization'] = auth # warning: this only works with txrequests! 
not treq for arg in ['cert', 'verify']: if arg in config: req_kwargs[arg] = config[arg] return defer.returnValue((url, req_kwargs)) @defer.inlineCallbacks def createPod(self, namespace, spec): url = '/api/v1/namespaces/{namespace}/pods'.format(namespace=namespace) res = yield self.post(url, json=spec) res_json = yield res.json() if res.code not in (200, 201, 202): raise KubeError(res_json) defer.returnValue(res_json) @defer.inlineCallbacks def deletePod(self, namespace, name, graceperiod=0): url = '/api/v1/namespaces/{namespace}/pods/{name}'.format( namespace=namespace, name=name) res = yield self.delete(url, params={'graceperiod': graceperiod}) res_json = yield res.json() if res.code != 200: raise KubeError(res_json) defer.returnValue(res_json) @defer.inlineCallbacks def waitForPodDeletion(self, namespace, name, timeout): t1 = time.time() url = '/api/v1/namespaces/{namespace}/pods/{name}/status'.format( namespace=namespace, name=name) while True: if time.time() - t1 > timeout: raise TimeoutError( "Did not see pod {name} terminate after {timeout}s".format( name=name, timeout=timeout)) res = yield self.get(url) res_json = yield res.json() if res.code == 404: break # 404 means the pod has terminated if res.code != 200: raise KubeError(res_json) yield asyncSleep(1) defer.returnValue(res_json) @property def namespace(self): if self._namespace is None: self._namespace = self.config.getConfig()['namespace'] return self._namespace buildbot-2.6.0/master/buildbot/util/latent.py000066400000000000000000000035771361162603000212520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy from twisted.internet import defer class CompatibleLatentWorkerMixin: builds_may_be_incompatible = True _actual_build_props = None def renderWorkerProps(self, build): # Deriving classes should implement this method to render and return # a Deferred that will have all properties that are needed to start a # worker as its result. The Deferred should result in data that can # be copied via copy.deepcopy # # During actual startup, renderWorkerPropsOnStart should be called # which will invoke renderWorkerProps, store a copy of the results for # later comparison and return them. raise NotImplementedError() @defer.inlineCallbacks def renderWorkerPropsOnStart(self, build): props = yield self.renderWorkerProps(build) self._actual_build_props = copy.deepcopy(props) defer.returnValue(props) @defer.inlineCallbacks def isCompatibleWithBuild(self, build): if self._actual_build_props is None: defer.returnValue(True) requested_props = yield self.renderWorkerProps(build) defer.returnValue(requested_props == self._actual_build_props) buildbot-2.6.0/master/buildbot/util/lineboundaries.py000066400000000000000000000060251361162603000227550ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from buildbot.util.logger import Logger log = Logger() class LineBoundaryFinder: __slots__ = ['partialLine', 'callback', 'warned'] # split at reasonable line length. # too big lines will fill master's memory, and slow down the UI too much. MAX_LINELENGTH = 4096 # the lookahead here (`(?=.)`) ensures that `\r` doesn't match at the end # of the buffer # we also convert cursor control sequence to newlines # and ugly \b+ (use of backspace to implement progress bar) newline_re = re.compile(r'(\r\n|\r(?=.)|\033\[u|\033\[[0-9]+;[0-9]+[Hf]|\033\[2J|\x08+)') def __init__(self, callback): self.partialLine = None self.callback = callback self.warned = False def append(self, text): if self.partialLine: if len(self.partialLine) > self.MAX_LINELENGTH: if not self.warned: # Unfortunately we cannot give more hint as per which log that is log.warn("Splitting long line: {line_start} {length} (not warning anymore for this log)", line_start=self.partialLine[:30], length=len(self.partialLine)) self.warned = True # switch the variables, and return previous _partialLine_, # split every MAX_LINELENGTH plus a trailing \n self.partialLine, text = text, self.partialLine ret = [] while len(text) > self.MAX_LINELENGTH: ret.append(text[:self.MAX_LINELENGTH]) text = text[self.MAX_LINELENGTH:] ret.append(text) return self.callback("\n".join(ret) + "\n") text = self.partialLine + text self.partialLine = None text = self.newline_re.sub('\n', text) if text: if text[-1] != '\n': i = text.rfind('\n') if i >= 0: i = i + 1 text, self.partialLine = text[:i], text[i:] else: self.partialLine = text return defer.succeed(None) return self.callback(text) return defer.succeed(None) def flush(self): if self.partialLine: return self.append('\n') return defer.succeed(None) buildbot-2.6.0/master/buildbot/util/logger.py000066400000000000000000000025711361162603000212330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
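# --- Usage sketch: LineBoundaryFinder -----------------------------------------
# A small illustration of the line-boundary logic above: the callback only
# ever sees whole, newline-terminated lines, however the input is chunked.
# The chunks below are arbitrary examples.
def _lineboundaries_usage_sketch():
    lines = []
    lbf = LineBoundaryFinder(lines.append)

    lbf.append('partial')          # buffered, callback not called yet
    lbf.append(' line\nsecond')    # flushes 'partial line\n', buffers 'second'
    lbf.append(' line\r\n')        # '\r\n' is normalised to '\n'
    lbf.flush()                    # forces out anything still buffered

    assert lines == ['partial line\n', 'second line\n']
    return lines
# ------------------------------------------------------------------------------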
# # Copyright Buildbot Team Members try: from twisted.logger import Logger except ImportError: from twisted.python import log class Logger: """A simplistic backporting of the new logger system for old versions of twisted""" def _log(self, format, *args, **kwargs): log.msg(format.format(args, **kwargs)) # legacy logging system do not support log level. # We don't bother inventing something. If needed, user can upgrade debug = _log info = _log warn = _log error = _log critical = _log def failure(self, format, failure, *args, **kwargs): log.error(failure, format.format(args, **kwargs)) __all__ = ["Logger"] buildbot-2.6.0/master/buildbot/util/lru.py000066400000000000000000000160331361162603000205540ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from collections import defaultdict from collections import deque from itertools import filterfalse from weakref import WeakValueDictionary from twisted.internet import defer from twisted.python import log class LRUCache: """ A least-recently-used cache, with a fixed maximum size. See buildbot manual for more information. """ __slots__ = ('max_size max_queue miss_fn queue cache weakrefs ' 'refcount hits refhits misses'.split()) sentinel = object() QUEUE_SIZE_FACTOR = 10 def __init__(self, miss_fn, max_size=50): self.max_size = max_size self.max_queue = max_size * self.QUEUE_SIZE_FACTOR self.queue = deque() self.cache = {} self.weakrefs = WeakValueDictionary() self.hits = self.misses = self.refhits = 0 self.refcount = defaultdict(lambda: 0) self.miss_fn = miss_fn def put(self, key, value): cached = key in self.cache or key in self.weakrefs self.cache[key] = value self.weakrefs[key] = value self._ref_key(key) if not cached: self._purge() def get(self, key, **miss_fn_kwargs): try: return self._get_hit(key) except KeyError: pass self.misses += 1 result = self.miss_fn(key, **miss_fn_kwargs) if result is not None: self.cache[key] = result self.weakrefs[key] = result self._ref_key(key) self._purge() return result def keys(self): return list(self.cache) def set_max_size(self, max_size): if self.max_size == max_size: return self.max_size = max_size self.max_queue = max_size * self.QUEUE_SIZE_FACTOR self._purge() def inv(self): global inv_failed # the keys of the queue and cache should be identical cache_keys = set(self.cache.keys()) queue_keys = set(self.queue) if queue_keys - cache_keys: log.msg("INV: uncached keys in queue:", queue_keys - cache_keys) inv_failed = True if cache_keys - queue_keys: log.msg("INV: unqueued keys in cache:", cache_keys - queue_keys) inv_failed = True # refcount should always represent the number of times each key appears # in the queue exp_refcount = dict() for k in self.queue: exp_refcount[k] = exp_refcount.get(k, 0) + 1 if exp_refcount != self.refcount: log.msg("INV: refcounts differ:") log.msg(" expected:", sorted(exp_refcount.items())) log.msg(" got:", 
sorted(self.refcount.items())) inv_failed = True def _ref_key(self, key): """Record a reference to the argument key.""" queue = self.queue refcount = self.refcount queue.append(key) refcount[key] = refcount[key] + 1 # periodically compact the queue by eliminating duplicate keys # while preserving order of most recent access. Note that this # is only required when the cache does not exceed its maximum # size if len(queue) > self.max_queue: refcount.clear() queue_appendleft = queue.appendleft queue_appendleft(self.sentinel) for k in filterfalse(refcount.__contains__, iter(queue.pop, self.sentinel)): queue_appendleft(k) refcount[k] = 1 def _get_hit(self, key): """Try to do a value lookup from the existing cache entries.""" try: result = self.cache[key] self.hits += 1 self._ref_key(key) return result except KeyError: pass result = self.weakrefs[key] self.refhits += 1 self.cache[key] = result self._ref_key(key) return result def _purge(self): """ Trim the cache down to max_size by evicting the least-recently-used entries. """ if len(self.cache) <= self.max_size: return cache = self.cache refcount = self.refcount queue = self.queue max_size = self.max_size # purge least recently used entries, using refcount to count entries # that appear multiple times in the queue while len(cache) > max_size: refc = 1 while refc: k = queue.popleft() refc = refcount[k] = refcount[k] - 1 del cache[k] del refcount[k] class AsyncLRUCache(LRUCache): """ An LRU cache with asynchronous locking to ensure that in the common case of multiple concurrent requests for the same key, only one fetch is performed. """ __slots__ = ['concurrent'] def __init__(self, miss_fn, max_size=50): super().__init__(miss_fn, max_size=max_size) self.concurrent = {} def get(self, key, **miss_fn_kwargs): try: result = self._get_hit(key) return defer.succeed(result) except KeyError: pass concurrent = self.concurrent conc = concurrent.get(key) if conc: self.hits += 1 d = defer.Deferred() conc.append(d) return d # if we're here, we've missed and need to fetch self.misses += 1 # create a list of waiting deferreds for this key d = defer.Deferred() assert key not in concurrent concurrent[key] = [d] miss_d = self.miss_fn(key, **miss_fn_kwargs) def handle_result(result): if result is not None: self.cache[key] = result self.weakrefs[key] = result # reference the key once, possibly standing in for multiple # concurrent accesses self._ref_key(key) self._purge() # and fire all of the waiting Deferreds dlist = concurrent.pop(key) for d in dlist: d.callback(result) def handle_failure(f): # errback all of the waiting Deferreds dlist = concurrent.pop(key) for d in dlist: d.errback(f) miss_d.addCallbacks(handle_result, handle_failure) miss_d.addErrback(log.err) return d # for tests inv_failed = False buildbot-2.6.0/master/buildbot/util/maildir.py000066400000000000000000000140601361162603000213710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
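# --- Usage sketch: LRUCache ---------------------------------------------------
# A minimal, hedged example of the synchronous LRUCache above.  The Value
# class and miss function are arbitrary stand-ins for an expensive lookup;
# note that cached values must support weak references.
def _lru_usage_sketch():
    computed = []

    class Value:
        def __init__(self, n):
            self.n = n

    def miss_fn(key):
        computed.append(key)          # record every real (non-cached) lookup
        return Value(key * 2)

    cache = LRUCache(miss_fn, max_size=2)

    assert cache.get(1).n == 2        # miss: miss_fn is called
    assert cache.get(1).n == 2        # hit: served from the cache
    cache.get(2)
    cache.get(3)                      # evicts the least recently used entry
    assert computed == [1, 2, 3]
    assert cache.hits == 1
    return cache
# ------------------------------------------------------------------------------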
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ This is a class which watches a maildir for new messages. It uses the linux dirwatcher API (if available) to look for new files. The .messageReceived method is invoked with the filename of the new message, relative to the top of the maildir (so it will look like "new/blahblah"). """ import os from twisted.application import internet from twisted.internet import defer from twisted.internet import reactor # We have to put it here, since we use it to provide feedback from twisted.python import log from twisted.python import runtime from buildbot.util import service dnotify = None try: import dnotify except ImportError: log.msg("unable to import dnotify, so Maildir will use polling instead") class NoSuchMaildir(Exception): pass class MaildirService(service.BuildbotService): pollinterval = 10 # only used if we don't have DNotify name = 'MaildirService' def __init__(self, basedir=None): super().__init__() if basedir: self.setBasedir(basedir) self.files = [] self.dnotify = None self.timerService = None def setBasedir(self, basedir): # some users of MaildirService (scheduler.Try_Jobdir, in particular) # don't know their basedir until setServiceParent, since it is # relative to the buildmaster's basedir. So let them set it late. We # don't actually need it until our own startService. self.basedir = basedir self.newdir = os.path.join(self.basedir, "new") self.curdir = os.path.join(self.basedir, "cur") @defer.inlineCallbacks def startService(self): if not os.path.isdir(self.newdir) or not os.path.isdir(self.curdir): raise NoSuchMaildir("invalid maildir '%s'" % self.basedir) try: if dnotify: # we must hold an fd open on the directory, so we can get # notified when it changes. self.dnotify = dnotify.DNotify(self.newdir, self.dnotify_callback, [dnotify.DNotify.DN_CREATE]) except (IOError, OverflowError): # IOError is probably linux<2.4.19, which doesn't support # dnotify. OverflowError will occur on some 64-bit machines # because of a python bug log.msg("DNotify failed, falling back to polling") if not self.dnotify: self.timerService = internet.TimerService( self.pollinterval, self.poll) yield self.timerService.setServiceParent(self) self.poll() yield super().startService() def dnotify_callback(self): log.msg("dnotify noticed something, now polling") # give it a moment. I found that qmail had problems when the message # was removed from the maildir instantly. It shouldn't, that's what # maildirs are made for. I wasn't able to eyeball any reason for the # problem, and safecat didn't behave the same way, but qmail reports # "Temporary_error_on_maildir_delivery" (qmail-local.c:165, # maildir_child() process exited with rc not in 0,2,3,4). 
Not sure # why, and I'd have to hack qmail to investigate further, so it's # easier to just wait a second before yanking the message out of new/ reactor.callLater(0.1, self.poll) def stopService(self): if self.dnotify: self.dnotify.remove() self.dnotify = None if self.timerService is not None: self.timerService.disownServiceParent() self.timerService = None return super().stopService() @defer.inlineCallbacks def poll(self): try: assert self.basedir # see what's new for f in self.files: if not os.path.isfile(os.path.join(self.newdir, f)): self.files.remove(f) newfiles = [] for f in os.listdir(self.newdir): if f not in self.files: newfiles.append(f) self.files.extend(newfiles) for n in newfiles: try: yield self.messageReceived(n) except Exception: log.err( None, "while reading '%s' from maildir '%s':" % (n, self.basedir)) except Exception: log.err(None, "while polling maildir '%s':" % (self.basedir,)) def moveToCurDir(self, filename): if runtime.platformType == "posix": # open the file before moving it, because I'm afraid that once # it's in cur/, someone might delete it at any moment path = os.path.join(self.newdir, filename) f = open(path, "r") os.rename(os.path.join(self.newdir, filename), os.path.join(self.curdir, filename)) elif runtime.platformType == "win32": # do this backwards under windows, because you can't move a file # that somebody is holding open. This was causing a Permission # Denied error on bear's win32-twisted1.3 worker. os.rename(os.path.join(self.newdir, filename), os.path.join(self.curdir, filename)) path = os.path.join(self.curdir, filename) f = open(path, "r") return f def messageReceived(self, filename): raise NotImplementedError buildbot-2.6.0/master/buildbot/util/misc.py000066400000000000000000000032441361162603000207050ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Miscellaneous utilities; these should be imported from C{buildbot.util}, not directly from this module. """ import os from twisted.internet import reactor def deferredLocked(lock_or_attr): def decorator(fn): def wrapper(*args, **kwargs): lock = lock_or_attr if isinstance(lock, str): lock = getattr(args[0], lock) return lock.run(fn, *args, **kwargs) return wrapper return decorator def cancelAfter(seconds, deferred, _reactor=reactor): delayedCall = _reactor.callLater(seconds, deferred.cancel) # cancel the delayedCall when the underlying deferred fires @deferred.addBoth def cancelTimer(x): if delayedCall.active(): delayedCall.cancel() return x return deferred def writeLocalFile(path, contents, mode=None): # pragma: no cover with open(path, 'w') as file: if mode is not None: os.chmod(path, mode) file.write(contents) buildbot-2.6.0/master/buildbot/util/netstrings.py000066400000000000000000000044611361162603000221540ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet.interfaces import IAddress from twisted.internet.interfaces import ITransport from twisted.protocols import basic from zope.interface import implementer from buildbot.util import unicode2bytes @implementer(IAddress) class NullAddress: "an address for NullTransport" @implementer(ITransport) class NullTransport: "a do-nothing transport to make NetstringReceiver happy" def write(self, data): raise NotImplementedError def writeSequence(self, data): raise NotImplementedError def loseConnection(self): pass def getPeer(self): return NullAddress def getHost(self): return NullAddress class NetstringParser(basic.NetstringReceiver): """ Adapts the Twisted netstring support (which assumes it is on a socket) to work on simple strings, too. Call the C{feed} method with arbitrary blocks of data, and override the C{stringReceived} method to get called for each embedded netstring. The default implementation collects the netstrings in the list C{self.strings}. """ def __init__(self): # most of the complexity here is stubbing out the transport code so # that Twisted-10.2.0 and higher believes that this is a valid protocol self.makeConnection(NullTransport()) self.strings = [] def feed(self, data): data = unicode2bytes(data) self.dataReceived(data) # dataReceived handles errors unusually quietly! if self.brokenPeer: raise basic.NetstringParseError def stringReceived(self, string): self.strings.append(string) buildbot-2.6.0/master/buildbot/util/pathmatch.py000066400000000000000000000052361361162603000217260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
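# --- Usage sketch: NetstringParser --------------------------------------------
# A short example of the parser above: feed() accepts arbitrarily chunked
# data and collects each complete netstring in .strings (malformed input
# raises NetstringParseError).  The payloads are arbitrary examples.
def _netstrings_usage_sketch():
    parser = NetstringParser()
    parser.feed('5:hello,5:wo')     # first netstring complete, second partial
    parser.feed('rld,')             # completes the second netstring
    assert parser.strings == [b'hello', b'world']
    return parser.strings
# ------------------------------------------------------------------------------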
# # Copyright Buildbot Team Members import re _ident_re = re.compile('^[a-zA-Z_-][.a-zA-Z0-9_-]*$') def ident(x): if _ident_re.match(x): return x raise TypeError class Matcher: def __init__(self): self._patterns = {} self._dirty = True def __setitem__(self, path, value): assert path not in self._patterns, "duplicate path %s" % (path,) self._patterns[path] = value self._dirty = True def __repr__(self): return '' % (self._patterns,) path_elt_re = re.compile('^(.?):([a-z0-9_.]+)$') type_fns = dict(n=int, i=ident) def __getitem__(self, path): if self._dirty: self._compile() patterns = self._by_length.get(len(path), {}) for pattern in patterns: kwargs = {} for pattern_elt, path_elt in zip(pattern, path): mo = self.path_elt_re.match(pattern_elt) if mo: type_flag, arg_name = mo.groups() if type_flag: try: type_fn = self.type_fns[type_flag] except Exception: assert type_flag in self.type_fns, \ "no such type flag %s" % type_flag try: path_elt = type_fn(path_elt) except Exception: break kwargs[arg_name] = path_elt else: if pattern_elt != path_elt: break else: # complete match return patterns[pattern], kwargs else: raise KeyError('No match for %r' % (path,)) def iterPatterns(self): return list(self._patterns.items()) def _compile(self): self._by_length = {} for k, v in self.iterPatterns(): length = len(k) self._by_length.setdefault(length, {})[k] = v buildbot-2.6.0/master/buildbot/util/poll.py000066400000000000000000000071121361162603000207160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.internet import task from twisted.python import log _poller_instances = None class Poller: __slots__ = ['fn', 'instance', 'loop', 'started', 'running', 'pending', 'stopDeferreds', '_reactor'] def __init__(self, fn, instance, reactor): self.fn = fn self.instance = instance self.loop = None self.started = False self.running = False self.pending = False self.stopDeferreds = [] self._reactor = reactor @defer.inlineCallbacks def _run(self): self.running = True try: yield defer.maybeDeferred(self.fn, self.instance) except Exception as e: log.err(e, 'while running %s' % (self.fn,)) self.running = False # loop if there's another pending call if self.pending: self.pending = False yield self._run() def __call__(self): if self.started: if self.running: self.pending = True else: # terrible hack.. 
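# (what the "hack" below does: temporarily setting the LoopingCall interval
# to 0 and calling reset() reschedules the next iteration to run almost
# immediately; restoring the old interval afterwards keeps the configured
# polling period for subsequent iterations)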
old_interval = self.loop.interval self.loop.interval = 0 self.loop.reset() self.loop.interval = old_interval def start(self, interval, now=False): assert not self.started if not self.loop: self.loop = task.LoopingCall(self._run) self.loop.clock = self._reactor stopDeferred = self.loop.start(interval, now=now) @stopDeferred.addCallback def inform(_): self.started = False while self.stopDeferreds: self.stopDeferreds.pop().callback(None) self.started = True def stop(self): if self.loop and self.loop.running: self.loop.stop() if self.started: d = defer.Deferred() self.stopDeferreds.append(d) return d return defer.succeed(None) class _Descriptor: def __init__(self, fn, attrName): self.fn = fn self.attrName = attrName def __get__(self, instance, cls): try: poller = getattr(instance, self.attrName) except AttributeError: poller = Poller(self.fn, instance, instance.master.reactor) setattr(instance, self.attrName, poller) # track instances when testing if _poller_instances is not None: _poller_instances.append((instance, self.attrName)) return poller def method(fn): stateName = "__poll_" + fn.__name__ + "__" return _Descriptor(fn, stateName) def track_poll_methods(): global _poller_instances _poller_instances = [] def reset_poll_methods(): global _poller_instances for instance, attrname in _poller_instances: # pylint: disable=not-an-iterable delattr(instance, attrname) _poller_instances = None buildbot-2.6.0/master/buildbot/util/private_tempdir.py000066400000000000000000000025771361162603000231600ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import shutil import tempfile class PrivateTemporaryDirectory: """ Works similarly to python 3.2+ TemporaryDirectory except the also sets the permissions of the created directory and Note, that Windows ignores the permissions. """ def __init__(self, suffix=None, prefix=None, dir=None, mode=0o700): self.name = tempfile.mkdtemp(suffix, prefix, dir) self.mode = mode self._cleanup_needed = True def __enter__(self): return self.name def __exit__(self, exc, value, tb): self.cleanup() def cleanup(self): if self._cleanup_needed: shutil.rmtree(self.name) self._cleanup_needed = False buildbot-2.6.0/master/buildbot/util/raml.py000066400000000000000000000076671361162603000207220ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
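# A minimal sketch of how the RamlSpec class defined below may be used; it
# assumes the api.raml specification shipped under master/buildbot/spec is
# available next to this package:
#
#   spec = RamlSpec()
#   for ep, cfg in spec.endpoints.items():
#       print(ep, sorted(cfg.get('uriParameters', {})))
#   print(sorted(spec.types))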
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import copy import json import os import yaml try: from collections import OrderedDict except ImportError: # pragma: no cover from ordereddict import OrderedDict # minimalistic raml loader. Support !include tags, and mapping as OrderedDict class RamlLoader(yaml.SafeLoader): pass def construct_include(loader, node): path = os.path.join(os.path.dirname(loader.stream.name), node.value) with open(path) as f: return yaml.load(f, Loader=RamlLoader) def construct_mapping(loader, node): loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) RamlLoader.add_constructor( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping) RamlLoader.add_constructor('!include', construct_include) class RamlSpec: """ This class loads the raml specification, and expose useful aspects of the spec Main usage for now is for the doc, but it can be extended to make sure raml spec matches other spec implemented in the tests """ def __init__(self): fn = os.path.join(os.path.dirname(__file__), os.pardir, 'spec', 'api.raml') with open(fn) as f: self.api = yaml.load(f, Loader=RamlLoader) with open(fn) as f: self.rawraml = f.read() endpoints = {} self.endpoints_by_type = {} self.rawendpoints = {} self.endpoints = self.parse_endpoints(endpoints, "", self.api) self.types = self.parse_types() def parse_endpoints(self, endpoints, base, api, uriParameters=None): if uriParameters is None: uriParameters = OrderedDict() for k, v in api.items(): if k.startswith("/"): ep = base + k p = copy.deepcopy(uriParameters) if v is not None: p.update(v.get("uriParameters", {})) v["uriParameters"] = p endpoints[ep] = v self.parse_endpoints(endpoints, ep, v, p) elif k in ['get', 'post']: if 'is' in v: for _is in v['is']: if 'bbget' in _is: v['eptype'] = _is['bbget']['bbtype'] self.endpoints_by_type.setdefault(v['eptype'], {}) self.endpoints_by_type[v['eptype']][base] = api if 'bbgetraw' in _is: self.rawendpoints.setdefault(base, {}) self.rawendpoints[base] = api return endpoints def reindent(self, s, indent): return s.replace("\n", "\n" + " " * indent) def format_json(self, j, indent): j = json.dumps(j, indent=4).replace(", \n", ",\n") return self.reindent(j, indent) def parse_types(self): types = self.api['types'] return types def iter_actions(self, endpoint): ACTIONS_MAGIC = '/actions/' for k, v in endpoint.items(): if k.startswith(ACTIONS_MAGIC): k = k[len(ACTIONS_MAGIC):] v = v['post'] # simplify the raml tree for easier processing v['body'] = v['body']['application/json'].get('properties', {}) yield (k, v) buildbot-2.6.0/master/buildbot/util/sautils.py000066400000000000000000000054601361162603000214400ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
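# A minimal sketch of the InsertFromSelect construct defined below; 'dest'
# and 'src' are hypothetical sa.Table objects and 'conn' an open connection:
#
#   q = sa.select([src.c.id, src.c.name])
#   conn.execute(InsertFromSelect(dest, q))
#   # compiles to: INSERT INTO dest SELECT src.id, src.name FROM src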
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from contextlib import contextmanager import sqlalchemy as sa from sqlalchemy.ext import compiler from sqlalchemy.sql.expression import ClauseElement from sqlalchemy.sql.expression import Executable # from http://www.sqlalchemy.org/docs/core/compiler.html#compiling-sub-elements-of-a-custom-expression-construct # _execution_options per http://docs.sqlalchemy.org/en/rel_0_7/core/compiler.html#enabling-compiled-autocommit # (UpdateBase requires sqlalchemy 0.7.0) class InsertFromSelect(Executable, ClauseElement): _execution_options = \ Executable._execution_options.union({'autocommit': True}) def __init__(self, table, select): self.table = table self.select = select @compiler.compiles(InsertFromSelect) def _visit_insert_from_select(element, compiler, **kw): return "INSERT INTO %s %s" % ( compiler.process(element.table, asfrom=True), compiler.process(element.select) ) def sa_version(): if hasattr(sa, '__version__'): def tryint(s): try: return int(s) except (ValueError, TypeError): return -1 return tuple(map(tryint, sa.__version__.split('.'))) return (0, 0, 0) # "it's old" def Table(*args, **kwargs): """Wrap table creation to add any necessary dialect-specific options""" # work around the case where a database was created for us with # a non-utf8 character set (mysql's default) kwargs['mysql_character_set'] = 'utf8' return sa.Table(*args, **kwargs) @contextmanager def withoutSqliteForeignKeys(engine, connection=None): conn = connection if engine.dialect.name == 'sqlite': if conn is None: conn = engine.connect() # This context is not re-entrant. Ensure it. assert not getattr(engine, 'fk_disabled', False) engine.fk_disabled = True conn.execute('pragma foreign_keys=OFF') try: yield finally: if engine.dialect.name == 'sqlite': engine.fk_disabled = False conn.execute('pragma foreign_keys=ON') if connection is None: conn.close() buildbot-2.6.0/master/buildbot/util/service.py000066400000000000000000000474551361162603000214260ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
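# A minimal sketch (with a hypothetical FooService) of the SharedService
# pattern implemented below: getService() creates the child on first use and
# returns the same instance for identical constructor arguments:
#
#   class FooService(SharedService):
#       def __init__(self, base_url):
#           super().__init__()
#           self.base_url = base_url
#
#   # inside an AsyncMultiService parent, typically in inlineCallbacks code:
#   svc = yield FooService.getService(parent, 'https://example.invalid')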
# # Copyright Buildbot Team Members import hashlib from twisted.application import service from twisted.internet import defer from twisted.internet import task from twisted.python import log from twisted.python import reflect from twisted.python.reflect import accumulateClassList from buildbot import util from buildbot.util import bytes2unicode from buildbot.util import config from buildbot.util import unicode2bytes class ReconfigurableServiceMixin: reconfig_priority = 128 @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if not service.IServiceCollection.providedBy(self): return # get a list of child services to reconfigure reconfigurable_services = [svc for svc in self if isinstance(svc, ReconfigurableServiceMixin)] # sort by priority reconfigurable_services.sort(key=lambda svc: -svc.reconfig_priority) for svc in reconfigurable_services: yield svc.reconfigServiceWithBuildbotConfig(new_config) # twisted 16's Service is now an new style class, better put everybody new style # to catch issues even on twisted < 16 class AsyncService(service.Service): @defer.inlineCallbacks def setServiceParent(self, parent): if self.parent is not None: yield self.disownServiceParent() parent = service.IServiceCollection(parent, parent) self.parent = parent yield self.parent.addService(self) # We recurse over the parent services until we find a MasterService @property def master(self): if self.parent is None: return None return self.parent.master class AsyncMultiService(AsyncService, service.MultiService): def startService(self): # Do NOT use super() here. # The method resolution order would cause MultiService.startService() to # be called which we explicitly want to override with this method. service.Service.startService(self) dl = [] # if a service attaches another service during the reconfiguration # then the service will be started twice, so we don't use iter, but rather # copy in a list for svc in list(self): # handle any deferreds, passing up errors and success dl.append(defer.maybeDeferred(svc.startService)) return defer.gatherResults(dl, consumeErrors=True) @defer.inlineCallbacks def stopService(self): # Do NOT use super() here. # The method resolution order would cause MultiService.stopService() to # be called which we explicitly want to override with this method. 
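# (the code below stops children in reverse order; SharedService children
# are stopped last, after all regular children, because other children may
# still rely on them while they shut down)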
service.Service.stopService(self) services = list(self) services.reverse() dl = [] for svc in services: if not isinstance(svc, SharedService): dl.append(defer.maybeDeferred(svc.stopService)) # unlike MultiService, consume errors in each individual deferred, and # pass the first error in a child service up to our caller yield defer.gatherResults(dl, consumeErrors=True) for svc in services: if isinstance(svc, SharedService): yield svc.stopService() def addService(self, service): if service.name is not None: if service.name in self.namedServices: raise RuntimeError("cannot have two services with same name" " '%s'" % service.name) self.namedServices[service.name] = service self.services.append(service) if self.running: # It may be too late for that, but we will do our best service.privilegedStartService() return service.startService() return defer.succeed(None) class MasterService(AsyncMultiService): # master service is the service that stops the master property recursion @property def master(self): return self class SharedService(AsyncMultiService): """a service that is created only once per parameter set in a parent service""" @classmethod @defer.inlineCallbacks def getService(cls, parent, *args, **kwargs): name = cls.getName(*args, **kwargs) if name in parent.namedServices: return parent.namedServices[name] instance = cls(*args, **kwargs) # The class is not required to initialized its name # but we use the name to identify the instance in the parent service # so we force it with the name we used instance.name = name yield instance.setServiceParent(parent) # we put the service on top of the list, so that it is stopped the last # This make sense as the shared service is used as a dependency # for other service parent.services.remove(instance) parent.services.insert(0, instance) # hook the return value to the instance object return instance @classmethod def getName(cls, *args, **kwargs): _hash = hashlib.sha1() for arg in args: arg = unicode2bytes(str(arg)) _hash.update(arg) for k, v in sorted(kwargs.items()): k = unicode2bytes(str(k)) v = unicode2bytes(str(v)) _hash.update(k) _hash.update(v) return cls.__name__ + "_" + _hash.hexdigest() class BuildbotService(AsyncMultiService, config.ConfiguredMixin, util.ComparableMixin, ReconfigurableServiceMixin): compare_attrs = ('name', '_config_args', '_config_kwargs') name = None configured = False objectid = None def __init__(self, *args, **kwargs): name = kwargs.pop("name", None) if name is not None: self.name = bytes2unicode(name) self.checkConfig(*args, **kwargs) if self.name is None: raise ValueError( "%s: must pass a name to constructor" % type(self)) self._config_args = args self._config_kwargs = kwargs self.rendered = False super().__init__() def getConfigDict(self): _type = type(self) return {'name': self.name, 'class': _type.__module__ + "." + _type.__name__, 'args': self._config_args, 'kwargs': self._config_kwargs} @defer.inlineCallbacks def reconfigServiceWithSibling(self, sibling): # only reconfigure if sibling is configured differently. 
# sibling == self is using ComparableMixin's implementation # only compare compare_attrs if self.configured and sibling == self: return None self.configured = True # render renderables in parallel # Properties import to resolve cyclic import issue from buildbot.process.properties import Properties p = Properties() p.master = self.master # render renderables in parallel secrets = [] kwargs = {} accumulateClassList(self.__class__, 'secrets', secrets) for k, v in sibling._config_kwargs.items(): if k in secrets: # for non reconfigurable services, we force the attribute v = yield p.render(v) setattr(sibling, k, v) setattr(self, k, v) kwargs[k] = v d = yield self.reconfigService(*sibling._config_args, **kwargs) return d def configureService(self): # reconfigServiceWithSibling with self, means first configuration return self.reconfigServiceWithSibling(self) @defer.inlineCallbacks def startService(self): if not self.configured: try: yield self.configureService() except NotImplementedError: pass yield super().startService() def checkConfig(self, *args, **kwargs): return defer.succeed(True) def reconfigService(self, name=None, *args, **kwargs): return defer.succeed(None) def renderSecrets(self, *args): # Properties import to resolve cyclic import issue from buildbot.process.properties import Properties p = Properties() p.master = self.master if len(args) == 1: return p.render(args[0]) return defer.gatherResults([p.render(s) for s in args], consumeErrors=True) class ClusteredBuildbotService(BuildbotService): """ ClusteredBuildbotService-es are meant to be executed on a single master only. When starting such a service, by means of "yield startService", it will first try to claim it on the current master and: - return without actually starting it if it was already claimed by another master (self.active == False). It will however keep trying to claim it, in case another master stops, and takes the job back. - return after it starts else. """ compare_attrs = ('name',) POLL_INTERVAL_SEC = 5 * 60 # 5 minutes serviceid = None active = False def __init__(self, *args, **kwargs): self.serviceid = None self.active = False self._activityPollCall = None self._activityPollDeferred = None super().__init__(*args, **kwargs) # activity handling def isActive(self): return self.active def activate(self): # will run when this instance becomes THE CHOSEN ONE for the cluster return defer.succeed(None) def deactivate(self): # to be overridden by subclasses # will run when this instance loses its chosen status return defer.succeed(None) # service arbitration hooks def _getServiceId(self): # retrieve the id for this service; we assume that, once we have a valid id, # the id doesn't change. This may return a Deferred. raise NotImplementedError def _claimService(self): # Attempt to claim the service for this master. Should return True or False # (optionally via a Deferred) to indicate whether this master now owns the # service. raise NotImplementedError def _unclaimService(self): # Release the service from this master. This will only be called by a claimed # service, and this really should be robust and release the claim. May return # a Deferred. raise NotImplementedError # default implementation to delegate to the above methods @defer.inlineCallbacks def startService(self): # subclasses should override startService only to perform actions that should # run on all instances, even if they never get activated on this # master. 
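# (note: the deferred yielded at the end of this method only fires once an
# activity poll has either claimed and activated the service on this master
# or observed that another master currently owns it; see _activityPoll below)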
yield super().startService() self._startServiceDeferred = defer.Deferred() self._startActivityPolling() yield self._startServiceDeferred @defer.inlineCallbacks def stopService(self): # subclasses should override stopService only to perform actions that should # run on all instances, even if they never get activated on this # master. self._stopActivityPolling() # need to wait for prior activations to finish if self._activityPollDeferred: yield self._activityPollDeferred if self.active: self.active = False try: yield self.deactivate() yield self._unclaimService() except Exception as e: log.err(e, _why="Caught exception while deactivating ClusteredService(%s)" % self.name) yield super().stopService() def _startActivityPolling(self): self._activityPollCall = task.LoopingCall(self._activityPoll) # plug in a clock if we have one, for tests if hasattr(self, 'clock'): self._activityPollCall.clock = self.clock d = self._activityPollCall.start(self.POLL_INTERVAL_SEC, now=True) self._activityPollDeferred = d # this should never happen, but just in case: d.addErrback(log.err, 'while polling for service activity:') def _stopActivityPolling(self): if self._activityPollCall: self._activityPollCall.stop() self._activityPollCall = None return self._activityPollDeferred def _callbackStartServiceDeferred(self): if self._startServiceDeferred is not None: self._startServiceDeferred.callback(None) self._startServiceDeferred = None @defer.inlineCallbacks def _activityPoll(self): try: # just in case.. if self.active: return if self.serviceid is None: self.serviceid = yield self._getServiceId() try: claimed = yield self._claimService() except Exception: log.err( _why='WARNING: ClusteredService(%s) got exception while trying to claim' % self.name) return if not claimed: # this master is not responsible # for this service, we callback for StartService # if it was not callback-ed already, # and keep polling to take back the service # if another one lost it self._callbackStartServiceDeferred() return try: # this master is responsible for this service # we activate it self.active = True yield self.activate() except Exception: # this service is half-active, and noted as such in the db.. 
log.err( _why='WARNING: ClusteredService(%s) is only partially active' % self.name) finally: # cannot wait for its deactivation # with yield self._stopActivityPolling # as we're currently executing the # _activityPollCall callback # we just call it without waiting its stop # (that may open race conditions) self._stopActivityPolling() self._callbackStartServiceDeferred() except Exception: # don't pass exceptions into LoopingCall, which can cause it to # fail log.err( _why='WARNING: ClusteredService(%s) failed during activity poll' % self.name) class BuildbotServiceManager(AsyncMultiService, config.ConfiguredMixin, ReconfigurableServiceMixin): config_attr = "services" name = "services" def getConfigDict(self): return {'name': self.name, 'childs': [v.getConfigDict() for v in self.namedServices.values()]} @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): # arrange childs by name old_by_name = self.namedServices old_set = set(old_by_name) new_config_attr = getattr(new_config, self.config_attr) if isinstance(new_config_attr, list): new_by_name = {s.name: s for s in new_config_attr} elif isinstance(new_config_attr, dict): new_by_name = new_config_attr else: raise TypeError( "config.%s should be a list or dictionary" % (self.config_attr)) new_set = set(new_by_name) # calculate new childs, by name, and removed childs removed_names, added_names = util.diffSets(old_set, new_set) # find any childs for which the fully qualified class name has # changed, and treat those as an add and remove # While we're at it find any service that don't know how to reconfig, # and, if they have changed, add them to both removed and added, so that we # run the new version for n in old_set & new_set: old = old_by_name[n] new = new_by_name[n] # detect changed class name if reflect.qual(old.__class__) != reflect.qual(new.__class__): removed_names.add(n) added_names.add(n) # compare using ComparableMixin if they don't support reconfig elif not hasattr(old, 'reconfigServiceWithBuildbotConfig'): if old != new: removed_names.add(n) added_names.add(n) if removed_names or added_names: log.msg("adding %d new %s, removing %d" % (len(added_names), self.config_attr, len(removed_names))) for n in removed_names: child = old_by_name[n] # disownServiceParent calls stopService after removing the relationship # as child might use self.master.data to stop itself, its better to stop it first # (this is related to the fact that self.master is found by recursively looking at self.parent # for a master) yield child.stopService() # it has already called, so do not call it again child.stopService = lambda: None yield child.disownServiceParent() # HACK: we still keep a reference to the master for some cleanup tasks which are not waited by # to stopService (like the complex worker disconnection mechanism) # http://trac.buildbot.net/ticket/3583 child.parent = self.master for n in added_names: child = new_by_name[n] # setup service's objectid if hasattr(child, 'objectid'): class_name = '%s.%s' % (child.__class__.__module__, child.__class__.__name__) objectid = yield self.master.db.state.getObjectId( child.name, class_name) child.objectid = objectid yield defer.maybeDeferred(child.setServiceParent, self) # As the services that were just added got # reconfigServiceWithSibling called by # setServiceParent->startService, # we avoid calling it again by selecting # in reconfigurable_services, services # that were not added just now reconfigurable_services = [svc for svc in self if svc.name not in added_names] # sort by 
priority reconfigurable_services.sort(key=lambda svc: -svc.reconfig_priority) for svc in reconfigurable_services: if not svc.name: raise ValueError( "{}: child {} should have a defined name attribute".format(self, svc)) config_sibling = new_by_name.get(svc.name) try: yield svc.reconfigServiceWithSibling(config_sibling) except NotImplementedError: # legacy support. Its too painful to transition old code to new Service life cycle # so we implement switch of child when the service raises NotImplementedError # Note this means that self will stop, and sibling will take ownership # means that we have a small time where the service is unavailable. yield svc.disownServiceParent() config_sibling.objectid = svc.objectid yield config_sibling.setServiceParent(self) buildbot-2.6.0/master/buildbot/util/ssl.py000066400000000000000000000027101361162603000205500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ This modules acts the same as twisted.internet.ssl except it does not raise ImportError Modules using this should call ensureHasSSL in order to make sure that the user installed buildbot[tls] """ import unittest from buildbot.config import error try: from twisted.internet.ssl import * # noqa pylint: disable=unused-wildcard-import, wildcard-import ssl_import_error = None has_ssl = True except ImportError as e: ssl_import_error = str(e) has_ssl = False def ensureHasSSL(module): if not has_ssl: error("TLS dependencies required for {} are not installed : {}\n pip install 'buildbot[tls]'".format( module, ssl_import_error)) def skipUnless(f): return unittest.skipUnless(has_ssl, "TLS dependencies required")(f) buildbot-2.6.0/master/buildbot/util/state.py000066400000000000000000000031731361162603000210730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
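# A minimal sketch (with a hypothetical service class) of the StateMixin
# defined below, which persists key/value state through master.db.state:
#
#   class MyScheduler(StateMixin, ...):   # needs self.name and self.master
#       @defer.inlineCallbacks
#       def poll(self):
#           last = yield self.getState('last_change', 0)
#           # ... do some work ...
#           yield self.setState('last_change', newid)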
# # Copyright Buildbot Team Members from twisted.internet import defer class StateMixin: # state management _objectid = None @defer.inlineCallbacks def getState(self, *args, **kwargs): # get the objectid, if not known if self._objectid is None: self._objectid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__) rv = yield self.master.db.state.getState(self._objectid, *args, **kwargs) return rv @defer.inlineCallbacks def setState(self, key, value): # get the objectid, if not known if self._objectid is None: self._objectid = yield self.master.db.state.getObjectId(self.name, self.__class__.__name__) yield self.master.db.state.setState(self._objectid, key, value) buildbot-2.6.0/master/buildbot/util/subscription.py000066400000000000000000000047261361162603000225040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import Notifier class SubscriptionPoint: def __init__(self, name): self.name = name self.subscriptions = set() self._unfinished_deliveries = [] self._unfinished_notifier = Notifier() def __str__(self): return "" % self.name def subscribe(self, callback): sub = Subscription(self, callback) self.subscriptions.add(sub) return sub def deliver(self, *args, **kwargs): self._unfinished_deliveries.append(self) for sub in list(self.subscriptions): try: d = sub.callback(*args, **kwargs) if isinstance(d, defer.Deferred): self._unfinished_deliveries.append(d) d.addBoth(self._notify_delivery_finished, d) except Exception: log.err(failure.Failure(), 'while invoking callback %s to %s' % (sub.callback, self)) self._notify_delivery_finished(None, self) def waitForDeliveriesToFinish(self): # returns a deferred if not self._unfinished_deliveries: return defer.succeed(None) return self._unfinished_notifier.wait() def _unsubscribe(self, subscription): self.subscriptions.remove(subscription) def _notify_delivery_finished(self, _, d): self._unfinished_deliveries.remove(d) if not self._unfinished_deliveries: self._unfinished_notifier.notify(None) class Subscription: def __init__(self, subpt, callback): self.subpt = subpt self.callback = callback def unsubscribe(self): self.subpt._unsubscribe(self) buildbot-2.6.0/master/buildbot/util/tuplematch.py000066400000000000000000000016321361162603000221170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
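# Examples for the matchTuple helper defined below: None in the filter acts
# as a wildcard, and the filter must have the same length as the routing key:
#
#   matchTuple(('builds', '12', 'finished'), ('builds', None, 'finished'))
#   # -> True
#   matchTuple(('builds', '12', 'new'), ('builds', None, 'finished'))
#   # -> False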
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members def matchTuple(routingKey, filter): if len(filter) != len(routingKey): return False for k, f in zip(routingKey, filter): if f is not None and f != k: return False return True buildbot-2.6.0/master/buildbot/wamp/000077500000000000000000000000001361162603000173645ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/wamp/__init__.py000066400000000000000000000000001361162603000214630ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/wamp/connector.py000066400000000000000000000126551361162603000217410ustar00rootroot00000000000000# This file is part of . Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Team Members import txaio from autobahn.twisted.wamp import ApplicationSession from autobahn.twisted.wamp import Service from autobahn.wamp.exception import TransportLost from twisted.internet import defer from twisted.python import failure from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import service class MasterService(ApplicationSession, service.AsyncMultiService): """ concatenation of all the wamp services of buildbot """ def __init__(self, config): # Cannot use super() here. # We must explicitly call both parent constructors. ApplicationSession.__init__(self) service.AsyncMultiService.__init__(self) self.config = config self.leaving = False self.setServiceParent(config.extra['parent']) @defer.inlineCallbacks def onJoin(self, details): log.msg("Wamp connection succeed!") for handler in [self] + self.services: yield self.register(handler) yield self.subscribe(handler) yield self.publish("org.buildbot.%s.connected" % (self.master.masterid)) self.parent.service = self self.parent.serviceDeferred.callback(self) @defer.inlineCallbacks def onLeave(self, details): if self.leaving: return # XXX We don't handle crossbar reboot, or any other disconnection well. # this is a tricky problem, as we would have to reconnect with exponential backoff # re-subscribe to subscriptions, queue messages until reconnection. # This is quite complicated, and I believe much better handled in autobahn # It is possible that such failure is practically non-existent # so for now, we just crash the master log.msg("Guru meditation! We have been disconnected from wamp server") log.msg( "We don't know how to recover this without restarting the whole system") log.msg(str(details)) yield self.master.stopService() def onUserError(self, e, msg): log.err(e, msg) def make(config): if config: return MasterService(config) # if no config given, return a description of this WAMPlet .. 
return {'label': 'Buildbot master wamplet', 'description': 'This contains all the wamp methods provided by a buildbot master'} class WampConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService): serviceClass = Service name = "wamp" def __init__(self): super().__init__() self.app = self.router_url = None self.serviceDeferred = defer.Deferred() self.service = None def getService(self): if self.service is not None: return defer.succeed(self.service) d = defer.Deferred() @self.serviceDeferred.addCallback def gotService(service): d.callback(service) return service return d def stopService(self): if self.service is not None: self.service.leaving = True super().stopService() @defer.inlineCallbacks def publish(self, topic, data, options=None): service = yield self.getService() try: ret = yield service.publish(topic, data, options=options) except TransportLost: log.err(failure.Failure(), "while publishing event " + topic) return return ret @defer.inlineCallbacks def subscribe(self, callback, topic=None, options=None): service = yield self.getService() ret = yield service.subscribe(callback, topic, options) return ret @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): if new_config.mq.get('type', 'simple') != "wamp": return wamp = new_config.mq log.msg("Starting wamp with config: %r", wamp) router_url = wamp.get('router_url', None) # This is not a good idea to allow people to switch the router via reconfig # how would we continue the current transactions ? # how would we tell the workers to switch router ? if self.app is not None and self.router_url != router_url: raise ValueError( "Cannot use different wamp router url when reconfiguring") if router_url is None: return self.router_url = router_url self.app = self.serviceClass( url=self.router_url, extra=dict(master=self.master, parent=self), realm=bytes2unicode(wamp.get('realm', 'buildbot')), make=make ) wamp_debug_level = wamp.get('wamp_debug_level', 'error') txaio.set_global_log_level(wamp_debug_level) yield self.app.setServiceParent(self) yield super().reconfigServiceWithBuildbotConfig(new_config) buildbot-2.6.0/master/buildbot/worker/000077500000000000000000000000001361162603000177315ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/worker/__init__.py000066400000000000000000000016421361162603000220450ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot.worker.base import AbstractWorker from buildbot.worker.base import Worker from buildbot.worker.latent import AbstractLatentWorker _hush_pyflakes = [ AbstractWorker, Worker, AbstractLatentWorker, ] buildbot-2.6.0/master/buildbot/worker/base.py000066400000000000000000000666221361162603000212310ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 import time from twisted.internet import defer from twisted.python import log from twisted.python.reflect import namedModule from zope.interface import implementer from buildbot import config from buildbot.interfaces import IWorker from buildbot.process import metrics from buildbot.process.properties import Properties from buildbot.status.worker import WorkerStatus from buildbot.util import Notifier from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util.eventual import eventually @implementer(IWorker) class AbstractWorker(service.BuildbotService): """This is the master-side representative for a remote buildbot worker. There is exactly one for each worker described in the config file (the c['workers'] list). When buildbots connect in (.attach), they get a reference to this instance. The BotMaster object is stashed as the .botmaster attribute. The BotMaster is also our '.parent' Service. I represent a worker -- a remote machine capable of running builds. I am instantiated by the configuration file, and can be subclassed to add extra functionality.""" # reconfig workers after builders reconfig_priority = 64 quarantine_timer = None quarantine_timeout = quarantine_initial_timeout = 10 quarantine_max_timeout = 60 * 60 start_missing_on_startup = True DEFAULT_MISSING_TIMEOUT = 3600 DEFAULT_KEEPALIVE_INTERVAL = 3600 # override to True if isCompatibleWithBuild may return False builds_may_be_incompatible = False def checkConfig(self, name, password, max_builds=None, notify_on_missing=None, missing_timeout=None, properties=None, defaultProperties=None, locks=None, keepalive_interval=DEFAULT_KEEPALIVE_INTERVAL, machine_name=None): """ @param name: botname this machine will supply when it connects @param password: password this machine will supply when it connects @param max_builds: maximum number of simultaneous builds that will be run concurrently on this worker (the default is None for no limit) @param properties: properties that will be applied to builds run on this worker @type properties: dictionary @param defaultProperties: properties that will be applied to builds run on this worker only if the property has not been set by another source @type defaultProperties: dictionary @param locks: A list of locks that must be acquired before this worker can be used @type locks: dictionary @param machine_name: The name of the machine to associate with the worker. 
""" self.name = name = bytes2unicode(name) self.machine_name = machine_name self.password = password # protocol registration self.registration = None self._graceful = False self._paused = False # these are set when the service is started self.manager = None self.workerid = None self.worker_status = WorkerStatus(name) self.worker_commands = None self.workerforbuilders = {} self.max_builds = max_builds self.access = [] if locks: self.access = locks self.lock_subscriptions = [] self.properties = Properties() self.properties.update(properties or {}, "Worker") self.properties.setProperty("workername", name, "Worker") self.defaultProperties = Properties() self.defaultProperties.update(defaultProperties or {}, "Worker") if self.machine_name is not None: self.properties.setProperty('machine_name', self.machine_name, 'Worker') self.machine = None self.lastMessageReceived = 0 if notify_on_missing is None: notify_on_missing = [] if isinstance(notify_on_missing, str): notify_on_missing = [notify_on_missing] self.notify_on_missing = notify_on_missing for i in notify_on_missing: if not isinstance(i, str): config.error( 'notify_on_missing arg %r is not a string' % (i,)) self.missing_timeout = missing_timeout self.missing_timer = None # a protocol connection, if we're currently connected self.conn = None # during disconnection self.conn will be set to None before all disconnection notifications # are delivered. During that period _pending_disconnection_delivery_notifier will be set to a # notifier and allows interested users to wait until all disconnection notifications are # delivered. self._pending_disconnection_delivery_notifier = None self._old_builder_list = None self._configured_builderid_list = None def __repr__(self): return "<%s %r>" % (self.__class__.__name__, self.name) @property def workername(self): # workername is now an alias to twisted.Service's name return self.name @property def botmaster(self): if self.master is None: return None return self.master.botmaster @defer.inlineCallbacks def updateLocks(self): """Convert the L{LockAccess} objects in C{self.locks} into real lock objects, while also maintaining the subscriptions to lock releases.""" # unsubscribe from any old locks for s in self.lock_subscriptions: s.unsubscribe() # convert locks into their real form locks = yield self.botmaster.getLockFromLockAccesses(self.access, self.config_version) self.locks = [(l.getLockForWorker(self.workername), la) for l, la in locks] self.lock_subscriptions = [l.subscribeToReleases(self._lockReleased) for l, la in self.locks] def locksAvailable(self): """ I am called to see if all the locks I depend on are available, in which I return True, otherwise I return False """ if not self.locks: return True for lock, access in self.locks: if not lock.isAvailable(self, access): return False return True def acquireLocks(self): """ I am called when a build is preparing to run. I try to claim all the locks that are needed for a build to happen. If I can't, then my caller should give up the build and try to get another worker to look at it. 
""" log.msg("acquireLocks(worker %s, locks %s)" % (self, self.locks)) if not self.locksAvailable(): log.msg("worker %s can't lock, giving up" % (self, )) return False # all locks are available, claim them all for lock, access in self.locks: lock.claim(self, access) return True def releaseLocks(self): """ I am called to release any locks after a build has finished """ log.msg("releaseLocks(%s): %s" % (self, self.locks)) for lock, access in self.locks: lock.release(self, access) def _lockReleased(self): """One of the locks for this worker was released; try scheduling builds.""" if not self.botmaster: return # oh well.. self.botmaster.maybeStartBuildsForWorker(self.name) def _applyWorkerInfo(self, info): if not info: return self.worker_status.setAdmin(info.get("admin")) self.worker_status.setHost(info.get("host")) self.worker_status.setAccessURI(info.get("access_uri", None)) self.worker_status.setVersion(info.get("version", "(unknown)")) # store everything as Properties for k, v in info.items(): if k in ('environ', 'worker_commands'): continue self.worker_status.info.setProperty(k, v, "Worker") @defer.inlineCallbacks def _getWorkerInfo(self): worker = yield self.master.data.get( ('workers', self.workerid)) self._applyWorkerInfo(worker['workerinfo']) def setServiceParent(self, parent): # botmaster needs to set before setServiceParent which calls # startService self.manager = parent return super().setServiceParent(parent) @defer.inlineCallbacks def startService(self): # tracks config version for locks self.config_version = self.master.config_version self.updateLocks() self.workerid = yield self.master.data.updates.findWorkerId( self.name) self.workerActionConsumer = yield self.master.mq.startConsuming(self.controlWorker, ("control", "worker", str(self.workerid), None)) yield self._getWorkerInfo() yield super().startService() # startMissingTimer wants the service to be running to really start if self.start_missing_on_startup: self.startMissingTimer() @defer.inlineCallbacks def reconfigService(self, name, password, max_builds=None, notify_on_missing=None, missing_timeout=DEFAULT_MISSING_TIMEOUT, properties=None, defaultProperties=None, locks=None, keepalive_interval=DEFAULT_KEEPALIVE_INTERVAL, machine_name=None): # Given a Worker config arguments, configure this one identically. # Because Worker objects are remotely referenced, we can't replace them # without disconnecting the worker, yet there's no reason to do that. assert self.name == name self.password = password # adopt new instance's configuration parameters self.max_builds = max_builds self.access = [] if locks: self.access = locks if notify_on_missing is None: notify_on_missing = [] if isinstance(notify_on_missing, str): notify_on_missing = [notify_on_missing] self.notify_on_missing = notify_on_missing if self.missing_timeout != missing_timeout: running_missing_timer = self.missing_timer self.stopMissingTimer() self.missing_timeout = missing_timeout if running_missing_timer: self.startMissingTimer() self.properties = Properties() self.properties.update(properties or {}, "Worker") self.properties.setProperty("workername", name, "Worker") self.defaultProperties = Properties() self.defaultProperties.update(defaultProperties or {}, "Worker") # Note that before first reconfig self.machine will always be None and # out of sync with self.machine_name, thus more complex logic is needed. 
if self.machine is not None and self.machine_name != machine_name: self.machine.unregisterWorker(self) self.machine = None self.machine_name = machine_name if self.machine is None and self.machine_name is not None: self.machine = self.master.machine_manager.getMachineByName(self.machine_name) if self.machine is not None: self.machine.registerWorker(self) self.properties.setProperty("machine_name", self.machine_name, "Worker") else: log.err("Unknown machine '{}' for worker '{}'".format( self.machine_name, self.name)) # update our records with the worker manager if not self.registration: self.registration = yield self.master.workers.register(self) yield self.registration.update(self, self.master.config) # tracks config version for locks self.config_version = self.master.config_version self.updateLocks() @defer.inlineCallbacks def reconfigServiceWithSibling(self, sibling): # reconfigServiceWithSibling will only reconfigure the worker when it is configured differently. # However, the worker configuration depends on which builder it is configured yield super().reconfigServiceWithSibling(sibling) # update the attached worker's notion of which builders are attached. # This assumes that the relevant builders have already been configured, # which is why the reconfig_priority is set low in this class. bids = [ b.getBuilderId() for b in self.botmaster.getBuildersForWorker(self.name)] bids = yield defer.gatherResults(bids, consumeErrors=True) if self._configured_builderid_list != bids: yield self.master.data.updates.workerConfigured(self.workerid, self.master.masterid, bids) yield self.updateWorker() self._configured_builderid_list = bids @defer.inlineCallbacks def stopService(self): if self.registration: yield self.registration.unregister() self.registration = None self.workerActionConsumer.stopConsuming() self.stopMissingTimer() self.stopQuarantineTimer() # mark this worker as configured for zero builders in this master yield self.master.data.updates.workerConfigured(self.workerid, self.master.masterid, []) # during master shutdown we need to wait until the disconnection notification deliveries # are completed, otherwise some of the events may still be firing long after the master # is completely shut down. yield self.disconnect() yield self.waitForCompleteShutdown() yield super().stopService() def isCompatibleWithBuild(self, build_props): # given a build properties object, determines whether the build is # compatible with the currently running worker or not. This is most # often useful for latent workers where it's possible to request # different kinds of workers. return defer.succeed(True) def startMissingTimer(self): if self.missing_timeout and self.parent and self.running: self.stopMissingTimer() # in case it's already running self.missing_timer = self.master.reactor.callLater(self.missing_timeout, self._missing_timer_fired) def stopMissingTimer(self): if self.missing_timer: if self.missing_timer.active(): self.missing_timer.cancel() self.missing_timer = None def isConnected(self): return self.conn def _missing_timer_fired(self): self.missing_timer = None # notify people, but only if we're still in the config if not self.parent: return last_connection = time.ctime(time.time() - self.missing_timeout) self.master.data.updates.workerMissing( workerid=self.workerid, masterid=self.master.masterid, last_connection=last_connection, notify=self.notify_on_missing ) def updateWorker(self): """Called to add or remove builders after the worker has connected. 
@return: a Deferred that indicates when an attached worker has accepted the new builders and/or released the old ones.""" if self.conn: return self.sendBuilderList() # else: return defer.succeed(None) @defer.inlineCallbacks def attached(self, conn): """This is called when the worker connects.""" assert self.conn is None metrics.MetricCountEvent.log("AbstractWorker.attached_workers", 1) # now we go through a sequence of calls, gathering information, then # tell the Botmaster that it can finally give this worker to all the # Builders that care about it. # Reset graceful shutdown status self._graceful = False self.conn = conn self._old_builder_list = None # clear builder list before proceed self.worker_status.setConnected(True) self._applyWorkerInfo(conn.info) self.worker_commands = conn.info.get("worker_commands", {}) self.worker_environ = conn.info.get("environ", {}) self.worker_basedir = conn.info.get("basedir", None) self.worker_system = conn.info.get("system", None) self.conn.notifyOnDisconnect(self.detached) workerinfo = { 'admin': conn.info.get('admin'), 'host': conn.info.get('host'), 'access_uri': conn.info.get('access_uri'), 'version': conn.info.get('version') } yield self.master.data.updates.workerConnected( workerid=self.workerid, masterid=self.master.masterid, workerinfo=workerinfo ) if self.worker_system == "nt": self.path_module = namedModule("ntpath") else: # most everything accepts / as separator, so posix should be a # reasonable fallback self.path_module = namedModule("posixpath") log.msg("bot attached") self.messageReceivedFromWorker() self.stopMissingTimer() yield self.updateWorker() yield self.botmaster.maybeStartBuildsForWorker(self.name) self.updateState() def messageReceivedFromWorker(self): now = time.time() self.lastMessageReceived = now self.worker_status.setLastMessageReceived(now) def setupProperties(self, props): for name in self.properties.properties: props.setProperty( name, self.properties.getProperty(name), "Worker") for name in self.defaultProperties.properties: if name not in props: props.setProperty( name, self.defaultProperties.getProperty(name), "Worker") @defer.inlineCallbacks def _handle_disconnection_delivery_notifier(self): self._pending_disconnection_delivery_notifier = Notifier() yield self.conn.waitForNotifyDisconnectedDelivered() self._pending_disconnection_delivery_notifier.notify(None) self._pending_disconnection_delivery_notifier = None @defer.inlineCallbacks def detached(self): # protect against race conditions in conn disconnect path and someone # calling detached directly. At the moment the null worker does that. if self.conn is None: return metrics.MetricCountEvent.log("AbstractWorker.attached_workers", -1) self._handle_disconnection_delivery_notifier() yield self.conn.waitShutdown() self.conn = None self._old_builder_list = [] self.worker_status.setConnected(False) log.msg("Worker.detached(%s)" % (self.name,)) self.releaseLocks() yield self.master.data.updates.workerDisconnected( workerid=self.workerid, masterid=self.master.masterid, ) def disconnect(self): """Forcibly disconnect the worker. This severs the TCP connection and returns a Deferred that will fire (with None) when the connection is probably gone. If the worker is still alive, they will probably try to reconnect again in a moment. This is called in two circumstances. The first is when a worker is removed from the config file. In this case, when they try to reconnect, they will be rejected as an unknown worker. 
The second is when we wind up with two connections for the same worker, in which case we disconnect the older connection. """ if self.conn is None: return defer.succeed(None) log.msg("disconnecting old worker %s now" % (self.name,)) # When this Deferred fires, we'll be ready to accept the new worker return self._disconnect(self.conn) def waitForCompleteShutdown(self): # This function waits until the disconnection to happen and the disconnection # notifications have been delivered and acted upon. return self._waitForCompleteShutdownImpl(self.conn) @defer.inlineCallbacks def _waitForCompleteShutdownImpl(self, conn): if conn: d = defer.Deferred() def _disconnected(): eventually(d.callback, None) conn.notifyOnDisconnect(_disconnected) yield d yield conn.waitForNotifyDisconnectedDelivered() elif self._pending_disconnection_delivery_notifier is not None: yield self._pending_disconnection_delivery_notifier.wait() @defer.inlineCallbacks def _disconnect(self, conn): # This function waits until the disconnection to happen and the disconnection # notifications have been delivered and acted upon d = self._waitForCompleteShutdownImpl(conn) conn.loseConnection() log.msg("waiting for worker to finish disconnecting") yield d @defer.inlineCallbacks def sendBuilderList(self): our_builders = self.botmaster.getBuildersForWorker(self.name) blist = [(b.name, b.config.workerbuilddir) for b in our_builders] if blist == self._old_builder_list: return slist = yield self.conn.remoteSetBuilderList(builders=blist) self._old_builder_list = blist # Nothing has changed, so don't need to re-attach to everything if not slist: return dl = [] for name in slist: # use get() since we might have changed our mind since then b = self.botmaster.builders.get(name) if b: d1 = self.attachBuilder(b) dl.append(d1) yield defer.DeferredList(dl) def attachBuilder(self, builder): return builder.attached(self, self.worker_commands) def controlWorker(self, key, params): log.msg("worker {} wants to {}: {}".format(self.name, key[-1], params)) if key[-1] == "stop": return self.shutdownRequested() if key[-1] == "pause": self.pause() if key[-1] == "unpause": self.unpause() if key[-1] == "kill": self.shutdown() def shutdownRequested(self): self._graceful = True self.maybeShutdown() self.updateState() def addWorkerForBuilder(self, wfb): self.workerforbuilders[wfb.builder_name] = wfb def removeWorkerForBuilder(self, wfb): try: del self.workerforbuilders[wfb.builder_name] except KeyError: pass def buildFinished(self, wfb): """This is called when a build on this worker is finished.""" self.botmaster.maybeStartBuildsForWorker(self.name) def canStartBuild(self): """ I am called when a build is requested to see if this worker can start a build. This function can be used to limit overall concurrency on the worker. Note for subclassers: if a worker can become willing to start a build without any action on that worker (for example, by a resource in use on another worker becoming available), then you must arrange for L{maybeStartBuildsForWorker} to be called at that time, or builds on this worker will not start. """ # If we're waiting to shutdown gracefully or paused, then we shouldn't # accept any new jobs. 
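        # Example (illustrative sketch, not part of the original module): a
        # subclass that layers an extra admission check on top of the standard
        # ones below.  resource_available() is a hypothetical helper; as the
        # docstring above notes, such a subclass must arrange for
        # maybeStartBuildsForWorker() to be called once the resource frees up
        # again, or queued builds will never start.
        #
        #   class ThrottledWorker(Worker):
        #       def canStartBuild(self):
        #           if not self.resource_available():  # hypothetical helper
        #               return False
        #           return super().canStartBuild()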
if self._graceful or self._paused: return False if self.max_builds: active_builders = [wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()] if len(active_builders) >= self.max_builds: return False if not self.locksAvailable(): return False return True @defer.inlineCallbacks def shutdown(self): """Shutdown the worker""" if not self.conn: log.msg("no remote; worker is already shut down") return yield self.conn.remoteShutdown() def maybeShutdown(self): """Shut down this worker if it has been asked to shut down gracefully, and has no active builders.""" if not self._graceful: return active_builders = [wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()] if active_builders: return d = self.shutdown() d.addErrback(log.err, 'error while shutting down worker') def updateState(self): self.master.data.updates.setWorkerState(self.workerid, self._paused, self._graceful) def pause(self): """Stop running new builds on the worker.""" self._paused = True self.updateState() def unpause(self): """Restart running new builds on the worker.""" self._paused = False self.botmaster.maybeStartBuildsForWorker(self.name) self.updateState() def isPaused(self): return self._paused def resetQuarantine(self): self.quarantine_timeout = self.quarantine_initial_timeout def putInQuarantine(self): if self.quarantine_timer: # already in quarantine return self.pause() self.quarantine_timer = self.master.reactor.callLater( self.quarantine_timeout, self.exitQuarantine) log.msg("{} has been put in quarantine for {}s".format( self.name, self.quarantine_timeout)) # next we will wait twice as long self.quarantine_timeout *= 2 if self.quarantine_timeout > self.quarantine_max_timeout: # unless we hit the max timeout self.quarantine_timeout = self.quarantine_max_timeout def exitQuarantine(self): self.quarantine_timer = None self.unpause() def stopQuarantineTimer(self): if self.quarantine_timer is not None: self.quarantine_timer.cancel() self.quarantine_timer = None self.unpause() class Worker(AbstractWorker): @defer.inlineCallbacks def detached(self): yield super().detached() self.botmaster.workerLost(self) self.startMissingTimer() @defer.inlineCallbacks def attached(self, bot): try: yield super().attached(bot) except Exception as e: log.err(e, "worker %s cannot attach" % (self.name,)) return def buildFinished(self, wfb): """This is called when a build on this worker is finished.""" super().buildFinished(wfb) # If we're gracefully shutting down, and we have no more active # builders, then it's safe to disconnect self.maybeShutdown() buildbot-2.6.0/master/buildbot/worker/docker.py000066400000000000000000000326371361162603000215650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import hashlib import json import socket from io import BytesIO from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerCannotSubstantiate from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import unicode2bytes from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.worker import AbstractLatentWorker try: import docker from docker import client from docker.errors import NotFound _hush_pyflakes = [docker, client] docker_py_version = float(docker.__version__.rsplit(".", 1)[0]) except ImportError: docker = None client = None docker_py_version = 0.0 def _handle_stream_line(line): """\ Input is the json representation of: {'stream': "Content\ncontent"} Output is a generator yield "Content", and then "content" """ # XXX This necessary processing is probably a bug from docker-py, # hence, might break if the bug is fixed, i.e. we should get decoded JSON # directly from the API. line = json.loads(line) if 'error' in line: content = "ERROR: " + line['error'] else: content = line.get('stream', '') for streamline in content.split('\n'): if streamline: yield streamline class DockerBaseWorker(AbstractLatentWorker): def checkConfig(self, name, password=None, image=None, masterFQDN=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if image is not None and not isinstance(image, str): if not hasattr(image, 'getRenderingFor'): config.error("image must be a string") super().checkConfig(name, password, **kwargs) def reconfigService(self, name, password=None, image=None, masterFQDN=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. 
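        # Example (illustrative, not part of the original source): how the
        # _handle_stream_line() helper defined earlier in this file unpacks a
        # line of docker build output.
        #
        #   list(_handle_stream_line('{"stream": "Step 1/3 : FROM alpine\\n ---> abc"}'))
        #   # -> ['Step 1/3 : FROM alpine', ' ---> abc']
        #
        #   list(_handle_stream_line('{"error": "pull access denied"}'))
        #   # -> ['ERROR: pull access denied']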
if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if password is None: password = self.getRandomPass() if masterFQDN is None: masterFQDN = socket.getfqdn() self.masterFQDN = masterFQDN self.image = image masterName = unicode2bytes(self.master.name) self.masterhash = hashlib.sha1(masterName).hexdigest()[:6] return super().reconfigService(name, password, **kwargs) def getContainerName(self): return ('buildbot-{worker}-{hash}'.format(worker=self.workername, hash=self.masterhash)).replace("_", "-") @property def shortid(self): if self.instance is None: return None return self.instance['Id'][:6] def createEnvironment(self, build=None): result = { "BUILDMASTER": self.masterFQDN, "WORKERNAME": self.name, "WORKERPASS": self.password } if self.registration is not None: result["BUILDMASTER_PORT"] = str(self.registration.getPBPort()) if ":" in self.masterFQDN: result["BUILDMASTER"], result["BUILDMASTER_PORT"] = self.masterFQDN.split(":") return result @staticmethod def get_fqdn(): return socket.getfqdn() @staticmethod def get_ip(): fqdn = socket.getfqdn() try: return socket.gethostbyname(fqdn) except socket.gaierror: return fqdn class DockerLatentWorker(DockerBaseWorker, CompatibleLatentWorkerMixin): instance = None def checkConfig(self, name, password, docker_host, image=None, command=None, volumes=None, dockerfile=None, version=None, tls=None, followStartupLogs=False, masterFQDN=None, hostconfig=None, autopull=False, alwaysPull=False, custom_context=False, encoding='gzip', buildargs=None, **kwargs): super().checkConfig(name, password, image, masterFQDN, **kwargs) if not client: config.error("The python module 'docker>=2.0' is needed to use a" " DockerLatentWorker") if not image and not dockerfile: config.error("DockerLatentWorker: You need to specify at least" " an image name, or a dockerfile") # Following block is only for checking config errors, # actual parsing happens in self.parse_volumes() # Renderables can be direct volumes definition or list member if isinstance(volumes, list): for volume_string in (volumes or []): if not isinstance(volume_string, str): continue try: bind, volume = volume_string.split(":", 1) except ValueError: config.error("Invalid volume definition for docker " "%s. Skipping..." % volume_string) continue @defer.inlineCallbacks def reconfigService(self, name, password, docker_host, image=None, command=None, volumes=None, dockerfile=None, version=None, tls=None, followStartupLogs=False, masterFQDN=None, hostconfig=None, autopull=False, alwaysPull=False, custom_context=False, encoding='gzip', buildargs=None, **kwargs): yield super().reconfigService(name, password, image, masterFQDN, **kwargs) self.volumes = volumes or [] self.followStartupLogs = followStartupLogs self.command = command or [] self.dockerfile = dockerfile self.hostconfig = hostconfig or {} self.autopull = autopull self.alwaysPull = alwaysPull self.custom_context = custom_context self.encoding = encoding self.buildargs = buildargs # Prepare the parameters for the Docker Client object. self.client_args = {'base_url': docker_host} if version is not None: self.client_args['version'] = version if tls is not None: self.client_args['tls'] = tls def _thd_parse_volumes(self, volumes): volume_list = [] for volume_string in (volumes or []): try: _, volume = volume_string.split(":", 1) except ValueError: config.error("Invalid volume definition for docker " "%s. Skipping..." 
% volume_string) continue if volume.endswith(':ro') or volume.endswith(':rw'): volume = volume[:-3] volume_list.append(volume) return volume_list, volumes def _getDockerClient(self): if docker.version[0] == '1': docker_client = client.Client(**self.client_args) else: docker_client = client.APIClient(**self.client_args) return docker_client def renderWorkerProps(self, build): return build.render((self.image, self.dockerfile, self.volumes, self.custom_context, self.encoding, self.buildargs)) @defer.inlineCallbacks def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') image, dockerfile, volumes, custom_context, encoding, buildargs = \ yield self.renderWorkerPropsOnStart(build) res = yield threads.deferToThread(self._thd_start_instance, image, dockerfile, volumes, custom_context, encoding, buildargs) defer.returnValue(res) def _image_exists(self, client, name): # Make sure the image exists for image in client.images(): for tag in image['RepoTags'] or []: if ':' in name and tag == name: return True if tag.startswith(name + ':'): return True return False def _thd_start_instance(self, image, dockerfile, volumes, custom_context, encoding, buildargs): docker_client = self._getDockerClient() container_name = self.getContainerName() # cleanup the old instances instances = docker_client.containers( all=1, filters=dict(name=container_name)) container_name = "/{0}".format(container_name) for instance in instances: if container_name not in instance['Names']: continue try: docker_client.remove_container(instance['Id'], v=True, force=True) except NotFound: pass # that's a race condition found = False if image is not None: found = self._image_exists(docker_client, image) else: image = '%s_%s_image' % (self.workername, id(self)) if (not found) and (dockerfile is not None): log.msg("Image '%s' not found, building it from scratch" % image) if (custom_context): with open(dockerfile, 'rb') as fin: lines = docker_client.build(fileobj=fin, custom_context=custom_context, encoding=encoding, tag=image, buildargs=buildargs) else: lines = docker_client.build( fileobj=BytesIO(dockerfile.encode('utf-8')), tag=image, ) for line in lines: for streamline in _handle_stream_line(line): log.msg(streamline) imageExists = self._image_exists(docker_client, image) if ((not imageExists) or self.alwaysPull) and self.autopull: if (not imageExists): log.msg("Image '%s' not found, pulling from registry" % image) docker_client.pull(image) if (not self._image_exists(docker_client, image)): log.msg("Image '%s' not found" % image) raise LatentWorkerCannotSubstantiate( 'Image "%s" not found on docker host.' % image ) volumes, binds = self._thd_parse_volumes(volumes) host_conf = self.hostconfig.copy() host_conf['binds'] = binds if docker_py_version >= 2.2: host_conf['init'] = True host_conf = docker_client.create_host_config(**host_conf) instance = docker_client.create_container( image, self.command, name=self.getContainerName(), volumes=volumes, environment=self.createEnvironment(), host_config=host_conf ) if instance.get('Id') is None: log.msg('Failed to create the container') raise LatentWorkerFailedToSubstantiate( 'Failed to start container' ) shortid = instance['Id'][:6] log.msg('Container created, Id: %s...' 
% (shortid,)) instance['image'] = image self.instance = instance docker_client.start(instance) log.msg('Container started') if self.followStartupLogs: logs = docker_client.attach( container=instance, stdout=True, stderr=True, stream=True) for line in logs: log.msg("docker VM %s: %s" % (shortid, line.strip())) if self.conn: break del logs return [instance['Id'], image] def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. return defer.succeed(None) instance = self.instance self.instance = None return threads.deferToThread(self._thd_stop_instance, instance, fast) def _thd_stop_instance(self, instance, fast): docker_client = self._getDockerClient() log.msg('Stopping container %s...' % instance['Id'][:6]) docker_client.stop(instance['Id']) if not fast: docker_client.wait(instance['Id']) docker_client.remove_container(instance['Id'], v=True, force=True) if self.image is None: try: docker_client.remove_image(image=instance['image']) except docker.errors.APIError as e: log.msg('Error while removing the image: %s', e) buildbot-2.6.0/master/buildbot/worker/ec2.py000066400000000000000000000624671361162603000207730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 """ A latent worker that uses EC2 to instantiate the workers on demand. 
Tested with Python boto 1.5c """ import os import re import time from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.worker import AbstractLatentWorker from buildbot.worker_transition import reportDeprecatedWorkerNameUsage try: import boto3 import botocore from botocore.client import ClientError except ImportError: boto3 = None PENDING = 'pending' RUNNING = 'running' SHUTTINGDOWN = 'shutting-down' TERMINATED = 'terminated' SPOT_REQUEST_PENDING_STATES = ['pending-evaluation', 'pending-fulfillment'] FULFILLED = 'fulfilled' PRICE_TOO_LOW = 'price-too-low' class EC2LatentWorker(AbstractLatentWorker): instance = image = None _poll_resolution = 5 # hook point for tests def __init__(self, name, password, instance_type, ami=None, valid_ami_owners=None, valid_ami_location_regex=None, elastic_ip=None, identifier=None, secret_identifier=None, aws_id_file_path=None, user_data=None, region=None, keypair_name=None, security_name=None, spot_instance=False, max_spot_price=1.6, volumes=None, placement=None, price_multiplier=1.2, tags=None, product_description='Linux/UNIX', subnet_id=None, security_group_ids=None, instance_profile_name=None, block_device_map=None, session=None, **kwargs): if not boto3: config.error("The python module 'boto3' is needed to use a " "EC2LatentWorker") if keypair_name is None: config.error("EC2LatentWorker: 'keypair_name' parameter must be " "specified") if security_name is None and not subnet_id: config.error("EC2LatentWorker: 'security_name' parameter must be " "specified") if volumes is None: volumes = [] if tags is None: tags = {} super().__init__(name, password, **kwargs) if security_name and subnet_id: raise ValueError( 'security_name (EC2 classic security groups) is not supported ' 'in a VPC. 
Use security_group_ids instead.') if not ((ami is not None) ^ (valid_ami_owners is not None or valid_ami_location_regex is not None)): raise ValueError( 'You must provide either a specific ami, or one or both of ' 'valid_ami_location_regex and valid_ami_owners') self.ami = ami if valid_ami_owners is not None: if isinstance(valid_ami_owners, int): valid_ami_owners = (valid_ami_owners,) else: for element in valid_ami_owners: if not isinstance(element, int): raise ValueError( 'valid_ami_owners should be int or iterable ' 'of ints', element) if valid_ami_location_regex is not None: if not isinstance(valid_ami_location_regex, str): raise ValueError( 'valid_ami_location_regex should be a string') # pre-compile the regex valid_ami_location_regex = re.compile(valid_ami_location_regex) if spot_instance and price_multiplier is None and max_spot_price is None: raise ValueError('You must provide either one, or both, of ' 'price_multiplier or max_spot_price') self.valid_ami_owners = None if valid_ami_owners: self.valid_ami_owners = [str(o) for o in valid_ami_owners] self.valid_ami_location_regex = valid_ami_location_regex self.instance_type = instance_type self.keypair_name = keypair_name self.security_name = security_name self.user_data = user_data self.spot_instance = spot_instance self.max_spot_price = max_spot_price self.volumes = volumes self.price_multiplier = price_multiplier self.product_description = product_description if None not in [placement, region]: self.placement = '%s%s' % (region, placement) else: self.placement = None if identifier is None: assert secret_identifier is None, ( 'supply both or neither of identifier, secret_identifier') if aws_id_file_path is None: home = os.environ['HOME'] default_path = os.path.join(home, '.ec2', 'aws_id') if os.path.exists(default_path): aws_id_file_path = default_path if aws_id_file_path: log.msg('WARNING: EC2LatentWorker is using deprecated ' 'aws_id file') with open(aws_id_file_path, 'r') as aws_file: identifier = aws_file.readline().strip() secret_identifier = aws_file.readline().strip() else: assert aws_id_file_path is None, \ 'if you supply the identifier and secret_identifier, ' \ 'do not specify the aws_id_file_path' assert secret_identifier is not None, \ 'supply both or neither of identifier, secret_identifier' region_found = None # Make the EC2 connection. self.session = session if self.session is None: if region is not None: for r in boto3.Session( aws_access_key_id=identifier, aws_secret_access_key=secret_identifier).get_available_regions('ec2'): if r == region: region_found = r if region_found is not None: self.session = boto3.Session( region_name=region, aws_access_key_id=identifier, aws_secret_access_key=secret_identifier) else: raise ValueError( 'The specified region does not exist: ' + region) else: # boto2 defaulted to us-east-1 when region was unset, we # mimic this here in boto3 region = botocore.session.get_session().get_config_variable('region') if region is None: region = 'us-east-1' self.session = boto3.Session( aws_access_key_id=identifier, aws_secret_access_key=secret_identifier, region_name=region ) self.ec2 = self.session.resource('ec2') self.ec2_client = self.session.client('ec2') # Make a keypair # # We currently discard the keypair data because we don't need it. # If we do need it in the future, we will always recreate the keypairs # because there is no way to # programmatically retrieve the private key component, unless we # generate it and store it on the filesystem, which is an unnecessary # usage requirement. 
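        # Example (illustrative sketch, not part of the original source): a
        # minimal on-demand EC2LatentWorker declaration exercising the
        # parameters validated above.  The AMI id, key pair, security group
        # and region are placeholders, and exposure through buildbot.plugins
        # is assumed.
        #
        #   from buildbot.plugins import worker
        #   w = worker.EC2LatentWorker(
        #       'ec2-worker-1', 'sekrit', 'm5.large',
        #       ami='ami-0123456789abcdef0',
        #       keypair_name='buildbot-keypair',
        #       security_name='buildbot-sg',
        #       region='us-east-1')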
try: self.ec2.KeyPair(self.keypair_name).load() # key_pair.delete() # would be used to recreate except ClientError as e: if 'InvalidKeyPair.NotFound' not in str(e): if 'AuthFailure' in str(e): log.msg('POSSIBLE CAUSES OF ERROR:\n' ' Did you supply your AWS credentials?\n' ' Did you sign up for EC2?\n' ' Did you put a credit card number in your AWS ' 'account?\n' 'Please doublecheck before reporting a problem.\n') raise # make one; we would always do this, and stash the result, if we # needed the key (for instance, to SSH to the box). We'd then # use paramiko to use the key to connect. self.ec2.create_key_pair(KeyName=keypair_name) # create security group if security_name: try: self.ec2_client.describe_security_groups(GroupNames=[security_name]) except ClientError as e: if 'InvalidGroup.NotFound' in str(e): self.security_group = self.ec2.create_security_group( GroupName=security_name, Description='Authorization to access the buildbot instance.') # Authorize the master as necessary # TODO this is where we'd open the hole to do the reverse pb # connect to the buildbot # ip = urllib.urlopen( # 'http://checkip.amazonaws.com').read().strip() # self.security_group.authorize('tcp', 22, 22, '%s/32' % ip) # self.security_group.authorize('tcp', 80, 80, '%s/32' % ip) else: raise # get the image if self.ami is not None: self.image = self.ec2.Image(self.ami) else: # verify we have access to at least one acceptable image discard = self.get_image() assert discard # get the specified elastic IP, if any if elastic_ip is not None: # Using ec2.vpc_addresses.filter(PublicIps=[elastic_ip]) throws a # NotImplementedError("Filtering not supported in describe_address.") in moto # https://github.com/spulec/moto/blob/100ec4e7c8aa3fde87ff6981e2139768816992e4/moto/ec2/responses/elastic_ip_addresses.py#L52 addresses = self.ec2.meta.client.describe_addresses( PublicIps=[elastic_ip])['Addresses'] if not addresses: raise ValueError( 'Could not find EIP for IP: ' + elastic_ip) allocation_id = addresses[0]['AllocationId'] elastic_ip = self.ec2.VpcAddress(allocation_id) self.elastic_ip = elastic_ip self.subnet_id = subnet_id self.security_group_ids = security_group_ids self.classic_security_groups = [ self.security_name] if self.security_name else None self.instance_profile_name = instance_profile_name self.tags = tags self.block_device_map = self.create_block_device_mapping( block_device_map) if block_device_map else None def create_block_device_mapping(self, mapping_definitions): if isinstance(mapping_definitions, list): for mapping_definition in mapping_definitions: ebs = mapping_definition.get('Ebs') if ebs: ebs.setdefault('DeleteOnTermination', True) return mapping_definitions reportDeprecatedWorkerNameUsage( "Use of dict value to 'block_device_map' of EC2LatentWorker " "constructor is deprecated. 
Please use a list matching the AWS API " "https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_BlockDeviceMapping.html" ) return self._convert_deprecated_block_device_mapping(mapping_definitions) def _convert_deprecated_block_device_mapping(self, mapping_definitions): new_mapping_definitions = [] for dev_name, dev_config in mapping_definitions.items(): new_dev_config = {} new_dev_config['DeviceName'] = dev_name if dev_config: new_dev_config['Ebs'] = {} new_dev_config['Ebs']['DeleteOnTermination'] = dev_config.get( 'delete_on_termination', True) new_dev_config['Ebs'][ 'Encrypted'] = dev_config.get('encrypted') new_dev_config['Ebs']['Iops'] = dev_config.get('iops') new_dev_config['Ebs'][ 'SnapshotId'] = dev_config.get('snapshot_id') new_dev_config['Ebs']['VolumeSize'] = dev_config.get('size') new_dev_config['Ebs'][ 'VolumeType'] = dev_config.get('volume_type') new_dev_config['Ebs'] = self._remove_none_opts( new_dev_config['Ebs']) new_mapping_definitions.append(new_dev_config) return new_mapping_definitions def get_image(self): # pylint: disable=too-many-nested-blocks if self.image is not None: return self.image images = self.ec2.images.all() if self.valid_ami_owners: images = images.filter(Owners=self.valid_ami_owners) if self.valid_ami_location_regex: level = 0 options = [] get_match = self.valid_ami_location_regex.match for image in images: # Image must be available if image.state != 'available': continue # Image must match regex match = get_match(image.image_location) if not match: continue # Gather sorting information alpha_sort = int_sort = None if level < 2: try: alpha_sort = match.group(1) except IndexError: level = 2 else: if level == 0: try: int_sort = int(alpha_sort) except ValueError: level = 1 options.append([int_sort, alpha_sort, image.image_location, image.id, image]) if level: log.msg('sorting images at level %d' % level) options = [candidate[level:] for candidate in options] else: options = [(image.image_location, image.id, image) for image in images] options.sort() log.msg('sorted images (last is chosen): %s' % (', '.join( ['%s (%s)' % (candidate[-1].id, candidate[-1].image_location) for candidate in options]))) if not options: raise ValueError('no available images match constraints') return options[-1][-1] def dns(self): if self.instance is None: return None return self.instance.public_dns_name dns = property(dns) def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') if self.spot_instance: return threads.deferToThread(self._request_spot_instance) return threads.deferToThread(self._start_instance) def _remove_none_opts(self, *args, **opts): if args: opts = args[0] return dict((k, v) for k, v in opts.items() if v is not None) def _start_instance(self): image = self.get_image() launch_opts = dict( ImageId=image.id, KeyName=self.keypair_name, SecurityGroups=self.classic_security_groups, InstanceType=self.instance_type, UserData=self.user_data, Placement=self._remove_none_opts( AvailabilityZone=self.placement, ), MinCount=1, MaxCount=1, SubnetId=self.subnet_id, SecurityGroupIds=self.security_group_ids, IamInstanceProfile=self._remove_none_opts( Name=self.instance_profile_name, ), BlockDeviceMappings=self.block_device_map ) launch_opts = self._remove_none_opts(launch_opts) reservations = self.ec2.create_instances( **launch_opts ) self.instance = reservations[0] instance_id, start_time = self._wait_for_instance() if None not in [instance_id, image.id, start_time]: return [instance_id, image.id, start_time] else: 
self.failed_to_start(self.instance.id, self.instance.state['Name']) def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. return defer.succeed(None) instance = self.instance self.output = self.instance = None return threads.deferToThread( self._stop_instance, instance, fast) def _attach_volumes(self): for volume_id, device_node in self.volumes: vol = self.ec2.Volume(volume_id) vol.attach_to_instance( InstanceId=self.instance.id, Device=device_node) log.msg('Attaching EBS volume %s to %s.' % (volume_id, device_node)) def _stop_instance(self, instance, fast): if self.elastic_ip is not None: self.elastic_ip.association.delete() instance.reload() if instance.state['Name'] not in (SHUTTINGDOWN, TERMINATED): instance.terminate() log.msg('%s %s terminating instance %s' % (self.__class__.__name__, self.workername, instance.id)) duration = 0 interval = self._poll_resolution if fast: goal = (SHUTTINGDOWN, TERMINATED) instance.reload() else: goal = (TERMINATED,) while instance.state['Name'] not in goal: time.sleep(interval) duration += interval if duration % 60 == 0: log.msg( '%s %s has waited %d minutes for instance %s to end' % (self.__class__.__name__, self.workername, duration // 60, instance.id)) instance.reload() log.msg('%s %s instance %s %s ' 'after about %d minutes %d seconds' % (self.__class__.__name__, self.workername, instance.id, goal, duration // 60, duration % 60)) def _bid_price_from_spot_price_history(self): timestamp_yesterday = time.gmtime(int(time.time() - 86400)) spot_history_starttime = time.strftime( '%Y-%m-%dT%H:%M:%SZ', timestamp_yesterday) spot_prices = self.ec2.meta.client.describe_spot_price_history( StartTime=spot_history_starttime, ProductDescriptions=[self.product_description], AvailabilityZone=self.placement) price_sum = 0.0 price_count = 0 for price in spot_prices['SpotPriceHistory']: if price['InstanceType'] == self.instance_type: price_sum += float(price['SpotPrice']) price_count += 1 if price_count == 0: bid_price = 0.02 else: bid_price = (price_sum / price_count) * self.price_multiplier return bid_price def _request_spot_instance(self): if self.price_multiplier is None: bid_price = self.max_spot_price else: bid_price = self._bid_price_from_spot_price_history() if self.max_spot_price is not None \ and bid_price > self.max_spot_price: bid_price = self.max_spot_price log.msg('%s %s requesting spot instance with price %0.4f' % (self.__class__.__name__, self.workername, bid_price)) reservations = self.ec2.meta.client.request_spot_instances( SpotPrice=str(bid_price), LaunchSpecification=self._remove_none_opts( ImageId=self.ami, KeyName=self.keypair_name, SecurityGroups=self.classic_security_groups, UserData=self.user_data, InstanceType=self.instance_type, Placement=self._remove_none_opts( AvailabilityZone=self.placement, ), SubnetId=self.subnet_id, SecurityGroupIds=self.security_group_ids, BlockDeviceMappings=self.block_device_map, IamInstanceProfile=self._remove_none_opts( Name=self.instance_profile_name, ) ) ) request, success = self._wait_for_request( reservations['SpotInstanceRequests'][0]) if not success: raise LatentWorkerFailedToSubstantiate() instance_id = request['InstanceId'] self.instance = self.ec2.Instance(instance_id) image = self.get_image() instance_id, start_time = self._wait_for_instance() return instance_id, image.id, start_time def _wait_for_instance(self): log.msg('%s %s waiting for instance %s to start' % 
(self.__class__.__name__, self.workername, self.instance.id)) duration = 0 interval = self._poll_resolution while self.instance.state['Name'] == PENDING: time.sleep(interval) duration += interval if duration % 60 == 0: log.msg('%s %s has waited %d minutes for instance %s' % (self.__class__.__name__, self.workername, duration // 60, self.instance.id)) self.instance.reload() if self.instance.state['Name'] == RUNNING: self.properties.setProperty("instance", self.instance.id, "Worker") self.output = self.instance.console_output().get('Output') minutes = duration // 60 seconds = duration % 60 log.msg('%s %s instance %s started on %s ' 'in about %d minutes %d seconds (%s)' % (self.__class__.__name__, self.workername, self.instance.id, self.dns, minutes, seconds, self.output)) if self.elastic_ip is not None: self.elastic_ip.associate(InstanceId=self.instance.id) start_time = '%02d:%02d:%02d' % ( minutes // 60, minutes % 60, seconds) if self.volumes: self._attach_volumes() if self.tags: self.instance.create_tags(Tags=[{"Key": k, "Value": v} for k, v in self.tags.items()]) return self.instance.id, start_time else: self.failed_to_start(self.instance.id, self.instance.state['Name']) def _wait_for_request(self, reservation): duration = 0 interval = self._poll_resolution requests = self.ec2.meta.client.describe_spot_instance_requests( SpotInstanceRequestIds=[reservation['SpotInstanceRequestId']]) request = requests['SpotInstanceRequests'][0] request_status = request['Status']['Code'] while request_status in SPOT_REQUEST_PENDING_STATES: time.sleep(interval) duration += interval if duration % 60 == 0: log.msg('%s %s has waited %d minutes for spot request %s' % (self.__class__.__name__, self.workername, duration // 60, request['SpotInstanceRequestId'])) requests = self.ec2.meta.client.describe_spot_instance_requests( SpotInstanceRequestIds=[reservation['SpotInstanceRequestId']]) request = requests['SpotInstanceRequests'][0] request_status = request['Status']['Code'] if request_status == FULFILLED: minutes = duration // 60 seconds = duration % 60 log.msg('%s %s spot request %s fulfilled ' 'in about %d minutes %d seconds' % (self.__class__.__name__, self.workername, request['SpotInstanceRequestId'], minutes, seconds)) return request, True elif request_status == PRICE_TOO_LOW: self.ec2.meta.client.cancel_spot_instance_requests( SpotInstanceRequestIds=[request['SpotInstanceRequestId']]) log.msg('%s %s spot request rejected, spot price too low' % (self.__class__.__name__, self.workername)) raise LatentWorkerFailedToSubstantiate( request['SpotInstanceRequestId'], request_status) else: log.msg('%s %s failed to fulfill spot request %s with status %s' % (self.__class__.__name__, self.workername, request['SpotInstanceRequestId'], request_status)) # try to cancel, just for good measure self.ec2.meta.client.cancel_spot_instance_requests( SpotInstanceRequestIds=[request['SpotInstanceRequestId']]) raise LatentWorkerFailedToSubstantiate( request['SpotInstanceRequestId'], request_status) buildbot-2.6.0/master/buildbot/worker/kubernetes.py000066400000000000000000000113031361162603000224500ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util import kubeclientservice from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.util.logger import Logger from buildbot.worker.docker import DockerBaseWorker log = Logger() class KubeLatentWorker(DockerBaseWorker, CompatibleLatentWorkerMixin): instance = None builds_may_be_incompatible = True @defer.inlineCallbacks def getPodSpec(self, build): image = yield build.render(self.image) env = yield self.createEnvironment(build) defer.returnValue({ "apiVersion": "v1", "kind": "Pod", "metadata": { "name": self.getContainerName() }, "spec": { "containers": [{ "name": self.getContainerName(), "image": image, "env": [{ "name": k, "value": v } for k, v in env.items()], "resources": (yield self.getBuildContainerResources(build)) }] + (yield self.getServicesContainers(build)), "restartPolicy": "Never" } }) def getBuildContainerResources(self, build): # customization point to generate Build container resources return {} def getServicesContainers(self, build): # customization point to create services containers around the build container # those containers will run within the same localhost as the build container (aka within the same pod) return [] def renderWorkerProps(self, build_props): return self.getPodSpec(build_props) def checkConfig(self, name, image='buildbot/buildbot-worker', namespace=None, masterFQDN=None, kube_config=None, **kwargs): super().checkConfig(name, None, **kwargs) kubeclientservice.KubeClientService.checkAvailable( self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, name, image='buildbot/buildbot-worker', namespace=None, masterFQDN=None, kube_config=None, **kwargs): # Set build_wait_timeout to 0 if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 if masterFQDN is None: masterFQDN = self.get_ip if callable(masterFQDN): masterFQDN = masterFQDN() yield super().reconfigService(name, image=image, masterFQDN=masterFQDN, **kwargs) self._kube = yield kubeclientservice.KubeClientService.getService( self.master, kube_config=kube_config) self.namespace = namespace or self._kube.namespace @defer.inlineCallbacks def start_instance(self, build): yield self.stop_instance(reportFailure=False) pod_spec = yield self.renderWorkerPropsOnStart(build) try: yield self._kube.createPod(self.namespace, pod_spec) except kubeclientservice.KubeError as e: raise LatentWorkerFailedToSubstantiate(str(e)) defer.returnValue(True) @defer.inlineCallbacks def stop_instance(self, fast=False, reportFailure=True): self.current_pod_spec = None try: yield self._kube.deletePod(self.namespace, self.getContainerName()) except kubeclientservice.KubeError as e: if reportFailure and e.reason != 'NotFound': raise if fast: return yield self._kube.waitForPodDeletion( self.namespace, self.getContainerName(), timeout=self.missing_timeout) buildbot-2.6.0/master/buildbot/worker/latent.py000066400000000000000000000506461361162603000216050ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright Canonical Ltd. 2009 import enum import random import string from twisted.internet import defer from twisted.python import failure from twisted.python import log from zope.interface import implementer from buildbot.interfaces import ILatentMachine from buildbot.interfaces import ILatentWorker from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.interfaces import LatentWorkerSubstantiatiationCancelled from buildbot.util import Notifier from buildbot.worker.base import AbstractWorker class States(enum.Enum): # Represents the states of AbstractLatentWorker NOT_SUBSTANTIATED = 0 # When in this state, self._substantiation_notifier is waited on. The # notifier is notified immediately after the state transition out of # SUBSTANTIATING. SUBSTANTIATING = 1 # This is the same as SUBSTANTIATING, the difference is that start_instance # has been called SUBSTANTIATING_STARTING = 2 SUBSTANTIATED = 3 # When in this state, self._start_stop_lock is held. INSUBSTANTIATING = 4 # This state represents the case when insubstantiation is in progress and # we also request substantiation at the same time. Substantiation will be # started as soon as insubstantiation completes. Note, that the opposite # actions are not supported: insubstantiation during substantiation will # cancel the substantiation. # # When in this state, self._start_stop_lock is held. # # When in this state self.substantiation_build is not None. INSUBSTANTIATING_SUBSTANTIATING = 5 @implementer(ILatentWorker) class AbstractLatentWorker(AbstractWorker): """A worker that will start up a worker instance when needed. To use, subclass and implement start_instance and stop_instance. Additionally, if the instances render any kind of data affecting instance type from the build properties, set the class variable builds_may_be_incompatible to True and override isCompatibleWithBuild method. See ec2.py for a concrete example. """ substantiation_build = None build_wait_timer = None start_missing_on_startup = False # override if the latent worker may connect without substantiate. Most # often this will be used in workers whose lifetime is managed by # latent machines. starts_without_substantiate = False # Caveats: The handling of latent workers is much more complex than it # might seem. The code must handle at least the following conditions: # # - non-silent disconnection by the worker at any time which generated # TCP resets and in the end resulted in detached() being called # # - silent disconnection by worker at any time by silent TCP connection # failure which did not generate TCP resets, but on the other hand no # response may be received. self.conn is not None is that case. # # - no disconnection by worker during substantiation when # build_wait_timeout param is negative. # # - worker attaching before start_instance returned. 
# # The above means that the following parts of the state must be tracked separately and can # result in various state combinations: # - connection state of the worker (self.conn) # - intended state of the worker (self.state) # - whether start_instance() has been called and has not yet finished. state = States.NOT_SUBSTANTIATED ''' state transitions: substantiate(): either of NOT_SUBSTANTIATED -> SUBSTANTIATING INSUBSTANTIATING -> INSUBSTANTIATING_SUBSTANTIATING _substantiate(): either of: SUBSTANTIATING -> SUBSTANTIATING_STARTING SUBSTANTIATING -> SUBSTANTIATING_STARTING -> SUBSTANTIATED attached(): either of: SUBSTANTIATING -> SUBSTANTIATED SUBSTANTIATING_STARTING -> SUBSTANTIATED then: self.conn -> not None detached(): self.conn -> None errors in any of above will call insubstantiate() insubstantiate(): either of: SUBSTANTIATED -> INSUBSTANTIATING INSUBSTANTIATING_SUBSTANTIATING -> INSUBSTANTIATING (cancels substantiation request) SUBSTANTIATING -> INSUBSTANTIATING SUBSTANTIATING -> INSUBSTANTIATING_SUBSTANTIATING SUBSTANTIATING_STARTING -> INSUBSTANTIATING SUBSTANTIATING_STARTING -> INSUBSTANTIATING_SUBSTANTIATING then: < other state transitions may happen during this time > then either of: INSUBSTANTIATING_SUBSTANTIATING -> SUBSTANTIATING INSUBSTANTIATING -> NOT_SUBSTANTIATED """ ''' def checkConfig(self, name, password, build_wait_timeout=60 * 10, **kwargs): super().checkConfig(name, password, **kwargs) def reconfigService(self, name, password, build_wait_timeout=60 * 10, **kwargs): self._substantiation_notifier = Notifier() self._start_stop_lock = defer.DeferredLock() self.build_wait_timeout = build_wait_timeout return super().reconfigService(name, password, **kwargs) def getRandomPass(self): """ compute a random password There is no point to configure a password for a LatentWorker, as it is created by the master. For supporting backend, a password can be generated by this API """ return ''.join( random.choice(string.ascii_letters + string.digits) for _ in range(20)) @property def building(self): # A LatentWorkerForBuilder will only be busy if it is building. return {wfb for wfb in self.workerforbuilders.values() if wfb.isBusy()} def failed_to_start(self, instance_id, instance_state): log.msg('%s %s failed to start instance %s (%s)' % (self.__class__.__name__, self.workername, instance_id, instance_state)) raise LatentWorkerFailedToSubstantiate(instance_id, instance_state) def _log_start_stop_locked(self, action_str): if self._start_stop_lock.locked: log.msg(('while {} worker {}: waiting until previous ' + 'start_instance/stop_instance finishes').format(action_str, self)) def start_instance(self, build): # responsible for starting instance that will try to connect with this # master. Should return deferred with either True (instance started) # or False (instance not started, so don't run a build here). Problems # should use an errback. raise NotImplementedError def stop_instance(self, fast=False): # responsible for shutting down instance. 
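        # Example (illustrative sketch, not from the original source): the
        # minimal shape of a concrete subclass implementing the two hooks
        # above; my_cloud stands in for a hypothetical asynchronous backend.
        #
        #   class MyLatentWorker(AbstractLatentWorker):
        #       def start_instance(self, build):
        #           # Deferred firing True (started) or False (not started);
        #           # real problems should errback.
        #           d = my_cloud.boot(self.workername)
        #           d.addCallback(lambda _: True)
        #           return d
        #
        #       def stop_instance(self, fast=False):
        #           return my_cloud.halt(self.workername)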
raise NotImplementedError @property def substantiated(self): return self.state == States.SUBSTANTIATED and self.conn is not None def substantiate(self, wfb, build): log.msg("substantiating worker %s" % (wfb,)) if self.state == States.SUBSTANTIATED and self.conn is not None: self._setBuildWaitTimer() return defer.succeed(True) if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING, States.INSUBSTANTIATING_SUBSTANTIATING]: return self._substantiation_notifier.wait() self.startMissingTimer() # if anything of the following fails synchronously we need to have a # deferred ready to be notified d = self._substantiation_notifier.wait() if self.state == States.SUBSTANTIATED and self.conn is None: # connection dropped while we were substantiated. # insubstantiate to clean up and then substantiate normally. d_ins = self.insubstantiate(force_substantiation_build=build) d_ins.addErrback(log.err, 'while insubstantiating') return d assert self.state in [States.NOT_SUBSTANTIATED, States.INSUBSTANTIATING] if self.state == States.NOT_SUBSTANTIATED: self.state = States.SUBSTANTIATING self._substantiate(build) else: self.state = States.INSUBSTANTIATING_SUBSTANTIATING self.substantiation_build = build return d @defer.inlineCallbacks def _substantiate(self, build): assert self.state == States.SUBSTANTIATING try: # if build_wait_timeout is negative we don't ever disconnect the # worker ourselves, so we don't need to wait for it to attach # to declare it as substantiated. dont_wait_to_attach = \ self.build_wait_timeout < 0 and self.conn is not None start_success = True if ILatentMachine.providedBy(self.machine): start_success = yield self.machine.substantiate(self) try: self._log_start_stop_locked('substantiating') yield self._start_stop_lock.acquire() if start_success: self.state = States.SUBSTANTIATING_STARTING start_success = yield self.start_instance(build) finally: self._start_stop_lock.release() if not start_success: # this behaviour is kept as compatibility, but it is better # to just errback with a workable reason msg = "Worker does not want to substantiate at this time" raise LatentWorkerFailedToSubstantiate(self.name, msg) if dont_wait_to_attach and \ self.state == States.SUBSTANTIATING_STARTING and \ self.conn is not None: log.msg(r"Worker %s substantiated (already attached)" % (self.name,)) self.state = States.SUBSTANTIATED self._fireSubstantiationNotifier(True) except Exception as e: self.stopMissingTimer() self._substantiation_failed(failure.Failure(e)) # swallow the failure as it is notified def _fireSubstantiationNotifier(self, result): if not self._substantiation_notifier: log.msg("No substantiation deferred for %s" % (self.name,)) return result_msg = 'success' if result is True else 'failure' log.msg("Firing {} substantiation deferred with {}".format( self.name, result_msg)) self._substantiation_notifier.notify(result) @defer.inlineCallbacks def attached(self, bot): if self.state != States.SUBSTANTIATING_STARTING and \ self.build_wait_timeout >= 0: msg = 'Worker %s received connection while not trying to ' \ 'substantiate. Disconnecting.' % (self.name,) log.msg(msg) self._disconnect(bot) raise RuntimeError(msg) try: yield super().attached(bot) except Exception: self._substantiation_failed(failure.Failure()) return log.msg(r"Worker %s substantiated \o/" % (self.name,)) # only change state when we are actually substantiating. We could # end up at this point in different state than SUBSTANTIATING_STARTING # if build_wait_timeout is negative. 
In that case, the worker is never # shut down, but it may reconnect if the connection drops on its side # without master seeing this condition. # # When build_wait_timeout is not negative, we throw an error (see above) if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self.state = States.SUBSTANTIATED self._fireSubstantiationNotifier(True) def attachBuilder(self, builder): wfb = self.workerforbuilders.get(builder.name) return wfb.attached(self, self.worker_commands) def _missing_timer_fired(self): self.missing_timer = None return self._substantiation_failed(defer.TimeoutError()) def _substantiation_failed(self, failure): if self.state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self._fireSubstantiationNotifier(failure) d = self.insubstantiate() d.addErrback(log.err, 'while insubstantiating') # notify people, but only if we're still in the config if not self.parent or not self.notify_on_missing: return return self.master.data.updates.workerMissing( workerid=self.workerid, masterid=self.master.masterid, last_connection="Latent worker never connected", notify=self.notify_on_missing ) def canStartBuild(self): # we were disconnected, but all the builds are not yet cleaned up. if self.conn is None and self.building: return False return super().canStartBuild() def buildStarted(self, wfb): assert wfb.isBusy() self._clearBuildWaitTimer() if ILatentMachine.providedBy(self.machine): self.machine.notifyBuildStarted() def buildFinished(self, wfb): assert not wfb.isBusy() if not self.building: if self.build_wait_timeout == 0: # we insubstantiate asynchronously to trigger more bugs with # the fake reactor self.master.reactor.callLater(0, self._soft_disconnect) # insubstantiate will automatically retry to create build for # this worker else: self._setBuildWaitTimer() # AbstractWorker.buildFinished() will try to start the next build for # that worker super().buildFinished(wfb) if ILatentMachine.providedBy(self.machine): self.machine.notifyBuildFinished() def _clearBuildWaitTimer(self): if self.build_wait_timer is not None: if self.build_wait_timer.active(): self.build_wait_timer.cancel() self.build_wait_timer = None def _setBuildWaitTimer(self): self._clearBuildWaitTimer() if self.build_wait_timeout <= 0: return self.build_wait_timer = self.master.reactor.callLater( self.build_wait_timeout, self._soft_disconnect) @defer.inlineCallbacks def insubstantiate(self, fast=False, force_substantiation_build=None): # If force_substantiation_build is not None, we'll try to substantiate the given build # after insubstantiation concludes. This parameter allows to go directly to the # SUBSTANTIATING state without going through NOT_SUBSTANTIATED state. 
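        # Worked example (derived from the transition table in the class
        # docstring): the sequence enabled by force_substantiation_build when
        # the connection drops while SUBSTANTIATED and substantiate() is
        # called again.
        #
        #   SUBSTANTIATED
        #     --insubstantiate(force_substantiation_build=build)-->
        #   INSUBSTANTIATING_SUBSTANTIATING  --stop_instance()-->
        #   SUBSTANTIATING  --start_instance(build)-->
        #   SUBSTANTIATING_STARTING  --attached()-->  SUBSTANTIATED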
log.msg("insubstantiating worker {}".format(self)) if self.state == States.INSUBSTANTIATING_SUBSTANTIATING: self.state = States.INSUBSTANTIATING self.substantiation_build = None self._fireSubstantiationNotifier( failure.Failure(LatentWorkerSubstantiatiationCancelled())) try: self._log_start_stop_locked('insubstantiating') yield self._start_stop_lock.acquire() assert self.state not in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING] if self.state == States.NOT_SUBSTANTIATED: return prev_state = self.state if force_substantiation_build is not None: self.state = States.INSUBSTANTIATING_SUBSTANTIATING self.substantiation_build = force_substantiation_build else: self.state = States.INSUBSTANTIATING if prev_state in [States.SUBSTANTIATING, States.SUBSTANTIATING_STARTING]: self._fireSubstantiationNotifier( failure.Failure(LatentWorkerSubstantiatiationCancelled())) self._clearBuildWaitTimer() if prev_state in [States.SUBSTANTIATING_STARTING, States.SUBSTANTIATED]: try: yield self.stop_instance(fast) except Exception as e: # The case of failure for insubstantiation is bad as we have a # left-over costing resource There is not much thing to do here # generically, so we must put the problem of stop_instance # reliability to the backend driver log.err(e, "while insubstantiating") assert self.state in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING] if self.state == States.INSUBSTANTIATING_SUBSTANTIATING: build, self.substantiation_build = self.substantiation_build, None self.state = States.SUBSTANTIATING self._substantiate(build) else: # self.state == States.INSUBSTANTIATING: self.state = States.NOT_SUBSTANTIATED finally: self._start_stop_lock.release() self.botmaster.maybeStartBuildsForWorker(self.name) @defer.inlineCallbacks def _soft_disconnect(self, fast=False, stopping_service=False): if self.building: # If there are builds running or about to start on this worker, don't disconnect. # soft_disconnect happens during master reconfiguration or shutdown. It's not a good # reason to kill these builds. We effectively slow down reconfig until all workers # that have been unconfigured finish their builds. return # a negative build_wait_timeout means the worker should never be shut # down, so just disconnect. if not stopping_service and self.build_wait_timeout < 0: yield super().disconnect() return self.stopMissingTimer() yield defer.DeferredList([ super().disconnect(), self.insubstantiate(fast) ], consumeErrors=True, fireOnOneErrback=True) def disconnect(self): # This returns a Deferred but we don't use it self._soft_disconnect() # this removes the worker from all builders. It won't come back # without a restart (or maybe a sighup) self.botmaster.workerLost(self) @defer.inlineCallbacks def stopService(self): # the worker might be insubstantiating from buildWaitTimeout if self.state in [States.INSUBSTANTIATING, States.INSUBSTANTIATING_SUBSTANTIATING]: yield self._insubstantiation_notifier.wait() if self.conn is not None or self.state in [States.SUBSTANTIATING, States.SUBSTANTIATED]: yield self._soft_disconnect(stopping_service=True) self._clearBuildWaitTimer() res = yield super().stopService() return res def updateWorker(self): """Called to add or remove builders after the worker has connected. Also called after botmaster's builders are initially set. 
        @return: a Deferred that indicates when an attached worker has
        accepted the new builders and/or released the old ones."""
        for b in self.botmaster.getBuildersForWorker(self.name):
            if b.name not in self.workerforbuilders:
                b.addLatentWorker(self)
        return super().updateWorker()


class LocalLatentWorker(AbstractLatentWorker):
    """
    A worker that can be suspended by shutting down or suspending the
    hardware it runs on.

    It is intended to be used with LatentMachines.
    """
    starts_without_substantiate = True

    def checkConfig(self, name, password, **kwargs):
        super().checkConfig(name, password, build_wait_timeout=-1, **kwargs)

    def reconfigService(self, name, password, **kwargs):
        return super().reconfigService(name, password, build_wait_timeout=-1,
                                       **kwargs)
buildbot-2.6.0/master/buildbot/worker/libvirt.py000066400000000000000000000226401361162603000217620ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright 2010 Isotoma Limited

import os

from twisted.internet import defer
from twisted.internet import threads
from twisted.internet import utils
from twisted.python import failure
from twisted.python import log

from buildbot import config
from buildbot.util.eventual import eventually
from buildbot.worker import AbstractLatentWorker

try:
    import libvirt
except ImportError:
    libvirt = None


class WorkQueue:
    """
    I am a class that turns parallel access into serial access.

    I exist because we want to run libvirt access in threads as we don't
    trust calls not to block, but under load libvirt doesn't seem to like
    this kind of threaded use.
    """

    def __init__(self):
        self.queue = []

    def _process(self):
        log.msg("Looking to start a piece of work now...")

        # Is there anything to do?
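        # Example (illustrative, not part of the original source): how the
        # module-level `queue` below serializes otherwise-parallel libvirt
        # calls; lv_conn stands in for a raw libvirt connection object.
        #
        #   d = queue.executeInThread(lv_conn.lookupByName, 'worker-vm')
        #   # the lookup runs in a thread only after every previously queued
        #   # libvirt call has completed; d fires with the raw libvirt domain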
if not self.queue: log.msg("_process called when there is no work") return # Peek at the top of the stack - get a function to call and # a deferred to fire when its all over d, next_operation, args, kwargs = self.queue[0] # Start doing some work - expects a deferred try: d2 = next_operation(*args, **kwargs) except Exception: d2 = defer.fail() # Whenever a piece of work is done, whether it worked or not # call this to schedule the next piece of work @d2.addBoth def _work_done(res): log.msg("Completed a piece of work") self.queue.pop(0) if self.queue: log.msg("Preparing next piece of work") eventually(self._process) return res # When the work is done, trigger d d2.chainDeferred(d) def execute(self, cb, *args, **kwargs): kickstart_processing = not self.queue d = defer.Deferred() self.queue.append((d, cb, args, kwargs)) if kickstart_processing: self._process() return d def executeInThread(self, cb, *args, **kwargs): return self.execute(threads.deferToThread, cb, *args, **kwargs) # A module is effectively a singleton class, so this is OK queue = WorkQueue() class Domain: """ I am a wrapper around a libvirt Domain object """ def __init__(self, connection, domain): self.connection = connection self.domain = domain def name(self): return queue.executeInThread(self.domain.name) def create(self): return queue.executeInThread(self.domain.create) def shutdown(self): return queue.executeInThread(self.domain.shutdown) def destroy(self): return queue.executeInThread(self.domain.destroy) class Connection: """ I am a wrapper around a libvirt Connection object. """ DomainClass = Domain def __init__(self, uri): self.uri = uri self.connection = libvirt.open(uri) @defer.inlineCallbacks def lookupByName(self, name): """ I lookup an existing predefined domain """ res = yield queue.executeInThread(self.connection.lookupByName, name) return self.DomainClass(self, res) @defer.inlineCallbacks def create(self, xml): """ I take libvirt XML and start a new VM """ res = yield queue.executeInThread(self.connection.createXML, xml, 0) return self.DomainClass(self, res) @defer.inlineCallbacks def all(self): domains = [] domain_ids = yield queue.executeInThread(self.connection.listDomainsID) for did in domain_ids: domain = yield queue.executeInThread(self.connection.lookupByID, did) domains.append(self.DomainClass(self, domain)) return domains class LibVirtWorker(AbstractLatentWorker): def __init__(self, name, password, connection, hd_image, base_image=None, xml=None, **kwargs): super().__init__(name, password, **kwargs) if not libvirt: config.error( "The python module 'libvirt' is needed to use a LibVirtWorker") self.connection = connection self.image = hd_image self.base_image = base_image self.xml = xml self.cheap_copy = True self.graceful_shutdown = False self.domain = None self.ready = False self._find_existing_deferred = self._find_existing_instance() @defer.inlineCallbacks def _find_existing_instance(self): """ I find existing VMs that are already running that might be orphaned instances of this worker. 
""" if not self.connection: return None domains = yield self.connection.all() for d in domains: name = yield d.name() if name.startswith(self.workername): self.domain = d break self.ready = True def canStartBuild(self): if not self.ready: log.msg("Not accepting builds as existing domains not iterated") return False if self.domain and not self.isConnected(): log.msg( "Not accepting builds as existing domain but worker not connected") return False return super().canStartBuild() def _prepare_base_image(self): """ I am a private method for creating (possibly cheap) copies of a base_image for start_instance to boot. """ if not self.base_image: return defer.succeed(True) if self.cheap_copy: clone_cmd = "qemu-img" clone_args = "create -b %(base)s -f qcow2 %(image)s" else: clone_cmd = "cp" clone_args = "%(base)s %(image)s" clone_args = clone_args % { "base": self.base_image, "image": self.image, } log.msg("Cloning base image: %s %s'" % (clone_cmd, clone_args)) d = utils.getProcessValue(clone_cmd, clone_args.split()) def _log_result(res): log.msg("Cloning exit code was: %d" % res) return res def _log_error(err): log.err("Cloning failed: %s" % err) return err d.addCallbacks(_log_result, _log_error) return d @defer.inlineCallbacks def start_instance(self, build): """ I start a new instance of a VM. If a base_image is specified, I will make a clone of that otherwise i will use image directly. If i'm not given libvirt domain definition XML, I will look for my name in the list of defined virtual machines and start that. """ if self.domain is not None: log.msg("Cannot start_instance '%s' as already active" % self.workername) return False yield self._prepare_base_image() try: if self.xml: self.domain = yield self.connection.create(self.xml) else: self.domain = yield self.connection.lookupByName(self.workername) yield self.domain.create() except Exception: log.err(failure.Failure(), "Cannot start a VM (%s), failing gracefully and triggering" "a new build check" % self.workername) self.domain = None return False return True def stop_instance(self, fast=False): """ I attempt to stop a running VM. I make sure any connection to the worker is removed. If the VM was using a cloned image, I remove the clone When everything is tidied up, I ask that bbot looks for work to do """ @defer.inlineCallbacks def _destroy_domain(res, domain): log.msg('Graceful shutdown failed. Force destroying domain %s' % self.workername) # Don't return res to stop propagating shutdown error if destroy # was successful. yield domain.destroy() log.msg("Attempting to stop '%s'" % self.workername) if self.domain is None: log.msg("I don't think that domain is even running, aborting") return defer.succeed(None) domain = self.domain self.domain = None if self.graceful_shutdown and not fast: log.msg("Graceful shutdown chosen for %s" % self.workername) d = domain.shutdown() d.addErrback(_destroy_domain, domain) else: d = domain.destroy() if self.base_image: @d.addBoth def _remove_image(res): log.msg('Removing base image %s for %s' % (self.image, self.workername)) os.remove(self.image) return res return d buildbot-2.6.0/master/buildbot/worker/local.py000066400000000000000000000044141361162603000214000ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members import os from twisted.internet import defer from buildbot.config import error from buildbot.worker.base import Worker class LocalWorker(Worker): def checkConfig(self, name, workdir=None, **kwargs): kwargs['password'] = None super().checkConfig(name, **kwargs) self.LocalWorkerFactory = None try: # importing here to avoid dependency on buildbot worker package from buildbot_worker.bot import LocalWorker as RemoteLocalWorker self.LocalWorkerFactory = RemoteLocalWorker except ImportError: error("LocalWorker needs the buildbot-worker package installed " "(pip install buildbot-worker)") self.remote_worker = None @defer.inlineCallbacks def reconfigService(self, name, workdir=None, **kwargs): kwargs['password'] = None super().reconfigService(name, **kwargs) if workdir is None: workdir = name workdir = os.path.abspath( os.path.join(self.master.basedir, "workers", workdir)) if not os.path.isdir(workdir): os.makedirs(workdir) if self.remote_worker is None: # create the actual worker as a child service # we only create at reconfig, to avoid polluting memory in case of # reconfig self.remote_worker = self.LocalWorkerFactory(name, workdir) yield self.remote_worker.setServiceParent(self) else: # The case of a reconfig, we forward the parameters self.remote_worker.bot.basedir = workdir buildbot-2.6.0/master/buildbot/worker/manager.py000066400000000000000000000123141361162603000217160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.process.measured_service import MeasuredBuildbotServiceManager from buildbot.util import misc from buildbot.worker.protocols import pb as bbpb class WorkerRegistration: __slots__ = ['master', 'worker', 'pbReg'] def __init__(self, master, worker): self.master = master self.worker = worker def __repr__(self): return "<%s for %r>" % (self.__class__.__name__, self.worker.workername) @defer.inlineCallbacks def unregister(self): bs = self.worker # update with portStr=None to remove any registration in place yield self.master.workers.pb.updateRegistration( bs.workername, bs.password, None) yield self.master.workers._unregister(self) @defer.inlineCallbacks def update(self, worker_config, global_config): # For most protocols, there's nothing to do, but for PB we must # update the registration in case the port or password has changed. 
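LocalWorker above runs a buildbot-worker instance inside the master process itself, which is convenient for single-machine setups. A sketch of how it is usually declared; the worker name is arbitrary and the buildbot-worker package must be importable by the master:

from buildbot.plugins import worker

c['workers'] = [
    # password is forced to None internally; the workdir defaults to
    # <basedir>/workers/local1 unless workdir= is passed explicitly
    worker.LocalWorker('local1'),
]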
if 'pb' in global_config.protocols: self.pbReg = yield self.master.workers.pb.updateRegistration( worker_config.workername, worker_config.password, global_config.protocols['pb']['port']) def getPBPort(self): return self.pbReg.getPort() class WorkerManager(MeasuredBuildbotServiceManager): name = "WorkerManager" managed_services_name = "workers" config_attr = "workers" PING_TIMEOUT = 10 reconfig_priority = 127 def __init__(self, master): super().__init__() self.pb = bbpb.Listener() self.pb.setServiceParent(master) # WorkerRegistration instances keyed by worker name self.registrations = {} # connection objects keyed by worker name self.connections = {} @property def workers(self): # self.workers contains a ready Worker instance for each # potential worker, i.e. all the ones listed in the config file. # If the worker is connected, self.workers[workername].worker will # contain a RemoteReference to their Bot instance. If it is not # connected, that attribute will hold None. # workers attribute is actually just an alias to multiService's # namedService return self.namedServices def getWorkerByName(self, workerName): return self.registrations[workerName].worker def register(self, worker): # TODO: doc that reg.update must be called, too workerName = worker.workername reg = WorkerRegistration(self.master, worker) self.registrations[workerName] = reg return defer.succeed(reg) def _unregister(self, registration): del self.registrations[registration.worker.workername] @defer.inlineCallbacks def newConnection(self, conn, workerName): if workerName in self.connections: log.msg("Got duplication connection from '%s'" " starting arbitration procedure" % workerName) old_conn = self.connections[workerName] try: yield misc.cancelAfter(self.PING_TIMEOUT, old_conn.remotePrint("master got a duplicate connection")) # if we get here then old connection is still alive, and new # should be rejected raise RuntimeError("rejecting duplicate worker") except defer.CancelledError: old_conn.loseConnection() log.msg("Connected worker '%s' ping timed out after %d seconds" % (workerName, self.PING_TIMEOUT)) except RuntimeError: raise except Exception as e: old_conn.loseConnection() log.msg("Got error while trying to ping connected worker %s:" "%s" % (workerName, e)) log.msg("Old connection for '%s' was lost, accepting new" % workerName) try: yield conn.remotePrint(message="attached") info = yield conn.remoteGetWorkerInfo() log.msg("Got workerinfo from '%s'" % workerName) except Exception as e: log.msg("Failed to communicate with worker '%s'\n" "%s" % (workerName, e)) raise conn.info = info self.connections[workerName] = conn def remove(): del self.connections[workerName] conn.notifyOnDisconnect(remove) # accept the connection return True buildbot-2.6.0/master/buildbot/worker/marathon.py000066400000000000000000000111211361162603000221100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot import util from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util.httpclientservice import HTTPClientService from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.util.logger import Logger from buildbot.worker.docker import DockerBaseWorker log = Logger() class MarathonLatentWorker(DockerBaseWorker, CompatibleLatentWorkerMixin): """Marathon is a distributed docker container launcher for Mesos""" instance = None image = None _http = None def checkConfig(self, name, marathon_url, image, marathon_auth=None, marathon_extra_config=None, marathon_app_prefix="buildbot-worker/", masterFQDN=None, **kwargs): super().checkConfig(name, image=image, masterFQDN=masterFQDN, **kwargs) HTTPClientService.checkAvailable(self.__class__.__name__) @defer.inlineCallbacks def reconfigService(self, name, marathon_url, image, marathon_auth=None, marathon_extra_config=None, marathon_app_prefix="buildbot-worker/", masterFQDN=None, **kwargs): # Set build_wait_timeout to 0s if not explicitly set: Starting a # container is almost immediate, we can afford doing so for each build. if 'build_wait_timeout' not in kwargs: kwargs['build_wait_timeout'] = 0 yield super().reconfigService(name, image=image, masterFQDN=masterFQDN, **kwargs) self._http = yield HTTPClientService.getService( self.master, marathon_url, auth=marathon_auth) if marathon_extra_config is None: marathon_extra_config = {} self.marathon_extra_config = marathon_extra_config self.marathon_app_prefix = marathon_app_prefix def getApplicationId(self): return self.marathon_app_prefix + self.getContainerName() def renderWorkerProps(self, build): return build.render((self.image, self.marathon_extra_config)) @defer.inlineCallbacks def start_instance(self, build): yield self.stop_instance(reportFailure=False) image, marathon_extra_config = \ yield self.renderWorkerPropsOnStart(build) marathon_config = { "container": { "docker": { "image": image, "network": "BRIDGE", }, "type": "DOCKER" }, "id": self.getApplicationId(), "instances": 1, "env": self.createEnvironment() } util.dictionary_merge(marathon_config, marathon_extra_config) res = yield self._http.post("/v2/apps", json=marathon_config) res_json = yield res.json() if res.code != 201: raise LatentWorkerFailedToSubstantiate( "Unable to create Marathon app: {} {}: {} {}".format( self.getApplicationId(), res.code, res_json['message'], res_json)) self.instance = res_json return True @defer.inlineCallbacks def stop_instance(self, fast=False, reportFailure=True): res = yield self._http.delete("/v2/apps/{}".format( self.getApplicationId())) self.instance = None if res.code != 200 and reportFailure: res_json = yield res.json() # the error is not documented :-( log.warn( "Unable to delete Marathon app: {id} {code}: {message} {details}", id=self.getApplicationId(), code=res.code, message=res_json.get('message'), details=res_json) buildbot-2.6.0/master/buildbot/worker/openstack.py000066400000000000000000000301441361162603000222740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. 
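MarathonLatentWorker above needs little more than a Marathon endpoint and a docker image; the rest is inherited from DockerBaseWorker, and build_wait_timeout defaults to 0 so an app is launched per build. A hedged configuration sketch in which the URL, image, credentials and extra app settings are all placeholders:

from buildbot.plugins import worker

c['workers'] = [
    worker.MarathonLatentWorker(
        'marathon-worker-1',
        marathon_url='http://marathon.example.com:8080',
        image='example/buildbot-worker:latest',
        marathon_auth=('buildbot', 'secret'),            # handed to HTTPClientService as auth=
        marathon_extra_config={'cpus': 1, 'mem': 2048},  # merged into the generated app definition
    ),
]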
See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Portions Copyright Buildbot Team Members # Portions Copyright 2013 Cray Inc. import math import time from twisted.internet import defer from twisted.internet import threads from twisted.python import log from buildbot import config from buildbot.interfaces import LatentWorkerFailedToSubstantiate from buildbot.util.latent import CompatibleLatentWorkerMixin from buildbot.worker import AbstractLatentWorker try: from keystoneauth1 import loading from keystoneauth1 import session from novaclient import client from novaclient.exceptions import NotFound _hush_pyflakes = [client] except ImportError: NotFound = Exception client = None loading = None session = None ACTIVE = 'ACTIVE' BUILD = 'BUILD' DELETED = 'DELETED' UNKNOWN = 'UNKNOWN' class OpenStackLatentWorker(AbstractLatentWorker, CompatibleLatentWorkerMixin): instance = None _poll_resolution = 5 # hook point for tests def __init__(self, name, password, flavor, os_username, os_password, os_tenant_name, os_auth_url, os_user_domain=None, os_project_domain=None, block_devices=None, region=None, image=None, meta=None, # Have a nova_args parameter to allow passing things directly # to novaclient. nova_args=None, client_version='2', **kwargs): if not client: config.error("The python module 'novaclient' is needed " "to use a OpenStackLatentWorker. " "Please install 'python-novaclient' package.") if not loading or not session: config.error("The python module 'keystoneauth1' is needed " "to use a OpenStackLatentWorker. " "Please install the 'keystoneauth1' package.") if not block_devices and not image: raise ValueError('One of block_devices or image must be given') super().__init__(name, password, **kwargs) self.flavor = flavor self.client_version = client_version if client: self.novaclient = self._constructClient( client_version, os_username, os_user_domain, os_password, os_tenant_name, os_project_domain, os_auth_url) if region is not None: self.novaclient.client.region_name = region if block_devices is not None: self.block_devices = [ self._parseBlockDevice(bd) for bd in block_devices] else: self.block_devices = None self.image = image self.meta = meta self.nova_args = nova_args if nova_args is not None else {} @staticmethod def _constructClient(client_version, username, user_domain, password, project_name, project_domain, auth_url): """Return a novaclient from the given args.""" loader = loading.get_plugin_loader('password') # These only work with v3 if user_domain is not None or project_domain is not None: auth = loader.load_from_options(auth_url=auth_url, username=username, user_domain_name=user_domain, password=password, project_name=project_name, project_domain_name=project_domain) else: auth = loader.load_from_options(auth_url=auth_url, username=username, password=password, project_name=project_name) sess = session.Session(auth=auth) return client.Client(client_version, session=sess) def _parseBlockDevice(self, block_device): """ Parse a higher-level view of the block device mapping into something novaclient wants. This should be similar to how Horizon presents it. Required keys: device_name: The name of the device; e.g. vda or xda. source_type: image, snapshot, volume, or blank/None. destination_type: Destination of block device: volume or local. 
delete_on_termination: True/False. uuid: The image, snapshot, or volume id. boot_index: Integer used for boot order. volume_size: Size of the device in GiB. """ client_block_device = {} client_block_device['device_name'] = block_device.get( 'device_name', 'vda') client_block_device['source_type'] = block_device.get( 'source_type', 'image') client_block_device['destination_type'] = block_device.get( 'destination_type', 'volume') client_block_device['delete_on_termination'] = bool( block_device.get('delete_on_termination', True)) client_block_device['uuid'] = block_device['uuid'] client_block_device['boot_index'] = int( block_device.get('boot_index', 0)) # Allow None here. It will be rendered later. client_block_device['volume_size'] = block_device.get('volume_size') return client_block_device @defer.inlineCallbacks def _renderBlockDevice(self, block_device, build): """Render all of the block device's values.""" rendered_block_device = yield build.render(block_device) if rendered_block_device['volume_size'] is None: source_type = rendered_block_device['source_type'] source_uuid = rendered_block_device['uuid'] volume_size = self._determineVolumeSize(source_type, source_uuid) rendered_block_device['volume_size'] = volume_size return rendered_block_device def _determineVolumeSize(self, source_type, source_uuid): """ Determine the minimum size the volume needs to be for the source. Returns the size in GiB. """ nova = self.novaclient if source_type == 'image': # The size returned for an image is in bytes. Round up to the next # integer GiB. image = nova.images.get(source_uuid) if hasattr(image, 'OS-EXT-IMG-SIZE:size'): size = getattr(image, 'OS-EXT-IMG-SIZE:size') size_gb = int(math.ceil(size / 1024.0**3)) return size_gb elif source_type == 'volume': # Volumes are easy because they are already in GiB. volume = nova.volumes.get(source_uuid) return volume.size elif source_type == 'snapshot': snap = nova.volume_snapshots.get(source_uuid) return snap.size else: unknown_source = ("The source type '%s' for UUID '%s' is" " unknown" % (source_type, source_uuid)) raise ValueError(unknown_source) @defer.inlineCallbacks def _getImage(self, build): # If image is a callable, then pass it the list of images. The # function should return the image's UUID to use. image = self.image if callable(image): image_uuid = image(self.novaclient.images.list()) else: image_uuid = yield build.render(image) return image_uuid @defer.inlineCallbacks def renderWorkerProps(self, build): image = yield self._getImage(build) if self.block_devices is not None: block_devices = [] for bd in self.block_devices: rendered_block_device = yield self._renderBlockDevice(bd, build) block_devices.append(rendered_block_device) else: block_devices = None return (image, block_devices) @defer.inlineCallbacks def start_instance(self, build): if self.instance is not None: raise ValueError('instance active') image, block_devices = yield self.renderWorkerPropsOnStart(build) res = yield threads.deferToThread(self._start_instance, image, block_devices) return res def _start_instance(self, image_uuid, block_devices): boot_args = [self.workername, image_uuid, self.flavor] boot_kwargs = dict( meta=self.meta, block_device_mapping_v2=block_devices, **self.nova_args) instance = self.novaclient.servers.create(*boot_args, **boot_kwargs) # There is an issue when using sessions that the status is not # available on the first try. Trying again will work fine. Fetch the # instance to avoid that. 
try: instance = self.novaclient.servers.get(instance.id) except NotFound: log.msg('{class_name} {name} instance {instance.id} ' '({instance.name}) never found', class_name=self.__class__.__name__, name=self.workername, instance=instance) raise LatentWorkerFailedToSubstantiate( instance.id, BUILD) self.instance = instance log.msg('%s %s starting instance %s (image %s)' % (self.__class__.__name__, self.workername, instance.id, image_uuid)) duration = 0 interval = self._poll_resolution while instance.status.startswith(BUILD): time.sleep(interval) duration += interval if duration % 60 == 0: log.msg('%s %s has waited %d minutes for instance %s' % (self.__class__.__name__, self.workername, duration // 60, instance.id)) try: instance = self.novaclient.servers.get(instance.id) except NotFound: log.msg('%s %s instance %s (%s) went missing' % (self.__class__.__name__, self.workername, instance.id, instance.name)) raise LatentWorkerFailedToSubstantiate( instance.id, instance.status) if instance.status == ACTIVE: minutes = duration // 60 seconds = duration % 60 log.msg('%s %s instance %s (%s) started ' 'in about %d minutes %d seconds' % (self.__class__.__name__, self.workername, instance.id, instance.name, minutes, seconds)) return [instance.id, image_uuid, '%02d:%02d:%02d' % (minutes // 60, minutes % 60, seconds)] else: self.failed_to_start(instance.id, instance.status) def stop_instance(self, fast=False): if self.instance is None: # be gentle. Something may just be trying to alert us that an # instance never attached, and it's because, somehow, we never # started. return defer.succeed(None) instance = self.instance self.instance = None self._stop_instance(instance, fast) def _stop_instance(self, instance, fast): try: instance = self.novaclient.servers.get(instance.id) except NotFound: # If can't find the instance, then it's already gone. log.msg('%s %s instance %s (%s) already terminated' % (self.__class__.__name__, self.workername, instance.id, instance.name)) return if instance.status not in (DELETED, UNKNOWN): instance.delete() log.msg('%s %s terminating instance %s (%s)' % (self.__class__.__name__, self.workername, instance.id, instance.name)) buildbot-2.6.0/master/buildbot/worker/protocols/000077500000000000000000000000001361162603000217555ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/worker/protocols/__init__.py000066400000000000000000000000001361162603000240540ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/worker/protocols/base.py000066400000000000000000000064751361162603000232550ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
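The OpenStack worker accepts either an image or a block_devices list; keys omitted from a block device fall back to the defaults applied in _parseBlockDevice, and a missing volume_size is later computed from the source image, volume or snapshot. A configuration sketch in which every UUID, credential and flavor value is a placeholder:

from buildbot.plugins import worker

c['workers'] = [
    worker.OpenStackLatentWorker(
        'os-worker-1', 'workerpass',
        flavor='8c1ed745-4a44-45f1-b8d1-47fd7f2c877c',   # passed unchanged to novaclient.servers.create
        os_username='buildbot',
        os_password='secret',
        os_tenant_name='ci',
        os_auth_url='https://keystone.example.com:5000/v3',
        os_user_domain='default',                        # only used by the keystone v3 loader
        os_project_domain='default',
        block_devices=[{
            'uuid': '00000000-0000-0000-0000-000000000000',  # image/volume/snapshot id
            'volume_size': 10,                               # GiB; omit to have it computed
            # device_name, source_type, destination_type, delete_on_termination
            # and boot_index keep the _parseBlockDevice defaults
        }],
        nova_args={'key_name': 'buildbot'},              # forwarded verbatim to novaclient
    ),
]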
# # Copyright Buildbot Team Members from twisted.internet import defer from buildbot.util import service from buildbot.util import subscription class Listener(service.ReconfigurableServiceMixin, service.AsyncMultiService): pass class Connection: proxies = {} def __init__(self, master, worker): self.master = master self.worker = worker name = worker.workername self._disconnectSubs = subscription.SubscriptionPoint( "disconnections from %s" % name) # This method replace all Impl args by their Proxy protocol implementation def createArgsProxies(self, args): newargs = {} for k, v in args.items(): for implclass, proxyclass in self.proxies.items(): if isinstance(v, implclass): v = proxyclass(v) newargs[k] = v return newargs # disconnection handling def waitShutdown(self): return defer.succeed(None) def notifyOnDisconnect(self, cb): return self._disconnectSubs.subscribe(cb) def waitForNotifyDisconnectedDelivered(self): return self._disconnectSubs.waitForDeliveriesToFinish() def notifyDisconnected(self): self._disconnectSubs.deliver() def loseConnection(self): raise NotImplementedError # methods to send messages to the worker def remotePrint(self, message): raise NotImplementedError def remoteGetWorkerInfo(self): raise NotImplementedError def remoteSetBuilderList(self, builders): raise NotImplementedError def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): raise NotImplementedError def remoteShutdown(self): raise NotImplementedError def remoteStartBuild(self, builderName): raise NotImplementedError def remoteInterruptCommand(self, builderName, commandId, why): raise NotImplementedError # RemoteCommand base implementation and base proxy class RemoteCommandImpl: def remote_update(self, updates): raise NotImplementedError def remote_complete(self, failure=None): raise NotImplementedError # FileWriter base implementation class FileWriterImpl: def remote_write(self, data): raise NotImplementedError def remote_utime(self, accessed_modified): raise NotImplementedError def remote_unpack(self): raise NotImplementedError def remote_close(self): raise NotImplementedError # FileReader base implementation class FileReaderImpl: def remote_read(self, maxLength): raise NotImplementedError def remote_close(self): raise NotImplementedError buildbot-2.6.0/master/buildbot/worker/protocols/null.py000066400000000000000000000066071361162603000233120ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members from twisted.internet import defer from twisted.python import log from buildbot.util.eventual import fireEventually from buildbot.worker.protocols import base class Listener(base.Listener): pass class ProxyMixin(): def __init__(self, impl): assert isinstance(impl, self.ImplClass) self.impl = impl self._disconnect_listeners = [] def callRemote(self, message, *args, **kw): method = getattr(self.impl, "remote_%s" % message, None) if method is None: raise AttributeError("No such method: remote_%s" % (message,)) try: state = method(*args, **kw) except TypeError: log.msg("%s didn't accept %s and %s" % (method, args, kw)) raise # break callback recursion for large transfers by using fireEventually return fireEventually(state) def notifyOnDisconnect(self, cb): pass def dontNotifyOnDisconnect(self, cb): pass # just add ProxyMixin capability to the RemoteCommandProxy # so that callers of callRemote actually directly call the proper method class RemoteCommandProxy(ProxyMixin): ImplClass = base.RemoteCommandImpl class FileReaderProxy(ProxyMixin): ImplClass = base.FileReaderImpl class FileWriterProxy(ProxyMixin): ImplClass = base.FileWriterImpl class Connection(base.Connection): proxies = {base.FileWriterImpl: FileWriterProxy, base.FileReaderImpl: FileReaderProxy} def loseConnection(self): self.notifyDisconnected() def remotePrint(self, message): return defer.maybeDeferred(self.worker.bot.remote_print, message) def remoteGetWorkerInfo(self): return defer.maybeDeferred(self.worker.bot.remote_getWorkerInfo) def remoteSetBuilderList(self, builders): return defer.maybeDeferred(self.worker.bot.remote_setBuilderList, builders) def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): remoteCommand = RemoteCommandProxy(remoteCommand) args = self.createArgsProxies(args) workerforbuilder = self.worker.bot.builders[builderName] return defer.maybeDeferred(workerforbuilder.remote_startCommand, remoteCommand, commandId, commandName, args) def remoteShutdown(self): return defer.maybeDeferred(self.worker.stopService) def remoteStartBuild(self, builderName): return defer.succeed(self.worker.bot.builders[builderName].remote_startBuild()) def remoteInterruptCommand(self, builderName, commandId, why): workerforbuilder = self.worker.bot.builders[builderName] return defer.maybeDeferred(workerforbuilder.remote_interruptCommand, commandId, why) buildbot-2.6.0/master/buildbot/worker/protocols/pb.py000066400000000000000000000273731361162603000227440ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members import contextlib from twisted.internet import defer from twisted.python import log from twisted.spread import pb from buildbot.pbutil import decode from buildbot.util import deferwaiter from buildbot.worker.protocols import base class Listener(base.Listener): name = "pbListener" def __init__(self): super().__init__() # username : (password, portstr, PBManager registration) self._registrations = {} @defer.inlineCallbacks def updateRegistration(self, username, password, portStr): # NOTE: this method is only present on the PB protocol; others do not # use registrations if username in self._registrations: currentPassword, currentPortStr, currentReg = \ self._registrations[username] else: currentPassword, currentPortStr, currentReg = None, None, None if currentPassword != password or currentPortStr != portStr: if currentReg: yield currentReg.unregister() del self._registrations[username] if portStr and password: reg = yield self.master.pbmanager.register(portStr, username, password, self._getPerspective) self._registrations[username] = (password, portStr, reg) return reg @defer.inlineCallbacks def _getPerspective(self, mind, workerName): workers = self.master.workers log.msg("worker '%s' attaching from %s" % (workerName, mind.broker.transport.getPeer())) # try to use TCP keepalives try: mind.broker.transport.setTcpKeepAlive(1) except Exception: log.err("Can't set TcpKeepAlive") worker = workers.getWorkerByName(workerName) conn = Connection(self.master, worker, mind) # inform the manager, logging any problems in the deferred accepted = yield workers.newConnection(conn, workerName) # return the Connection as the perspective if accepted: return conn else: # TODO: return something more useful raise RuntimeError("rejecting duplicate worker") class ReferenceableProxy(pb.Referenceable): def __init__(self, impl): assert isinstance(impl, self.ImplClass) self.impl = impl def __getattr__(self, default=None): return getattr(self.impl, default) # Proxy are just ReferenceableProxy to the Impl classes class RemoteCommand(ReferenceableProxy): ImplClass = base.RemoteCommandImpl class FileReaderProxy(ReferenceableProxy): ImplClass = base.FileReaderImpl class FileWriterProxy(ReferenceableProxy): ImplClass = base.FileWriterImpl class _NoSuchMethod(Exception): """Rewrapped pb.NoSuchMethod remote exception""" @contextlib.contextmanager def _wrapRemoteException(): try: yield except pb.RemoteError as ex: if ex.remoteType in (b'twisted.spread.flavors.NoSuchMethod', 'twisted.spread.flavors.NoSuchMethod'): raise _NoSuchMethod(ex) raise class Connection(base.Connection, pb.Avatar): proxies = {base.FileWriterImpl: FileWriterProxy, base.FileReaderImpl: FileReaderProxy} # TODO: configure keepalive_interval in # c['protocols']['pb']['keepalive_interval'] keepalive_timer = None keepalive_interval = 3600 info = None def __init__(self, master, worker, mind): super().__init__(master, worker) self.mind = mind self._keepalive_waiter = deferwaiter.DeferWaiter() self._keepalive_action_handler = \ deferwaiter.RepeatedActionHandler(master.reactor, self._keepalive_waiter, self.keepalive_interval, self._do_keepalive) # methods called by the PBManager @defer.inlineCallbacks def attached(self, mind): self.startKeepaliveTimer() # pbmanager calls perspective.attached; pass this along to the # worker yield self.worker.attached(self) # and then return a reference to the avatar return self def detached(self, mind): self.stopKeepaliveTimer() self.mind = None self.notifyDisconnected() # disconnection handling 
@defer.inlineCallbacks def waitShutdown(self): self.stopKeepaliveTimer() yield self._keepalive_waiter.wait() def loseConnection(self): self.stopKeepaliveTimer() tport = self.mind.broker.transport # this is the polite way to request that a socket be closed tport.loseConnection() try: # but really we don't want to wait for the transmit queue to # drain. The remote end is unlikely to ACK the data, so we'd # probably have to wait for a (20-minute) TCP timeout. # tport._closeSocket() # however, doing _closeSocket (whether before or after # loseConnection) somehow prevents the notifyOnDisconnect # handlers from being run. Bummer. tport.offset = 0 tport.dataBuffer = b"" except Exception: # however, these hacks are pretty internal, so don't blow up if # they fail or are unavailable log.msg("failed to accelerate the shutdown process") # keepalive handling def _do_keepalive(self): return self.mind.callRemote('print', message="keepalive") def stopKeepaliveTimer(self): self._keepalive_action_handler.stop() def startKeepaliveTimer(self): assert self.keepalive_interval self._keepalive_action_handler.start() # methods to send messages to the worker def remotePrint(self, message): return self.mind.callRemote('print', message=message) @defer.inlineCallbacks def remoteGetWorkerInfo(self): try: with _wrapRemoteException(): # Try to call buildbot-worker method. info = yield self.mind.callRemote('getWorkerInfo') return decode(info) except _NoSuchMethod: yield self.remotePrint( "buildbot-slave detected, failing back to deprecated buildslave API. " "(Ignoring missing getWorkerInfo method.)") info = {} # Probably this is deprecated buildslave. log.msg("Worker.getWorkerInfo is unavailable - falling back to " "deprecated buildslave API") try: with _wrapRemoteException(): info = yield self.mind.callRemote('getSlaveInfo') except _NoSuchMethod: log.msg("Worker.getSlaveInfo is unavailable - ignoring") # newer workers send all info in one command if "slave_commands" in info: assert "worker_commands" not in info info["worker_commands"] = info.pop("slave_commands") return info # Old version buildslave - need to retrieve list of supported # commands and version using separate requests. try: with _wrapRemoteException(): info["worker_commands"] = yield self.mind.callRemote( 'getCommands') except _NoSuchMethod: log.msg("Worker.getCommands is unavailable - ignoring") try: with _wrapRemoteException(): info["version"] = yield self.mind.callRemote('getVersion') except _NoSuchMethod: log.msg("Worker.getVersion is unavailable - ignoring") return decode(info) @defer.inlineCallbacks def remoteSetBuilderList(self, builders): builders = yield self.mind.callRemote('setBuilderList', builders) self.builders = builders return builders def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args): workerforbuilder = self.builders.get(builderName) remoteCommand = RemoteCommand(remoteCommand) args = self.createArgsProxies(args) return workerforbuilder.callRemote('startCommand', remoteCommand, commandId, commandName, args) @defer.inlineCallbacks def remoteShutdown(self): # First, try the "new" way - calling our own remote's shutdown # method. The method was only added in 0.8.3, so ignore NoSuchMethod # failures. 
@defer.inlineCallbacks def new_way(): try: with _wrapRemoteException(): yield self.mind.callRemote('shutdown') # successful shutdown request return True except _NoSuchMethod: # fall through to the old way return False except pb.PBConnectionLost: # the worker is gone, so call it finished return True if (yield new_way()): return # done! # Now, the old way. Look for a builder with a remote reference to the # client side worker. If we can find one, then call "shutdown" on the # remote builder, which will cause the worker buildbot process to exit. def old_way(): d = None for b in self.worker.workerforbuilders.values(): if b.remote: d = b.mind.callRemote("shutdown") break if d: name = self.worker.workername log.msg("Shutting down (old) worker: %s" % name) # The remote shutdown call will not complete successfully since # the buildbot process exits almost immediately after getting # the shutdown request. # Here we look at the reason why the remote call failed, and if # it's because the connection was lost, that means the worker # shutdown as expected. @d.addErrback def _errback(why): if why.check(pb.PBConnectionLost): log.msg("Lost connection to %s" % name) else: log.err("Unexpected error when trying to shutdown %s" % name) return d log.err("Couldn't find remote builder to shut down worker") return defer.succeed(None) yield old_way() def remoteStartBuild(self, builderName): workerforbuilder = self.builders.get(builderName) return workerforbuilder.callRemote('startBuild') def remoteInterruptCommand(self, builderName, commandId, why): workerforbuilder = self.builders.get(builderName) return defer.maybeDeferred(workerforbuilder.callRemote, "interruptCommand", commandId, why) # perspective methods called by the worker def perspective_keepalive(self): self.worker.messageReceivedFromWorker() def perspective_shutdown(self): self.worker.messageReceivedFromWorker() self.worker.shutdownRequested() buildbot-2.6.0/master/buildbot/worker_transition.py000066400000000000000000000116261361162603000225630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Utility functions to support transition from "slave"-named API to "worker"-named. Use of old API generates Python warning which may be logged, ignored or treated as an error using Python builtin warnings API. """ import warnings from twisted.python.deprecate import getWarningMethod from twisted.python.deprecate import setWarningMethod __all__ = ( "DeprecatedWorkerNameWarning", "setupWorkerTransition", ) _WORKER_WARNING_MARK = "[WORKER]" def _compat_name(new_name, compat_name=None): """Returns old API ("slave") name for new name ("worker"). 
>>> assert _compat_name("Worker") == "Slave" >>> assert _compat_name("SomeWorkerStuff") == "SomeSlaveStuff" >>> assert _compat_name("SomeWorker", compat_name="SomeBuildSlave") == \ "SomeBuildSlave" If `compat_name` is not specified old name is construct by replacing in `new_name`: "worker" -> "slave", "Worker" -> "Slave". For the sake of simplicity of usage if `compat_name` argument is specified it will returned as the result. """ if compat_name is not None: assert "slave" in compat_name.lower() assert new_name == "" or "worker" in new_name.lower(), new_name return compat_name compat_replacements = { "worker": "slave", "Worker": "Slave", } compat_name = new_name assert "slave" not in compat_name.lower() assert "worker" in compat_name.lower() for new_word, old_word in compat_replacements.items(): compat_name = compat_name.replace(new_word, old_word) assert compat_name != new_name assert "slave" in compat_name.lower() assert "worker" not in compat_name.lower() return compat_name # DeprecationWarning or PendingDeprecationWarning may be used as # the base class, but by default deprecation warnings are disabled in Python, # so by default old-API usage warnings will be ignored - this is not what # we want. class DeprecatedWorkerAPIWarning(Warning): """Base class for deprecated API warnings.""" class DeprecatedWorkerNameWarning(DeprecatedWorkerAPIWarning): """Warning class for use of deprecated classes, functions, methods and attributes. """ # Separate warnings about deprecated modules from other deprecated # identifiers. Deprecated modules are loaded only once and it's hard to # predict in tests exact places where warning should be issued (in contrast # warnings about other identifiers will be issued every usage). class DeprecatedWorkerModuleWarning(DeprecatedWorkerAPIWarning): """Warning class for use of deprecated modules.""" def reportDeprecatedWorkerNameUsage(message, stacklevel=None, filename=None, lineno=None): """Hook that is ran when old API name is used. :param stacklevel: stack level relative to the caller's frame. Defaults to caller of the caller of this function. """ if filename is None: if stacklevel is None: # Warning will refer to the caller of the caller of this function. stacklevel = 3 else: stacklevel += 2 warnings.warn(DeprecatedWorkerNameWarning(message), None, stacklevel) else: assert stacklevel is None if lineno is None: lineno = 0 warnings.warn_explicit( DeprecatedWorkerNameWarning(message), DeprecatedWorkerNameWarning, filename, lineno) def setupWorkerTransition(): """Hook Twisted deprecation machinery to use custom warning class for Worker API deprecation warnings.""" default_warn_method = getWarningMethod() def custom_warn_method(message, category, stacklevel): if stacklevel is not None: stacklevel += 1 if _WORKER_WARNING_MARK in message: # Message contains our mark - it's Worker API Renaming warning, # issue it appropriately. 
message = message.replace(_WORKER_WARNING_MARK, "") warnings.warn( DeprecatedWorkerNameWarning(message), message, stacklevel) else: # Other's warning message default_warn_method(message, category, stacklevel) setWarningMethod(custom_warn_method) # Enable worker transition hooks setupWorkerTransition() buildbot-2.6.0/master/buildbot/www/000077500000000000000000000000001361162603000172445ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/www/__init__.py000066400000000000000000000000001361162603000213430ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/www/auth.py000066400000000000000000000170771361162603000205730ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from abc import ABCMeta from abc import abstractmethod from twisted.cred.checkers import FilePasswordDB from twisted.cred.checkers import ICredentialsChecker from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.cred.credentials import IUsernamePassword from twisted.cred.error import UnauthorizedLogin from twisted.cred.portal import IRealm from twisted.cred.portal import Portal from twisted.internet import defer from twisted.web.error import Error from twisted.web.guard import BasicCredentialFactory from twisted.web.guard import DigestCredentialFactory from twisted.web.guard import HTTPAuthSessionWrapper from twisted.web.resource import IResource from zope.interface import implementer from buildbot.util import bytes2unicode from buildbot.util import config from buildbot.util import unicode2bytes from buildbot.www import resource class AuthRootResource(resource.Resource): def getChild(self, path, request): # return dynamically generated resources if path == b'login': return self.master.www.auth.getLoginResource() elif path == b'logout': return self.master.www.auth.getLogoutResource() return super().getChild(path, request) class AuthBase(config.ConfiguredMixin): def __init__(self, userInfoProvider=None): self.userInfoProvider = userInfoProvider def reconfigAuth(self, master, new_config): self.master = master def maybeAutoLogin(self, request): return defer.succeed(None) def getLoginResource(self): raise Error(501, b"not implemented") def getLogoutResource(self): return LogoutResource(self.master) @defer.inlineCallbacks def updateUserInfo(self, request): session = request.getSession() if self.userInfoProvider is not None: infos = yield self.userInfoProvider.getUserInfo(session.user_info['username']) session.user_info.update(infos) session.updateSession(request) def getConfigDict(self): return {'name': type(self).__name__} class UserInfoProviderBase(config.ConfiguredMixin): name = "noinfo" def getUserInfo(self, username): return defer.succeed({'email': username}) class LoginResource(resource.Resource): def render_GET(self, request): return self.asyncRenderHelper(request, self.renderLogin) 
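AuthBase.updateUserInfo above consults an optional userInfoProvider to enrich session.user_info after login. A hedged sketch of a custom provider; the group lookup helper is fictional and stands in for whatever directory service a deployment uses:

from twisted.internet import defer

from buildbot.www.auth import UserInfoProviderBase


class ExampleUserInfoProvider(UserInfoProviderBase):
    name = "example"

    @defer.inlineCallbacks
    def getUserInfo(self, username):
        # look_up_groups() is a placeholder for an LDAP/REST/database query
        groups = yield look_up_groups(username)
        return {'email': '%s@example.com' % username,
                'full_name': username,
                'groups': groups}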
@defer.inlineCallbacks def renderLogin(self, request): raise NotImplementedError class NoAuth(AuthBase): pass class RemoteUserAuth(AuthBase): header = b"REMOTE_USER" headerRegex = re.compile(br"(?P[^ @]+)@(?P[^ @]+)") def __init__(self, header=None, headerRegex=None, **kwargs): super().__init__(**kwargs) if self.userInfoProvider is None: self.userInfoProvider = UserInfoProviderBase() if header is not None: self.header = unicode2bytes(header) if headerRegex is not None: self.headerRegex = re.compile(unicode2bytes(headerRegex)) @defer.inlineCallbacks def maybeAutoLogin(self, request): header = request.getHeader(self.header) if header is None: raise Error(403, b"missing http header " + self.header + b". Check your reverse proxy config!") res = self.headerRegex.match(header) if res is None: raise Error( 403, b'http header does not match regex! "' + header + b'" not matching ' + self.headerRegex.pattern) session = request.getSession() user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()} if session.user_info != user_info: session.user_info = user_info yield self.updateUserInfo(request) @implementer(IRealm) class AuthRealm: def __init__(self, master, auth): self.auth = auth self.master = master def requestAvatar(self, avatarId, mind, *interfaces): if IResource in interfaces: return (IResource, PreAuthenticatedLoginResource(self.master, avatarId), lambda: None) raise NotImplementedError() class TwistedICredAuthBase(AuthBase): def __init__(self, credentialFactories, checkers, **kwargs): super().__init__(**kwargs) if self.userInfoProvider is None: self.userInfoProvider = UserInfoProviderBase() self.credentialFactories = credentialFactories self.checkers = checkers def getLoginResource(self): return HTTPAuthSessionWrapper( Portal(AuthRealm(self.master, self), self.checkers), self.credentialFactories) class HTPasswdAuth(TwistedICredAuthBase): def __init__(self, passwdFile, **kwargs): super().__init__([DigestCredentialFactory(b"md5", b"buildbot"), BasicCredentialFactory(b"buildbot")], [FilePasswordDB(passwdFile)], **kwargs) class UserPasswordAuth(TwistedICredAuthBase): def __init__(self, users, **kwargs): if isinstance(users, dict): users = {user: unicode2bytes(pw) for user, pw in users.items()} elif isinstance(users, list): users = [(user, unicode2bytes(pw)) for user, pw in users] super().__init__([DigestCredentialFactory(b"md5", b"buildbot"), BasicCredentialFactory(b"buildbot")], [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))], **kwargs) @implementer(ICredentialsChecker) class CustomAuth(TwistedICredAuthBase): __metaclass__ = ABCMeta credentialInterfaces = [IUsernamePassword] def __init__(self, **kwargs): super().__init__([BasicCredentialFactory(b"buildbot")], [self], **kwargs) def requestAvatarId(self, cred): if self.check_credentials(cred.username, cred.password): return defer.succeed(cred.username) return defer.fail(UnauthorizedLogin()) @abstractmethod def check_credentials(username, password): return False def _redirect(master, request): url = request.args.get(b"redirect", [b"/"])[0] url = bytes2unicode(url) return resource.Redirect(master.config.buildbotURL + "#" + url) class PreAuthenticatedLoginResource(LoginResource): # a LoginResource which is already authenticated via a # HTTPAuthSessionWrapper def __init__(self, master, username): super().__init__(master) self.username = username @defer.inlineCallbacks def renderLogin(self, request): session = request.getSession() session.user_info = dict(username=bytes2unicode(self.username)) yield 
self.master.www.auth.updateUserInfo(request) raise _redirect(self.master, request) class LogoutResource(resource.Resource): def render_GET(self, request): session = request.getSession() session.expire() session.updateSession(request) request.redirect(_redirect(self.master, request).url) return b'' buildbot-2.6.0/master/buildbot/www/authz/000077500000000000000000000000001361162603000203775ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/www/authz/__init__.py000066400000000000000000000004121361162603000225050ustar00rootroot00000000000000from buildbot.www.authz.authz import Authz from buildbot.www.authz.authz import Forbidden from buildbot.www.authz.authz import fnmatchStrMatcher from buildbot.www.authz.authz import reStrMatcher __all__ = ["Authz", "fnmatchStrMatcher", "reStrMatcher", "Forbidden"] buildbot-2.6.0/master/buildbot/www/authz/authz.py000066400000000000000000000066351361162603000221160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import fnmatch import re from twisted.internet import defer from twisted.web.error import Error from zope.interface import implementer from buildbot.interfaces import IConfigured from buildbot.util import unicode2bytes from buildbot.www.authz.roles import RolesFromOwner class Forbidden(Error): def __init__(self, msg): super().__init__(403, msg) # fnmatch and re.match are reversed API, we cannot just rename them def fnmatchStrMatcher(value, match): return fnmatch.fnmatch(value, match) def reStrMatcher(value, match): return re.match(match, value) @implementer(IConfigured) class Authz: def getConfigDict(self): return {} def __init__(self, allowRules=None, roleMatchers=None, stringsMatcher=fnmatchStrMatcher): self.match = stringsMatcher if allowRules is None: allowRules = [] if roleMatchers is None: roleMatchers = [] self.allowRules = allowRules self.roleMatchers = [ r for r in roleMatchers if not isinstance(r, RolesFromOwner)] self.ownerRoleMatchers = [ r for r in roleMatchers if isinstance(r, RolesFromOwner)] def setMaster(self, master): self.master = master for r in self.roleMatchers + self.ownerRoleMatchers + self.allowRules: r.setAuthz(self) def getRolesFromUser(self, userDetails): roles = set() for roleMatcher in self.roleMatchers: roles.update(set(roleMatcher.getRolesFromUser(userDetails))) return roles def getOwnerRolesFromUser(self, userDetails, owner): roles = set() for roleMatcher in self.ownerRoleMatchers: roles.update(set(roleMatcher.getRolesFromUser(userDetails, owner))) return roles @defer.inlineCallbacks def assertUserAllowed(self, ep, action, options, userDetails): roles = self.getRolesFromUser(userDetails) for rule in self.allowRules: match = yield rule.match(ep, action, options) if match is not None: # only try to get owner if there are owner Matchers if self.ownerRoleMatchers: owner = yield match.getOwner() if owner is not None: roles.update( 
self.getOwnerRolesFromUser(userDetails, owner)) for role in roles: if self.match(role, rule.role): return None if not rule.defaultDeny: continue # check next suitable rule if not denied error_msg = unicode2bytes( "you need to have role '%s'" % rule.role) raise Forbidden(error_msg) return None buildbot-2.6.0/master/buildbot/www/authz/endpointmatchers.py000066400000000000000000000156701361162603000243310ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import inspect from twisted.internet import defer from buildbot.data.exceptions import InvalidPathError from buildbot.util import bytes2unicode class EndpointMatcherBase: def __init__(self, role, defaultDeny=True): self.role = role self.defaultDeny = defaultDeny self.owner = None def setAuthz(self, authz): self.authz = authz self.master = authz.master def match(self, ep, action="get", options=None): if options is None: options = {} try: epobject, epdict = self.master.data.getEndpoint(ep) for klass in inspect.getmro(epobject.__class__): m = getattr( self, "match_" + klass.__name__ + "_" + action, None) if m is not None: return m(epobject, epdict, options) m = getattr(self, "match_" + klass.__name__, None) if m is not None: return m(epobject, epdict, options) except InvalidPathError: return defer.succeed(None) return defer.succeed(None) def __repr__(self): # a repr for debugging. 
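# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original endpointmatchers.py): match()
# above looks up a method named "match_<EndpointClassName>_<action>" (falling
# back to "match_<EndpointClassName>") along the endpoint class's MRO, so a
# custom matcher only needs to define such a method.  The matcher and the
# "cancel" action below are hypothetical; the shape mirrors the built-in
# matchers that follow.
from twisted.internet import defer

class ExampleBuildRequestCancelMatcher(EndpointMatcherBase):
    # grant this matcher's role for a "cancel" action on buildrequest endpoints
    def match_BuildRequestEndpoint_cancel(self, epobject, epdict, options):
        return defer.succeed(Match(self.master))
# ---------------------------------------------------------------------------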
displays the class, and string attributes args = [] for k, v in self.__dict__.items(): if isinstance(v, str): args.append("%s='%s'" % (k, v)) return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) class Match: def __init__(self, master, build=None, buildrequest=None, buildset=None): self.master = master self.build = build self.buildrequest = buildrequest self.buildset = buildset def getOwner(self): if self.buildset: return self.getOwnerFromBuildset(self.buildset) elif self.buildrequest: return self.getOwnerFromBuildRequest(self.buildrequest) elif self.build: return self.getOwnerFromBuild(self.build) return defer.succeed(None) @defer.inlineCallbacks def getOwnerFromBuild(self, build): br = yield self.master.data.get(("buildrequests", build['buildrequestid'])) owner = yield self.getOwnerFromBuildRequest(br) return owner @defer.inlineCallbacks def getOwnerFromBuildsetOrBuildRequest(self, buildsetorbuildrequest): props = yield self.master.data.get(("buildsets", buildsetorbuildrequest['buildsetid'], "properties")) if 'owner' in props: return props['owner'][0] return None getOwnerFromBuildRequest = getOwnerFromBuildsetOrBuildRequest getOwnerFromBuildSet = getOwnerFromBuildsetOrBuildRequest class AnyEndpointMatcher(EndpointMatcherBase): def match(self, ep, action="get", options=None): return defer.succeed(Match(self.master)) class AnyControlEndpointMatcher(EndpointMatcherBase): def match(self, ep, action="", options=None): if bytes2unicode(action).lower() != "get": return defer.succeed(Match(self.master)) return defer.succeed(None) class StopBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def matchFromBuilderId(self, builderid): if builderid is not None: builder = yield self.master.data.get(('builders', builderid)) buildername = builder['name'] return self.authz.match(buildername, self.builder) return False @defer.inlineCallbacks def match_BuildEndpoint_stop(self, epobject, epdict, options): build = yield epobject.get({}, epdict) if self.builder is None: # no filtering needed: we match! return Match(self.master, build=build) # if filtering needed, we need to get some more info ret = yield self.matchFromBuilderId(build['builderid']) if ret: return Match(self.master, build=build) return None @defer.inlineCallbacks def match_BuildRequestEndpoint_stop(self, epobject, epdict, options): buildrequest = yield epobject.get({}, epdict) if self.builder is None: # no filtering needed: we match! return Match(self.master, buildrequest=buildrequest) # if filtering needed, we need to get some more info ret = yield self.matchFromBuilderId(buildrequest['builderid']) if ret: return Match(self.master, buildrequest=buildrequest) return None class ForceBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def match_ForceSchedulerEndpoint_force(self, epobject, epdict, options): if self.builder is None: # no filtering needed: we match without querying! 
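# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): how the matchers in
# this module typically combine with Authz and the role matchers from
# buildbot.www.authz.roles in a master.cfg.  The e-mail address, group prefix
# and builder-name pattern are hypothetical.
from buildbot.www.authz import Authz
from buildbot.www.authz.endpointmatchers import (
    AnyControlEndpointMatcher, ForceBuildEndpointMatcher, StopBuildEndpointMatcher)
from buildbot.www.authz.roles import RolesFromEmails, RolesFromGroups

example_authz = Authz(
    allowRules=[
        ForceBuildEndpointMatcher(builder="release*", role="developers"),
        StopBuildEndpointMatcher(role="developers"),
        # catch-all: any other control (non-GET) action needs the admins role
        AnyControlEndpointMatcher(role="admins"),
    ],
    roleMatchers=[
        RolesFromEmails(admins=["admin@example.org"]),
        # e.g. a "buildbot-developers" group grants the "developers" role
        RolesFromGroups(groupPrefix="buildbot-"),
    ],
)
# in master.cfg: c['www']['authz'] = example_authz
# ---------------------------------------------------------------------------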
return Match(self.master) sched = yield epobject.findForceScheduler(epdict['schedulername']) if sched is not None: builderNames = options.get('builderNames') builderid = options.get('builderid') builderNames = yield sched.computeBuilderNames(builderNames, builderid) for buildername in builderNames: if self.authz.match(buildername, self.builder): return Match(self.master) return None class RebuildBuildEndpointMatcher(EndpointMatcherBase): def __init__(self, builder=None, **kwargs): self.builder = builder super().__init__(**kwargs) @defer.inlineCallbacks def match_BuildEndpoint_rebuild(self, epobject, epdict, options): build = yield epobject.get({}, epdict) return Match(self.master, build=build) class EnableSchedulerEndpointMatcher(EndpointMatcherBase): def match_SchedulerEndpoint_enable(self, epobject, epdict, options): return defer.succeed(Match(self.master)) ##### # not yet implemented class ViewBuildsEndpointMatcher(EndpointMatcherBase): def __init__(self, branch=None, project=None, builder=None, **kwargs): super().__init__(**kwargs) self.branch = branch self.project = project self.builder = builder class BranchEndpointMatcher(EndpointMatcherBase): def __init__(self, branch, **kwargs): self.branch = branch super().__init__(**kwargs) buildbot-2.6.0/master/buildbot/www/authz/roles.py000066400000000000000000000057631361162603000221100ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # Copyright Buildbot Team Members class RolesFromBase: def __init__(self): pass def getRolesFromUser(self, userDetails): return [] def setAuthz(self, authz): self.authz = authz self.master = authz.master class RolesFromGroups(RolesFromBase): def __init__(self, groupPrefix=""): super().__init__() self.groupPrefix = groupPrefix def getRolesFromUser(self, userDetails): roles = [] if 'groups' in userDetails: for group in userDetails['groups']: if group.startswith(self.groupPrefix): roles.append(group[len(self.groupPrefix):]) return roles class RolesFromEmails(RolesFromBase): def __init__(self, **kwargs): super().__init__() self.roles = {} for role, emails in kwargs.items(): for email in emails: self.roles.setdefault(email, []).append(role) def getRolesFromUser(self, userDetails): if 'email' in userDetails: return self.roles.get(userDetails['email'], []) return [] class RolesFromDomain(RolesFromEmails): def __init__(self, **kwargs): super().__init__() self.domain_roles = {} for role, domains in kwargs.items(): for domain in domains: self.domain_roles.setdefault(domain, []).append(role) def getRolesFromUser(self, userDetails): if 'email' in userDetails: email = userDetails['email'] edomain = email.split('@')[-1] return self.domain_roles.get(edomain, []) return [] class RolesFromOwner(RolesFromBase): def __init__(self, role): super().__init__() self.role = role def getRolesFromUser(self, userDetails, owner): if 'email' in userDetails: if userDetails['email'] == owner and owner is not None: return [self.role] return [] class RolesFromUsername(RolesFromBase): def __init__(self, roles, usernames): self.roles = roles if None in usernames: from buildbot import config config.error('Usernames cannot be None') self.usernames = usernames def getRolesFromUser(self, userDetails): if userDetails.get('username') in self.usernames: return self.roles return [] buildbot-2.6.0/master/buildbot/www/avatar.py000066400000000000000000000064621361162603000211040ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import hashlib from urllib.parse import urlencode from urllib.parse import urljoin from twisted.internet import defer from buildbot.util import config from buildbot.util import unicode2bytes from buildbot.www import resource class AvatarBase(config.ConfiguredMixin): name = "noavatar" def getUserAvatar(self, email, size, defaultAvatarUrl): raise NotImplementedError() class AvatarGravatar(AvatarBase): name = "gravatar" # gravatar does not want intranet URL, which is most of where the bots are # just use same default as github (retro) default = "retro" def getUserAvatar(self, email, size, defaultAvatarUrl): # construct the url emailBytes = unicode2bytes(email.lower()) emailHash = hashlib.md5(emailBytes) gravatar_url = "//www.gravatar.com/avatar/" gravatar_url += emailHash.hexdigest() + "?" 
if self.default != "url": defaultAvatarUrl = self.default url = {'d': defaultAvatarUrl, 's': str(size)} sorted_url = sorted(url.items(), key=lambda x: x[0]) gravatar_url += urlencode(sorted_url) raise resource.Redirect(gravatar_url) class AvatarResource(resource.Resource): # enable reconfigResource calls needsReconfig = True defaultAvatarUrl = b"img/nobody.png" def reconfigResource(self, new_config): self.avatarMethods = new_config.www.get('avatar_methods', []) self.defaultAvatarFullUrl = urljoin( unicode2bytes(new_config.buildbotURL), unicode2bytes(self.defaultAvatarUrl)) self.cache = {} # ensure the avatarMethods is a iterable if isinstance(self.avatarMethods, AvatarBase): self.avatarMethods = (self.avatarMethods, ) def render_GET(self, request): return self.asyncRenderHelper(request, self.renderAvatar) @defer.inlineCallbacks def renderAvatar(self, request): email = request.args.get(b"email", [b""])[0] size = request.args.get(b"size", 32) r = None if self.cache.get(email): r = self.cache[email] for method in self.avatarMethods: try: res = yield method.getUserAvatar(email, size, self.defaultAvatarFullUrl) except resource.Redirect: if r is not None: self.cache[email] = r raise if res is not None: request.setHeader(b'content-type', res[0]) request.setHeader(b'content-length', unicode2bytes(str(len(res[1])))) request.write(res[1]) return raise resource.Redirect(self.defaultAvatarUrl) buildbot-2.6.0/master/buildbot/www/change_hook.py000066400000000000000000000153031361162603000220650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # code inspired/copied from contrib/github_buildbot # and inspired from code from the Chromium project # otherwise, Andrew Melo wrote the rest # but "the rest" is pretty minimal import re from datetime import datetime from twisted.internet import defer from twisted.python import log from twisted.web import server from buildbot.plugins.db import get_plugins from buildbot.util import bytes2unicode from buildbot.util import datetime2epoch from buildbot.util import unicode2bytes from buildbot.www import resource class ChangeHookResource(resource.Resource): # this is a cheap sort of template thingy contentType = "text/html; charset=utf-8" children = {} needsReconfig = True def __init__(self, dialects=None, master=None): """ The keys of 'dialects' select a modules to load under master/buildbot/www/hooks/ The value is passed to the module's getChanges function, providing configuration options to the dialect. 
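Illustrative example (added for clarity, not part of the original docstring);
the dialect names and option values are hypothetical::

    c['www']['change_hook_dialects'] = {
        'base': True,                                         # enable the base/sample dialect
        'github': {'secret': 'hook-secret', 'strict': True},  # hypothetical GitHub options
        # 'somedialect': {'custom_class': SomeHandlerClass},  # honoured by makeHandler()
    }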
""" super().__init__(master) if dialects is None: dialects = {} self.dialects = dialects self._dialect_handlers = {} self.request_dialect = None self._plugins = get_plugins("webhooks") def reconfigResource(self, new_config): self.dialects = new_config.www.get('change_hook_dialects', {}) def getChild(self, name, request): return self def render_GET(self, request): """ Responds to events and starts the build process different implementations can decide on what methods they will accept """ return self.render_POST(request) def render_POST(self, request): """ Responds to events and starts the build process different implementations can decide on what methods they will accept :arguments: request the http request object """ try: d = self.getAndSubmitChanges(request) except Exception: d = defer.fail() def ok(_): request.setResponseCode(202) request.finish() def err(why): code = 500 if why.check(ValueError): code = 400 msg = unicode2bytes(why.getErrorMessage()) else: log.err(why, "adding changes from web hook") msg = b'Error processing changes.' request.setResponseCode(code, msg) request.write(msg) request.finish() d.addCallbacks(ok, err) return server.NOT_DONE_YET @defer.inlineCallbacks def getAndSubmitChanges(self, request): changes, src = yield self.getChanges(request) if not changes: request.write(b"no change found") else: yield self.submitChanges(changes, request, src) request.write(unicode2bytes("{} change found".format(len(changes)))) def makeHandler(self, dialect): """create and cache the handler object for this dialect""" if dialect not in self.dialects: m = "The dialect specified, '{}', wasn't whitelisted in change_hook".format(dialect) log.msg(m) log.msg( "Note: if dialect is 'base' then it's possible your URL is malformed and we didn't regex it properly") raise ValueError(m) if dialect not in self._dialect_handlers: if dialect not in self._plugins: m = "The dialect specified, '{}', is not registered as a buildbot.webhook plugin".format(dialect) log.msg(m) raise ValueError(m) options = self.dialects[dialect] if isinstance(options, dict) and 'custom_class' in options: klass = options['custom_class'] else: klass = self._plugins.get(dialect) self._dialect_handlers[dialect] = klass(self.master, self.dialects[dialect]) return self._dialect_handlers[dialect] @defer.inlineCallbacks def getChanges(self, request): """ Take the logic from the change hook, and then delegate it to the proper handler We use the buildbot plugin mechanisms to find out about dialects and call getChanges() the return value is a list of changes if DIALECT is unspecified, a sample implementation is provided """ uriRE = re.search(r'^/change_hook/?([a-zA-Z0-9_]*)', bytes2unicode(request.uri)) if not uriRE: log.msg("URI doesn't match change_hook regex: %s" % request.uri) raise ValueError( "URI doesn't match change_hook regex: %s" % request.uri) changes = [] src = None # Was there a dialect provided? 
if uriRE.group(1): dialect = uriRE.group(1) else: dialect = 'base' handler = self.makeHandler(dialect) changes, src = yield handler.getChanges(request) return (changes, src) @defer.inlineCallbacks def submitChanges(self, changes, request, src): for chdict in changes: when_timestamp = chdict.get('when_timestamp') if isinstance(when_timestamp, datetime): chdict['when_timestamp'] = datetime2epoch(when_timestamp) # unicodify stuff for k in ('comments', 'author', 'committer', 'revision', 'branch', 'category', 'revlink', 'repository', 'codebase', 'project'): if k in chdict: chdict[k] = bytes2unicode(chdict[k]) if chdict.get('files'): chdict['files'] = [bytes2unicode(f) for f in chdict['files']] if chdict.get('properties'): chdict['properties'] = dict((bytes2unicode(k), v) for k, v in chdict['properties'].items()) chid = yield self.master.data.updates.addChange(src=bytes2unicode(src), **chdict) log.msg("injected change %s" % chid) buildbot-2.6.0/master/buildbot/www/config.py000066400000000000000000000136601361162603000210710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import os import posixpath import jinja2 from twisted.internet import defer from twisted.python import log from twisted.web.error import Error from buildbot.interfaces import IConfigured from buildbot.util import unicode2bytes from buildbot.www import resource class IndexResource(resource.Resource): # enable reconfigResource calls needsReconfig = True def __init__(self, master, staticdir): super().__init__(master) loader = jinja2.FileSystemLoader(staticdir) self.jinja = jinja2.Environment( loader=loader, undefined=jinja2.StrictUndefined) def reconfigResource(self, new_config): self.config = new_config.www versions = self.getEnvironmentVersions() vs = self.config.get('versions') if isinstance(vs, list): versions += vs self.config['versions'] = versions self.custom_templates = {} template_dir = self.config.pop('custom_templates_dir', None) if template_dir is not None: template_dir = os.path.join(self.master.basedir, template_dir) self.custom_templates = self.parseCustomTemplateDir(template_dir) def render_GET(self, request): return self.asyncRenderHelper(request, self.renderIndex) def parseCustomTemplateDir(self, template_dir): res = {} allowed_ext = [".html"] try: import pyjade # pylint: disable=import-outside-toplevel allowed_ext.append(".jade") except ImportError: # pragma: no cover log.msg("pyjade not installed. 
Ignoring .jade files from %s" % (template_dir,)) pyjade = None for root, dirs, files in os.walk(template_dir): if root == template_dir: template_name = posixpath.join("views", "%s.html") else: # template_name is a url, so we really want '/' # root is a os.path, though template_name = posixpath.join( os.path.basename(root), "views", "%s.html") for f in files: fn = os.path.join(root, f) basename, ext = os.path.splitext(f) if ext not in allowed_ext: continue if ext == ".html": with open(fn) as f: html = f.read().strip() elif ext == ".jade": with open(fn) as f: jade = f.read() parser = pyjade.parser.Parser(jade) block = parser.parse() compiler = pyjade.ext.html.Compiler( block, pretty=False) html = compiler.compile() res[template_name % (basename,)] = html return res @staticmethod def getEnvironmentVersions(): import sys # pylint: disable=import-outside-toplevel import twisted # pylint: disable=import-outside-toplevel from buildbot import version as bbversion # pylint: disable=import-outside-toplevel pyversion = '.'.join(map(str, sys.version_info[:3])) tx_version_info = (twisted.version.major, twisted.version.minor, twisted.version.micro) txversion = '.'.join(map(str, tx_version_info)) return [ ('Python', pyversion), ('Buildbot', bbversion), ('Twisted', txversion), ] @defer.inlineCallbacks def renderIndex(self, request): config = {} request.setHeader(b"content-type", b'text/html') request.setHeader(b"Cache-Control", b"public;max-age=0") try: yield self.config['auth'].maybeAutoLogin(request) except Error as e: config["on_load_warning"] = e.message user_info = self.master.www.getUserInfos(request) config.update({"user": user_info}) config.update(self.config) config['buildbotURL'] = self.master.config.buildbotURL config['title'] = self.master.config.title config['titleURL'] = self.master.config.titleURL config['multiMaster'] = self.master.config.multiMaster # delete things that may contain secrets if 'change_hook_dialects' in config: del config['change_hook_dialects'] def toJson(obj): try: obj = IConfigured(obj).getConfigDict() except TypeError: # this happens for old style classes (not deriving objects) pass if isinstance(obj, dict): return obj # don't leak object memory address obj = obj.__class__.__module__ + "." + obj.__class__.__name__ return repr(obj) + " not yet IConfigured" tpl = self.jinja.get_template('index.html') # we use Jinja in order to render some server side dynamic stuff # For example, custom_templates javascript is generated by the # layout.jade jinja template tpl = tpl.render(configjson=json.dumps(config, default=toJson), custom_templates=self.custom_templates, config=self.config) return unicode2bytes(tpl, encoding='ascii') buildbot-2.6.0/master/buildbot/www/hooks/000077500000000000000000000000001361162603000203675ustar00rootroot00000000000000buildbot-2.6.0/master/buildbot/www/hooks/__init__.py000066400000000000000000000000071361162603000224750ustar00rootroot00000000000000# test buildbot-2.6.0/master/buildbot/www/hooks/base.py000066400000000000000000000067341361162603000216650ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
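# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): parseCustomTemplateDir()
# above picks up *.html files (and *.jade files when pyjade is installed) and
# exposes them to the frontend as "views/<basename>.html".  A hypothetical
# layout, relative to the master's basedir:
#
#   templates/
#       mybranding.html        -> served as views/mybranding.html
#
# in master.cfg: c['www']['custom_templates_dir'] = 'templates'
# ---------------------------------------------------------------------------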
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # code inspired/copied from contrib/github_buildbot # and inspired from code from the Chromium project # otherwise, Andrew Melo wrote the rest # but "the rest" is pretty minimal import json from buildbot.util import bytes2unicode class BaseHookHandler: def __init__(self, master, options): self.master = master self.options = options def getChanges(self, request): """ Consumes a naive build notification (the default for now) basically, set POST variables to match commit object parameters: revision, revlink, comments, branch, who, files, links files, links and properties will be de-json'd, the rest are interpreted as strings """ def firstOrNothing(value): """ Small helper function to return the first value (if value is a list) or return the whole thing otherwise. Make sure to properly decode bytes to unicode strings. """ if (isinstance(value, type([]))): value = value[0] return bytes2unicode(value) args = request.args # first, convert files, links and properties files = None if args.get(b'files'): files = json.loads(firstOrNothing(args.get(b'files'))) else: files = [] properties = None if args.get(b'properties'): properties = json.loads(firstOrNothing(args.get(b'properties'))) else: properties = {} revision = firstOrNothing(args.get(b'revision')) when = firstOrNothing(args.get(b'when_timestamp')) if when is None: when = firstOrNothing(args.get(b'when')) if when is not None: when = float(when) author = firstOrNothing(args.get(b'author')) if not author: author = firstOrNothing(args.get(b'who')) committer = firstOrNothing(args.get(b'committer')) comments = firstOrNothing(args.get(b'comments')) branch = firstOrNothing(args.get(b'branch')) category = firstOrNothing(args.get(b'category')) revlink = firstOrNothing(args.get(b'revlink')) repository = firstOrNothing(args.get(b'repository')) or '' project = firstOrNothing(args.get(b'project')) or '' codebase = firstOrNothing(args.get(b'codebase')) chdict = dict(author=author, committer=committer, files=files, comments=comments, revision=revision, when_timestamp=when, branch=branch, category=category, revlink=revlink, properties=properties, repository=repository, project=project, codebase=codebase) return ([chdict], None) base = BaseHookHandler # alternate name for buildbot plugin buildbot-2.6.0/master/buildbot/www/hooks/bitbucket.py000066400000000000000000000051421361162603000227170ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
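# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the 'base' dialect
# handler above reads plain POST form fields, with 'files' and 'properties'
# JSON-encoded.  A hypothetical client, assuming the third-party 'requests'
# package is available and the master listens at the URL shown:
import json
import requests

requests.post(
    "http://buildbot.example.org:8010/change_hook/base",
    data={
        "author": "alice <alice@example.org>",
        "comments": "fix the frobnicator",
        "revision": "abcdef0123456789",
        "branch": "master",
        "files": json.dumps(["src/frob.c"]),
        "properties": json.dumps({"got_lucky": True}),
    },
)
# ---------------------------------------------------------------------------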
# # Copyright Buildbot Team Members # Copyright 2013 (c) Mamba Team import json from dateutil.parser import parse as dateparse from twisted.python import log from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-Event-Key' class BitBucketHandler(BaseHookHandler): def getChanges(self, request): """Catch a POST request from BitBucket and start a build process Check the URL below if you require more information about payload https://confluence.atlassian.com/display/BITBUCKET/POST+Service+Management :param request: the http request Twisted object :param options: additional options """ event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) payload = json.loads(bytes2unicode(request.args[b'payload'][0])) repo_url = '{}{}'.format( payload['canon_url'], payload['repository']['absolute_url']) project = request.args.get(b'project', [b''])[0] project = bytes2unicode(project) changes = [] for commit in payload['commits']: changes.append({ 'author': commit['raw_author'], 'files': [f['file'] for f in commit['files']], 'comments': commit['message'], 'revision': commit['raw_node'], 'when_timestamp': dateparse(commit['utctimestamp']), 'branch': commit['branch'], 'revlink': '{}commits/{}'.format(repo_url, commit['raw_node']), 'repository': repo_url, 'project': project, 'properties': { 'event': event_type, }, }) log.msg('New revision: {}'.format(commit['node'])) log.msg('Received {} changes from bitbucket'.format(len(changes))) return (changes, payload['repository']['scm']) bitbucket = BitBucketHandler buildbot-2.6.0/master/buildbot/www/hooks/bitbucketcloud.py000066400000000000000000000140471361162603000237520ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
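# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the Bitbucket Cloud
# handler below accepts a 'codebase' option that may be either a fixed string
# or a callable receiving the decoded payload.  The function name and the
# dialect wiring are hypothetical.
def example_codebase_from_payload(payload):
    # key the codebase off the repository's html link, a field the handler
    # below also reads
    return payload['repository']['links']['html']['href']

# in master.cfg:
#   c['www']['change_hook_dialects'] = {
#       'bitbucketcloud': {'codebase': example_codebase_from_payload},
#   }
# ---------------------------------------------------------------------------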
# # Copyright Buildbot Team Members # Copyright Mamba Team import json from twisted.python import log from buildbot.util import bytes2unicode GIT_BRANCH_REF = "refs/heads/{}" GIT_MERGE_REF = "refs/pull-requests/{}/merge" GIT_TAG_REF = "refs/tags/{}" _HEADER_EVENT = b'X-Event-Key' class BitbucketCloudEventHandler: def __init__(self, master, options=None): if options is None: options = {} self.master = master if not isinstance(options, dict): options = {} self.options = options self._codebase = self.options.get('codebase', None) def process(self, request): payload = self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("Processing event {header}: {event}" .format(header=_HEADER_EVENT, event=event_type)) event_type = event_type.replace(":", "_") handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) return handler(payload) def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type.startswith('application/json'): payload = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) log.msg("Payload: {}".format(payload)) return payload def handle_repo_push(self, payload): changes = [] project = payload['repository'].get('project', {'name': 'none'})['name'] repo_url = payload['repository']['links']['self']['href'] web_url = payload['repository']['links']['html']['href'] for payload_change in payload['push']['changes']: if payload_change['new']: age = 'new' category = 'push' else: # when new is null the ref is deleted age = 'old' category = 'ref-deleted' commit_hash = payload_change[age]['target']['hash'] if payload_change[age]['type'] == 'branch': branch = GIT_BRANCH_REF.format(payload_change[age]['name']) elif payload_change[age]['type'] == 'tag': branch = GIT_TAG_REF.format(payload_change[age]['name']) change = { 'revision': commit_hash, 'revlink': '{}/commits/{}'.format(web_url, commit_hash), 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['display_name'], payload['actor']['nickname']), 'comments': 'Bitbucket Cloud commit {}'.format(commit_hash), 'branch': branch, 'project': project, 'category': category } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return (changes, payload['repository']['scm']) def handle_pullrequest_created(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-created") def handle_pullrequest_updated(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-updated") def handle_pullrequest_fulfilled(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['toRef']['branch']['name']), "pull-fulfilled") def handle_pullrequest_rejected(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['fromRef']['branch']['name']), "pull-rejected") def handle_pullrequest(self, payload, refname, category): pr_number = int(payload['pullrequest']['id']) repo_url = payload['repository']['links']['self']['href'] project = payload['repository'].get('project', {'name': 'none'})['name'] change = { 'revision': 
payload['pullrequest']['fromRef']['commit']['hash'], 'revlink': payload['pullrequest']['link'], 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['display_name'], payload['actor']['nickname']), 'comments': 'Bitbucket Cloud Pull Request #{}'.format(pr_number), 'branch': refname, 'project': project, 'category': category, 'properties': {'pullrequesturl': payload['pullrequest']['link']} } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase return [change], payload['repository']['scm'] def getChanges(self, request): return self.process(request) bitbucketcloud = BitbucketCloudEventHandler buildbot-2.6.0/master/buildbot/www/hooks/bitbucketserver.py000066400000000000000000000147531361162603000241560ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # Copyright Mamba Team import json from twisted.python import log from buildbot.util import bytes2unicode GIT_BRANCH_REF = "refs/heads/{}" GIT_MERGE_REF = "refs/pull-requests/{}/merge" GIT_TAG_REF = "refs/tags/{}" _HEADER_EVENT = b'X-Event-Key' class BitbucketServerEventHandler: def __init__(self, master, options=None): if options is None: options = {} self.master = master if not isinstance(options, dict): options = {} self.options = options self._codebase = self.options.get('codebase', None) def process(self, request): payload = self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("Processing event {header}: {event}" .format(header=_HEADER_EVENT, event=event_type)) event_type = event_type.replace(":", "_") handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) return handler(payload) def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) content_type = request.getHeader(b'Content-Type') content_type = bytes2unicode(content_type) if content_type.startswith('application/json'): payload = json.loads(content) else: raise ValueError('Unknown content type: {}' .format(content_type)) log.msg("Payload: {}".format(payload)) return payload def handle_repo_refs_changed(self, payload): return self._handle_repo_refs_changed_common(payload) def handle_repo_push(self, payload): # repo:push works exactly like repo:refs_changed, but is no longer documented (not even # in the historical documentation of old versions of Bitbucket Server). The old code path # has been preserved for backwards compatibility. 
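# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): a minimal payload
# showing only the fields that _handle_repo_refs_changed_common() below reads.
# The real Bitbucket Server payload carries many more fields; every value here
# is hypothetical.
example_refs_changed_payload = {
    'actor': {'displayName': 'Alice', 'username': 'alice'},
    'repository': {
        'project': {'name': 'example-project'},
        'links': {'self': [{'href': 'https://bitbucket.example.org/projects/EX/repos/demo/browse'}]},
        'scmId': 'git',
    },
    'push': {
        'changes': [{
            # 'new' present -> a push; a null 'new' would mean a deleted ref
            'new': {'type': 'branch', 'name': 'master',
                    'target': {'hash': 'abcdef0123456789'}},
            'old': None,
        }],
    },
}
# ---------------------------------------------------------------------------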
return self._handle_repo_refs_changed_common(payload) def _handle_repo_refs_changed_common(self, payload): changes = [] project = payload['repository']['project']['name'] repo_url = payload['repository']['links']['self'][0]['href'] repo_url = repo_url.rstrip('browse') for payload_change in payload['push']['changes']: if payload_change['new']: age = 'new' category = 'push' else: # when new is null the ref is deleted age = 'old' category = 'ref-deleted' commit_hash = payload_change[age]['target']['hash'] if payload_change[age]['type'] == 'branch': branch = GIT_BRANCH_REF.format(payload_change[age]['name']) elif payload_change[age]['type'] == 'tag': branch = GIT_TAG_REF.format(payload_change[age]['name']) change = { 'revision': commit_hash, 'revlink': '{}commits/{}'.format(repo_url, commit_hash), 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['displayName'], payload['actor']['username']), 'comments': 'Bitbucket Server commit {}'.format(commit_hash), 'branch': branch, 'project': project, 'category': category } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return (changes, payload['repository']['scmId']) def handle_pullrequest_created(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-created") def handle_pullrequest_updated(self, payload): return self.handle_pullrequest( payload, GIT_MERGE_REF.format(int(payload['pullrequest']['id'])), "pull-updated") def handle_pullrequest_fulfilled(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['toRef']['branch']['name']), "pull-fulfilled") def handle_pullrequest_rejected(self, payload): return self.handle_pullrequest( payload, GIT_BRANCH_REF.format( payload['pullrequest']['fromRef']['branch']['name']), "pull-rejected") def handle_pullrequest(self, payload, refname, category): pr_number = int(payload['pullrequest']['id']) repo_url = payload['repository']['links']['self'][0]['href'] repo_url = repo_url.rstrip('browse') change = { 'revision': payload['pullrequest']['fromRef']['commit']['hash'], 'revlink': payload['pullrequest']['link'], 'repository': repo_url, 'author': '{} <{}>'.format(payload['actor']['displayName'], payload['actor']['username']), 'comments': 'Bitbucket Server Pull Request #{}'.format(pr_number), 'branch': refname, 'project': payload['repository']['project']['name'], 'category': category, 'properties': {'pullrequesturl': payload['pullrequest']['link']} } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase return [change], payload['repository']['scmId'] def getChanges(self, request): return self.process(request) bitbucketserver = BitbucketServerEventHandler buildbot-2.6.0/master/buildbot/www/hooks/github.py000066400000000000000000000323641361162603000222330ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. 
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import hmac import json import logging import re from hashlib import sha1 from dateutil.parser import parse as dateparse from twisted.internet import defer from twisted.python import log from buildbot.changes.github import PullRequestMixin from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util import httpclientservice from buildbot.util import unicode2bytes from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-GitHub-Event' _HEADER_SIGNATURE = b'X-Hub-Signature' DEFAULT_SKIPS_PATTERN = (r'\[ *skip *ci *\]', r'\[ *ci *skip *\]') DEFAULT_GITHUB_API_URL = 'https://api.github.com' class GitHubEventHandler(PullRequestMixin): def __init__(self, secret, strict, codebase=None, github_property_whitelist=None, master=None, skips=None, github_api_endpoint=None, pullrequest_ref=None, token=None, debug=False, verify=False): self._secret = secret self._strict = strict self._token = token self._codebase = codebase self.pullrequest_ref = pullrequest_ref self.github_property_whitelist = github_property_whitelist self.skips = skips self.github_api_endpoint = github_api_endpoint self.master = master if github_property_whitelist is None: self.github_property_whitelist = [] if skips is None: self.skips = DEFAULT_SKIPS_PATTERN if github_api_endpoint is None: self.github_api_endpoint = DEFAULT_GITHUB_API_URL if self._strict and not self._secret: raise ValueError('Strict mode is requested ' 'while no secret is provided') self.debug = debug self.verify = verify @defer.inlineCallbacks def process(self, request): payload = yield self._get_payload(request) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) log.msg("X-GitHub-Event: {}".format( event_type), logLevel=logging.DEBUG) handler = getattr(self, 'handle_{}'.format(event_type), None) if handler is None: raise ValueError('Unknown event: {}'.format(event_type)) result = yield defer.maybeDeferred(lambda: handler(payload, event_type)) return result @defer.inlineCallbacks def _get_payload(self, request): content = request.content.read() content = bytes2unicode(content) signature = request.getHeader(_HEADER_SIGNATURE) signature = bytes2unicode(signature) if not signature and self._strict: raise ValueError('Request has no required signature') if self._secret and signature: try: hash_type, hexdigest = signature.split('=') except ValueError: raise ValueError( 'Wrong signature format: {}'.format(signature)) if hash_type != 'sha1': raise ValueError('Unknown hash type: {}'.format(hash_type)) p = Properties() p.master = self.master rendered_secret = yield p.render(self._secret) mac = hmac.new(unicode2bytes(rendered_secret), msg=unicode2bytes(content), digestmod=sha1) def _cmp(a, b): try: # try the more secure compare_digest() first from hmac import compare_digest return compare_digest(a, b) except ImportError: # pragma: no cover # and fallback to the insecure simple comparison otherwise return a == b if not _cmp(bytes2unicode(mac.hexdigest()), hexdigest): raise ValueError('Hash mismatch') content_type = request.getHeader(b'Content-Type') if content_type == b'application/json': payload = json.loads(content) elif content_type == b'application/x-www-form-urlencoded': payload = 
json.loads(bytes2unicode(request.args[b'payload'][0])) else: raise ValueError('Unknown content type: {}'.format(content_type)) log.msg("Payload: {}".format(payload), logLevel=logging.DEBUG) return payload def handle_ping(self, _, __): return [], 'git' def handle_push(self, payload, event): # This field is unused: user = None # user = payload['pusher']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['html_url'] # NOTE: what would be a reasonable value for project? # project = request.args.get('project', [''])[0] project = payload['repository']['full_name'] # Inject some additional white-listed event payload properties properties = self.extractProperties(payload) changes = self._process_change(payload, user, repo, repo_url, project, event, properties) log.msg("Received {} changes from github".format(len(changes))) return changes, 'git' @defer.inlineCallbacks def handle_pull_request(self, payload, event): changes = [] number = payload['number'] refname = 'refs/pull/{}/{}'.format(number, self.pullrequest_ref) basename = payload['pull_request']['base']['ref'] commits = payload['pull_request']['commits'] title = payload['pull_request']['title'] comments = payload['pull_request']['body'] repo_full_name = payload['repository']['full_name'] head_sha = payload['pull_request']['head']['sha'] log.msg('Processing GitHub PR #{}'.format(number), logLevel=logging.DEBUG) head_msg = yield self._get_commit_msg(repo_full_name, head_sha) if self._has_skip(head_msg): log.msg("GitHub PR #{}, Ignoring: " "head commit message contains skip pattern".format(number)) return ([], 'git') action = payload.get('action') if action not in ('opened', 'reopened', 'synchronize'): log.msg("GitHub PR #{} {}, ignoring".format(number, action)) return (changes, 'git') properties = self.extractProperties(payload['pull_request']) properties.update({'event': event}) properties.update({'basename': basename}) change = { 'revision': payload['pull_request']['head']['sha'], 'when_timestamp': dateparse(payload['pull_request']['created_at']), 'branch': refname, 'revlink': payload['pull_request']['_links']['html']['href'], 'repository': payload['repository']['html_url'], 'project': payload['pull_request']['base']['repo']['full_name'], 'category': 'pull', # TODO: Get author name based on login id using txgithub module 'author': payload['sender']['login'], 'comments': 'GitHub Pull Request #{0} ({1} commit{2})\n{3}\n{4}'.format( number, commits, 's' if commits != 1 else '', title, comments), 'properties': properties, } if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) log.msg("Received {} changes from GitHub PR #{}".format( len(changes), number)) return (changes, 'git') @defer.inlineCallbacks def _get_commit_msg(self, repo, sha): ''' :param repo: the repo full name, ``{owner}/{project}``. e.g. 
``buildbot/buildbot`` ''' headers = { 'User-Agent': 'Buildbot' } if self._token: headers['Authorization'] = 'token ' + self._token url = '/repos/{}/commits/{}'.format(repo, sha) http = yield httpclientservice.HTTPClientService.getService( self.master, self.github_api_endpoint, headers=headers, debug=self.debug, verify=self.verify) res = yield http.get(url) data = yield res.json() msg = data.get('commit', {'message': 'No message field'})['message'] return msg def _process_change(self, payload, user, repo, repo_url, project, event, properties): """ Consumes the JSON as a python object and actually starts the build. :arguments: payload Python Object that represents the JSON sent by GitHub Service Hook. """ changes = [] refname = payload['ref'] # We only care about regular heads or tags match = re.match(r"^refs/(heads|tags)/(.+)$", refname) if not match: log.msg("Ignoring refname `{}': Not a branch".format(refname)) return changes category = None # None is the legacy category for when hook only supported push if match.group(1) == "tags": category = "tag" branch = match.group(2) if payload.get('deleted'): log.msg("Branch `{}' deleted, ignoring".format(branch)) return changes # check skip pattern in commit message. e.g.: [ci skip] and [skip ci] head_msg = payload['head_commit'].get('message', '') if self._has_skip(head_msg): return changes commits = payload['commits'] if payload.get('created'): commits = [payload['head_commit']] for commit in commits: files = [] for kind in ('added', 'modified', 'removed'): files.extend(commit.get(kind, [])) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: {}".format(commit['id'][:8])) change = { 'author': '{} <{}>'.format(commit['author']['name'], commit['author']['email']), 'committer': '{} <{}>'.format(commit['committer']['name'], commit['committer']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project, 'properties': { 'github_distinct': commit.get('distinct', True), 'event': event, }, 'category': category } # Update with any white-listed github event properties change['properties'].update(properties) if callable(self._codebase): change['codebase'] = self._codebase(payload) elif self._codebase is not None: change['codebase'] = self._codebase changes.append(change) return changes def _has_skip(self, msg): ''' The message contains the skipping keyword no not. :return type: Bool ''' for skip in self.skips: if re.search(skip, msg): return True return False # for GitHub, we do another level of indirection because # we already had documented API that encouraged people to subclass GitHubEventHandler # so we need to be careful not breaking that API. 
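# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): because GitHubHandler
# below looks the handler class up via options.get('class', GitHubEventHandler),
# a subclass of GitHubEventHandler can be plugged in from master.cfg.  The
# subclass name and the extra property are hypothetical.
class ExampleGitHubEventHandler(GitHubEventHandler):
    def handle_push(self, payload, event):
        changes, scm = super().handle_push(payload, event)
        for change in changes:
            # tag every change produced by this hook
            change['properties']['via_webhook'] = True
        return changes, scm

# in master.cfg:
#   c['www']['change_hook_dialects'] = {
#       'github': {'class': ExampleGitHubEventHandler,
#                  'secret': 'hook-secret', 'strict': True},
#   }
# ---------------------------------------------------------------------------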
class GitHubHandler(BaseHookHandler): def __init__(self, master, options): if options is None: options = {} super().__init__(master, options) klass = options.get('class', GitHubEventHandler) klass_kwargs = { 'master': master, 'codebase': options.get('codebase', None), 'github_property_whitelist': options.get('github_property_whitelist', None), 'skips': options.get('skips', None), 'github_api_endpoint': options.get('github_api_endpoint', None) or 'https://api.github.com', 'pullrequest_ref': options.get('pullrequest_ref', None) or 'merge', 'token': options.get('token', None), 'debug': options.get('debug', None) or False, 'verify': options.get('verify', None) or False, } handler = klass(options.get('secret', None), options.get('strict', False), **klass_kwargs) self.handler = handler def getChanges(self, request): return self.handler.process(request) github = GitHubHandler buildbot-2.6.0/master/buildbot/www/hooks/gitlab.py000066400000000000000000000174471361162603000222200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import re from dateutil.parser import parse as dateparse from twisted.internet.defer import inlineCallbacks from twisted.python import log from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler _HEADER_EVENT = b'X-Gitlab-Event' _HEADER_GITLAB_TOKEN = b'X-Gitlab-Token' class GitLabHandler(BaseHookHandler): def _process_change(self, payload, user, repo, repo_url, event, codebase=None): """ Consumes the JSON as a python object and actually starts the build. :arguments: payload Python Object that represents the JSON sent by GitLab Service Hook. """ changes = [] refname = payload['ref'] # project name from http headers is empty for me, so get it from repository/name project = payload['repository']['name'] # We only care about regular heads or tags match = re.match(r"^refs/(heads|tags)/(.+)$", refname) if not match: log.msg("Ignoring refname `%s': Not a branch" % refname) return changes branch = match.group(2) if payload.get('deleted'): log.msg("Branch `%s' deleted, ignoring" % branch) return changes for commit in payload['commits']: if not commit.get('distinct', True): log.msg('Commit `%s` is a non-distinct commit, ignoring...' 
% (commit['id'],)) continue files = [] for kind in ('added', 'modified', 'removed'): files.extend(commit.get(kind, [])) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: %s" % commit['id'][:8]) change = { 'author': '%s <%s>' % (commit['author']['name'], commit['author']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project, 'category': event, 'properties': { 'event': event, }, } if codebase is not None: change['codebase'] = codebase changes.append(change) return changes def _process_merge_request_change(self, payload, event, codebase=None): """ Consumes the merge_request JSON as a python object and turn it into a buildbot change. :arguments: payload Python Object that represents the JSON sent by GitLab Service Hook. """ attrs = payload['object_attributes'] commit = attrs['last_commit'] when_timestamp = dateparse(commit['timestamp']) # @todo provide and document a way to choose between http and ssh url repo_url = attrs['target']['git_http_url'] # project name from http headers is empty for me, so get it from object_attributes/target/name project = attrs['target']['name'] # Filter out uninteresting events state = attrs['state'] if re.match('^(closed|merged|approved)$', state): log.msg("GitLab MR#{}: Ignoring because state is {}".format(attrs['iid'], state)) return [] action = attrs['action'] if not re.match('^(open|reopen)$', action) and not (action == "update" and "oldrev" in attrs): log.msg("GitLab MR#{}: Ignoring because action {} was not open or " "reopen or an update that added code".format(attrs['iid'], action)) return [] changes = [{ 'author': '%s <%s>' % (commit['author']['name'], commit['author']['email']), 'files': [], # @todo use rest API 'comments': "MR#{}: {}\n\n{}".format(attrs['iid'], attrs['title'], attrs['description']), 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': attrs['target_branch'], 'repository': repo_url, 'project': project, 'category': event, 'revlink': attrs['url'], 'properties': { 'source_branch': attrs['source_branch'], 'source_project_id': attrs['source_project_id'], 'source_repository': attrs['source']['git_http_url'], 'source_git_ssh_url': attrs['source']['git_ssh_url'], 'target_branch': attrs['target_branch'], 'target_project_id': attrs['target_project_id'], 'target_repository': attrs['target']['git_http_url'], 'target_git_ssh_url': attrs['target']['git_ssh_url'], 'event': event, }, }] if codebase is not None: changes[0]['codebase'] = codebase return changes @inlineCallbacks def getChanges(self, request): """ Reponds only to POST events and starts the build process :arguments: request the http request object """ expected_secret = isinstance(self.options, dict) and self.options.get('secret') if expected_secret: received_secret = request.getHeader(_HEADER_GITLAB_TOKEN) received_secret = bytes2unicode(received_secret) p = Properties() p.master = self.master expected_secret_value = yield p.render(expected_secret) if received_secret != expected_secret_value: raise ValueError("Invalid secret") try: content = request.content.read() payload = json.loads(bytes2unicode(content)) except Exception as e: raise ValueError("Error loading JSON: " + str(e)) event_type = request.getHeader(_HEADER_EVENT) event_type = bytes2unicode(event_type) # newer version of gitlab have a object_kind parameter, # which allows not to use the http header event_type = payload.get('object_kind', 
event_type) codebase = request.args.get(b'codebase', [None])[0] codebase = bytes2unicode(codebase) if event_type in ("push", "tag_push", "Push Hook"): user = payload['user_name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] changes = self._process_change( payload, user, repo, repo_url, event_type, codebase=codebase) elif event_type == 'merge_request': changes = self._process_merge_request_change( payload, event_type, codebase=codebase) else: changes = [] if changes: log.msg("Received {} changes from {} gitlab event".format( len(changes), event_type)) return (changes, 'git') gitlab = GitLabHandler buildbot-2.6.0/master/buildbot/www/hooks/gitorious.py000066400000000000000000000055371361162603000227770ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # # note: this file is based on github.py import json import re from dateutil.parser import parse as dateparse from twisted.python import log from buildbot.util import bytes2unicode from buildbot.www.hooks.base import BaseHookHandler class GitoriousHandler(BaseHookHandler): def getChanges(self, request): payload = json.loads(bytes2unicode(request.args[b'payload'][0])) user = payload['repository']['owner']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] project = payload['project']['name'] changes = self.process_change(payload, user, repo, repo_url, project) log.msg("Received %s changes from gitorious" % len(changes)) return (changes, 'git') def process_change(self, payload, user, repo, repo_url, project): changes = [] newrev = payload['after'] branch = payload['ref'] if re.match(r"^0*$", newrev): log.msg("Branch `%s' deleted, ignoring" % branch) return [] else: for commit in payload['commits']: files = [] # Gitorious doesn't send these, maybe later # if 'added' in commit: # files.extend(commit['added']) # if 'modified' in commit: # files.extend(commit['modified']) # if 'removed' in commit: # files.extend(commit['removed']) when_timestamp = dateparse(commit['timestamp']) log.msg("New revision: %s" % commit['id'][:8]) changes.append({ 'author': '%s <%s>' % (commit['author']['name'], commit['author']['email']), 'files': files, 'comments': commit['message'], 'revision': commit['id'], 'when_timestamp': when_timestamp, 'branch': branch, 'revlink': commit['url'], 'repository': repo_url, 'project': project }) return changes gitorious = GitoriousHandler buildbot-2.6.0/master/buildbot/www/hooks/poller.py000066400000000000000000000043201361162603000222350ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
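# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the polling hook
# defined below lets an HTTP request trigger polling change sources by name,
# e.g. (hypothetical host and poller name):
#
#   curl -X POST 'http://buildbot.example.org:8010/change_hook/poller?poller=example-poller'
#
# Omitting the 'poller' argument pokes every PollingChangeSource; the dialect's
# options may also carry an allowed-poller list, as checked in getChanges() below.
# ---------------------------------------------------------------------------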
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # This change hook allows GitHub or a hand crafted curl invocation to "knock on # the door" and trigger a change source to poll. from buildbot.changes.base import PollingChangeSource from buildbot.util import bytes2unicode from buildbot.util import unicode2bytes from buildbot.www.hooks.base import BaseHookHandler class PollingHandler(BaseHookHandler): def getChanges(self, req): change_svc = req.site.master.change_svc poll_all = b"poller" not in req.args allow_all = True allowed = [] if isinstance(self.options, dict) and b"allowed" in self.options: allow_all = False allowed = self.options[b"allowed"] pollers = [] for source in change_svc: if not isinstance(source, PollingChangeSource): continue if not hasattr(source, "name"): continue if (not poll_all and unicode2bytes(source.name) not in req.args[b'poller']): continue if not allow_all and unicode2bytes(source.name) not in allowed: continue pollers.append(source) if not poll_all: missing = (set(req.args[b'poller']) - set(unicode2bytes(s.name) for s in pollers)) if missing: raise ValueError("Could not find pollers: {}".format( bytes2unicode(b",".join(missing)))) for p in pollers: p.force() return [], None poller = PollingHandler buildbot-2.6.0/master/buildbot/www/ldapuserinfo.py000066400000000000000000000144441361162603000223200ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members # NOTE regarding LDAP encodings: # # By default the encoding used in ldap3 is utf-8. The encoding is user-configurable, though. # For more information check ldap3's documentation on this topic: # http://ldap3.readthedocs.io/encoding.html # # It is recommended to use ldap3's auto-decoded `attributes` values for # `unicode` and `raw_*` attributes for `bytes`. 
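# --- Illustrative sketch (not part of the original module) -------------------
# The NOTE above is easiest to see side by side: ldap3 returns every search
# entry both auto-decoded (entry['attributes'], text) and undecoded
# (entry['raw_attributes'], bytes).  The attribute name 'jpegPhoto' below is
# only an example; the helper assumes a search has already been performed on
# the connection, exactly as search() in this module does.
def _example_decoded_vs_raw(connection, attribute='jpegPhoto'):
    entry = connection.response[0]
    decoded = entry['attributes'].get(attribute)      # str, or list of str
    raw = entry['raw_attributes'].get(attribute)      # bytes, or list of bytes
    return decoded, raw
# ------------------------------------------------------------------------------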
from urllib.parse import urlparse import ldap3 from twisted.internet import threads from buildbot.util import bytes2unicode from buildbot.util import flatten from buildbot.www import auth from buildbot.www import avatar class LdapUserInfo(avatar.AvatarBase, auth.UserInfoProviderBase): name = 'ldap' def __init__(self, uri, bindUser, bindPw, accountBase, accountPattern, accountFullName, accountEmail, groupBase=None, groupMemberPattern=None, groupName=None, avatarPattern=None, avatarData=None, accountExtraFields=None): self.uri = uri self.bindUser = bindUser self.bindPw = bindPw self.accountBase = accountBase self.accountEmail = accountEmail self.accountPattern = accountPattern self.accountFullName = accountFullName group_params = [p for p in (groupName, groupMemberPattern, groupBase) if p is not None] if len(group_params) not in (0, 3): raise ValueError( "Incomplete LDAP groups configuration. " "To use Ldap groups, you need to specify the three " "parameters (groupName, groupMemberPattern and groupBase). ") self.groupName = groupName self.groupMemberPattern = groupMemberPattern self.groupBase = groupBase self.avatarPattern = avatarPattern self.avatarData = avatarData if accountExtraFields is None: accountExtraFields = [] self.accountExtraFields = accountExtraFields self.ldap_encoding = ldap3.get_config_parameter('DEFAULT_SERVER_ENCODING') def connectLdap(self): server = urlparse(self.uri) netloc = server.netloc.split(":") # define the server and the connection s = ldap3.Server(netloc[0], port=int(netloc[1]), use_ssl=server.scheme == 'ldaps', get_info=ldap3.ALL) auth = ldap3.SIMPLE if self.bindUser is None and self.bindPw is None: auth = ldap3.ANONYMOUS c = ldap3.Connection(s, auto_bind=True, client_strategy=ldap3.SYNC, user=self.bindUser, password=self.bindPw, authentication=auth) return c def search(self, c, base, filterstr='f', attributes=None): c.search(base, filterstr, ldap3.SUBTREE, attributes=attributes) return c.response def getUserInfo(self, username): username = bytes2unicode(username) def thd(): c = self.connectLdap() infos = {'username': username} pattern = self.accountPattern % dict(username=username) res = self.search(c, self.accountBase, pattern, attributes=[ self.accountEmail, self.accountFullName] + self.accountExtraFields) if len(res) != 1: raise KeyError( "ldap search \"%s\" returned %d results" % (pattern, len(res))) dn, ldap_infos = res[0]['dn'], res[0]['attributes'] def getFirstLdapInfo(x): if isinstance(x, list): x = x[0] if x else None return x infos['full_name'] = getFirstLdapInfo(ldap_infos[self.accountFullName]) infos['email'] = getFirstLdapInfo(ldap_infos[self.accountEmail]) for f in self.accountExtraFields: if f in ldap_infos: infos[f] = getFirstLdapInfo(ldap_infos[f]) if self.groupMemberPattern is None: infos['groups'] = [] return infos # needs double quoting of backslashing pattern = self.groupMemberPattern % dict(dn=dn) res = self.search(c, self.groupBase, pattern, attributes=[self.groupName]) infos['groups'] = flatten([group_infos['attributes'][self.groupName] for group_infos in res]) return infos return threads.deferToThread(thd) def findAvatarMime(self, data): # http://en.wikipedia.org/wiki/List_of_file_signatures if data.startswith(b"\xff\xd8\xff"): return ("image/jpeg", data) if data.startswith(b"\x89PNG"): return ("image/png", data) if data.startswith(b"GIF8"): return ("image/gif", data) # ignore unknown image format return None def getUserAvatar(self, user_email, size, defaultAvatarUrl): user_email = bytes2unicode(user_email) def thd(): c = 
self.connectLdap() pattern = self.avatarPattern % dict(email=user_email) res = self.search(c, self.accountBase, pattern, attributes=[self.avatarData]) if not res: return None ldap_infos = res[0]['raw_attributes'] if self.avatarData in ldap_infos and ldap_infos[self.avatarData]: data = ldap_infos[self.avatarData][0] return self.findAvatarMime(data) return None return threads.deferToThread(thd) buildbot-2.6.0/master/buildbot/www/oauth2.py000066400000000000000000000351761361162603000210340ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import re import textwrap from posixpath import join from urllib.parse import parse_qs from urllib.parse import urlencode import jinja2 import requests from twisted.internet import defer from twisted.internet import threads from buildbot import config from buildbot.process.properties import Properties from buildbot.util import bytes2unicode from buildbot.util.logger import Logger from buildbot.www import auth from buildbot.www import resource log = Logger() class OAuth2LoginResource(auth.LoginResource): # disable reconfigResource calls needsReconfig = False def __init__(self, master, _auth): super().__init__(master) self.auth = _auth def render_POST(self, request): return self.asyncRenderHelper(request, self.renderLogin) @defer.inlineCallbacks def renderLogin(self, request): code = request.args.get(b"code", [b""])[0] if not code: url = request.args.get(b"redirect", [None])[0] url = yield self.auth.getLoginURL(url) raise resource.Redirect(url) details = yield self.auth.verifyCode(code) if self.auth.userInfoProvider is not None: infos = yield self.auth.userInfoProvider.getUserInfo(details['username']) details.update(infos) session = request.getSession() session.user_info = details session.updateSession(request) state = request.args.get(b"state", [b""])[0] if state: for redirect in parse_qs(state).get('redirect', []): raise resource.Redirect(self.auth.homeUri + "#" + redirect) raise resource.Redirect(self.auth.homeUri) class OAuth2Auth(auth.AuthBase): name = 'oauth2' getTokenUseAuthHeaders = False authUri = None tokenUri = None grantType = 'authorization_code' authUriAdditionalParams = {} tokenUriAdditionalParams = {} loginUri = None homeUri = None sslVerify = None def __init__(self, clientId, clientSecret, autologin=False, **kwargs): super().__init__(**kwargs) self.clientId = clientId self.clientSecret = clientSecret self.autologin = autologin def reconfigAuth(self, master, new_config): self.master = master self.loginUri = join(new_config.buildbotURL, "auth/login") self.homeUri = new_config.buildbotURL def getConfigDict(self): return dict(name=self.name, oauth2=True, fa_icon=self.faIcon, autologin=self.autologin ) def getLoginResource(self): return OAuth2LoginResource(self.master, self) @defer.inlineCallbacks def getLoginURL(self, redirect_url): """ Returns the url to redirect the 
user to for user consent """ p = Properties() p.master = self.master clientId = yield p.render(self.clientId) oauth_params = {'redirect_uri': self.loginUri, 'client_id': clientId, 'response_type': 'code'} if redirect_url is not None: oauth_params['state'] = urlencode(dict(redirect=redirect_url)) oauth_params.update(self.authUriAdditionalParams) sorted_oauth_params = sorted(oauth_params.items(), key=lambda val: val[0]) return "%s?%s" % (self.authUri, urlencode(sorted_oauth_params)) def createSessionFromToken(self, token): s = requests.Session() s.params = {'access_token': token['access_token']} s.verify = self.sslVerify return s def get(self, session, path): ret = session.get(self.resourceEndpoint + path) return ret.json() # based on https://github.com/maraujop/requests-oauth # from Miguel Araujo, augmented to support header based clientSecret # passing @defer.inlineCallbacks def verifyCode(self, code): # everything in deferToThread is not counted with trial --coverage :-( def thd(client_id, client_secret): url = self.tokenUri data = {'redirect_uri': self.loginUri, 'code': code, 'grant_type': self.grantType} auth = None if self.getTokenUseAuthHeaders: auth = (client_id, client_secret) else: data.update( {'client_id': client_id, 'client_secret': client_secret}) data.update(self.tokenUriAdditionalParams) response = requests.post( url, data=data, auth=auth, verify=self.sslVerify) response.raise_for_status() responseContent = bytes2unicode(response.content) try: content = json.loads(responseContent) except ValueError: content = parse_qs(responseContent) for k, v in content.items(): content[k] = v[0] except TypeError: content = responseContent session = self.createSessionFromToken(content) return self.getUserInfoFromOAuthClient(session) p = Properties() p.master = self.master client_id = yield p.render(self.clientId) client_secret = yield p.render(self.clientSecret) result = yield threads.deferToThread(thd, client_id, client_secret) return result def getUserInfoFromOAuthClient(self, c): return {} class GoogleAuth(OAuth2Auth): name = "Google" faIcon = "fa-google-plus" resourceEndpoint = "https://www.googleapis.com/oauth2/v1" authUri = 'https://accounts.google.com/o/oauth2/auth' tokenUri = 'https://accounts.google.com/o/oauth2/token' authUriAdditionalParams = dict(scope=" ".join([ 'https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile' ])) def getUserInfoFromOAuthClient(self, c): data = self.get(c, '/userinfo') return dict(full_name=data["name"], username=data['email'].split("@")[0], email=data["email"], avatar_url=data["picture"]) class GitHubAuth(OAuth2Auth): name = "GitHub" faIcon = "fa-github" authUri = 'https://github.com/login/oauth/authorize' authUriAdditionalParams = {'scope': 'user:email read:org'} tokenUri = 'https://github.com/login/oauth/access_token' resourceEndpoint = 'https://api.github.com' getUserTeamsGraphqlTpl = textwrap.dedent(r''' {%- if organizations %} query getOrgTeamMembership { {%- for org_slug, org_name in organizations.items() %} {{ org_slug }}: organization(login: "{{ org_name }}") { teams(first: 100 userLogins: ["{{ user_info.username }}"]) { edges { node { name, slug } } } } {%- endfor %} } {%- endif %} ''') def __init__(self, clientId, clientSecret, serverURL=None, autologin=False, apiVersion=3, getTeamsMembership=False, debug=False, **kwargs): super().__init__(clientId, clientSecret, autologin, **kwargs) if serverURL is not None: # setup for enterprise github if serverURL.endswith("/"): serverURL = serverURL[:-1] # v3 is 
accessible directly at /api/v3 for enterprise, but directly for SaaS.. self.resourceEndpoint = serverURL + '/api/v3' self.authUri = '{0}/login/oauth/authorize'.format(serverURL) self.tokenUri = '{0}/login/oauth/access_token'.format(serverURL) self.serverURL = serverURL or self.resourceEndpoint if apiVersion not in (3, 4): config.error( 'GitHubAuth apiVersion must be 3 or 4 not {}'.format( apiVersion)) self.apiVersion = apiVersion if apiVersion == 3: if getTeamsMembership is True: config.error( 'Retrieving team membership information using GitHubAuth is only ' 'possible using GitHub api v4.') else: self.apiResourceEndpoint = self.serverURL + '/graphql' if getTeamsMembership: # GraphQL name aliases must comply with /^[_a-zA-Z][_a-zA-Z0-9]*$/ self._orgname_slug_sub_re = re.compile(r'[^_a-zA-Z0-9]') self.getUserTeamsGraphqlTplC = jinja2.Template( self.getUserTeamsGraphqlTpl.strip()) self.getTeamsMembership = getTeamsMembership self.debug = debug def post(self, session, query): if self.debug: log.info('{klass} GraphQL POST Request: {endpoint} -> ' 'DATA:\n----\n{data}\n----', klass=self.__class__.__name__, endpoint=self.apiResourceEndpoint, data=query) ret = session.post(self.apiResourceEndpoint, json={'query': query}) return ret.json() def getUserInfoFromOAuthClient(self, c): if self.apiVersion == 3: return self.getUserInfoFromOAuthClient_v3(c) return self.getUserInfoFromOAuthClient_v4(c) def getUserInfoFromOAuthClient_v3(self, c): user = self.get(c, '/user') emails = self.get(c, '/user/emails') for email in emails: if email.get('primary', False): user['email'] = email['email'] break orgs = self.get(c, '/user/orgs') return dict(full_name=user['name'], email=user['email'], username=user['login'], groups=[org['login'] for org in orgs]) def getUserInfoFromOAuthClient_v4(self, c): graphql_query = textwrap.dedent(''' query { viewer { email login name organizations(first: 100) { edges { node { login } } } } } ''') data = self.post(c, graphql_query.strip()) data = data['data'] if self.debug: log.info('{klass} GraphQL Response: {response}', klass=self.__class__.__name__, response=data) user_info = dict(full_name=data['viewer']['name'], email=data['viewer']['email'], username=data['viewer']['login'], groups=[org['node']['login'] for org in data['viewer']['organizations']['edges']]) if self.getTeamsMembership: orgs_name_slug_mapping = { self._orgname_slug_sub_re.sub('_', n): n for n in user_info['groups']} graphql_query = self.getUserTeamsGraphqlTplC.render( {'user_info': user_info, 'organizations': orgs_name_slug_mapping}) if graphql_query: data = self.post(c, graphql_query) if self.debug: log.info('{klass} GraphQL Response: {response}', klass=self.__class__.__name__, response=data) teams = set() for org, team_data in data['data'].items(): if team_data is None: # Organizations can have OAuth App access restrictions enabled, # disallowing team data access to third-parties. continue for node in team_data['teams']['edges']: # On github we can mentions organization teams like # @org-name/team-name. 
Let's keep the team formatting # identical with the inclusion of the organization # since different organizations might share a common # team name teams.add('%s/%s' % (orgs_name_slug_mapping[org], node['node']['name'])) teams.add('%s/%s' % (orgs_name_slug_mapping[org], node['node']['slug'])) user_info['groups'].extend(sorted(teams)) if self.debug: log.info('{klass} User Details: {user_info}', klass=self.__class__.__name__, user_info=user_info) return user_info class GitLabAuth(OAuth2Auth): name = "GitLab" faIcon = "fa-git" def __init__(self, instanceUri, clientId, clientSecret, **kwargs): uri = instanceUri.rstrip("/") self.authUri = "%s/oauth/authorize" % uri self.tokenUri = "%s/oauth/token" % uri self.resourceEndpoint = "%s/api/v4" % uri super(GitLabAuth, self).__init__(clientId, clientSecret, **kwargs) def getUserInfoFromOAuthClient(self, c): user = self.get(c, "/user") groups = self.get(c, "/groups") return dict(full_name=user["name"], username=user["username"], email=user["email"], avatar_url=user["avatar_url"], groups=[g["path"] for g in groups]) class BitbucketAuth(OAuth2Auth): name = "Bitbucket" faIcon = "fa-bitbucket" authUri = 'https://bitbucket.org/site/oauth2/authorize' tokenUri = 'https://bitbucket.org/site/oauth2/access_token' resourceEndpoint = 'https://api.bitbucket.org/2.0' def getUserInfoFromOAuthClient(self, c): user = self.get(c, '/user') emails = self.get(c, '/user/emails') for email in emails["values"]: if email.get('is_primary', False): user['email'] = email['email'] break orgs = self.get(c, '/teams?role=member') return dict(full_name=user['display_name'], email=user['email'], username=user['username'], groups=[org['username'] for org in orgs["values"]]) buildbot-2.6.0/master/buildbot/www/plugin.py000066400000000000000000000030361361162603000211160ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import pkg_resources from twisted.web import static from buildbot.util import bytes2unicode class Application: def __init__(self, modulename, description, ui=True): self.description = description self.version = pkg_resources.resource_string( modulename, "VERSION").strip() self.version = bytes2unicode(self.version) self.static_dir = pkg_resources.resource_filename( modulename, "static") self.resource = static.File(self.static_dir) self.ui = ui def setMaster(self, master): self.master = master def setConfiguration(self, config): self.config = config def __repr__(self): return ("www.plugin.Application(version=%(version)s, " "description=%(description)s, " "static_dir=%(static_dir)s)") % self.__dict__ buildbot-2.6.0/master/buildbot/www/resource.py000066400000000000000000000074461361162603000214600ustar00rootroot00000000000000# This file is part of Buildbot. 
Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import re from twisted.internet import defer from twisted.python import log from twisted.web import resource from twisted.web import server from twisted.web.error import Error from buildbot.util import unicode2bytes _CR_LF_RE = re.compile(br"[\r\n]+.*") def protect_redirect_url(url): return _CR_LF_RE.sub(b"", url) class Redirect(Error): def __init__(self, url): super().__init__(302, "redirect") self.url = protect_redirect_url(unicode2bytes(url)) class Resource(resource.Resource): # if this is true for a class, then instances will have their # reconfigResource(new_config) methods called on reconfig. needsReconfig = False # as a convenience, subclasses have a ``master`` attribute, a # ``base_url`` attribute giving Buildbot's base URL, # and ``static_url`` attribute giving Buildbot's static files URL @property def base_url(self): return self.master.config.buildbotURL def __init__(self, master): super().__init__() self.master = master if self.needsReconfig and master is not None: master.www.resourceNeedsReconfigs(self) def reconfigResource(self, new_config): raise NotImplementedError def asyncRenderHelper(self, request, _callable, writeError=None): def writeErrorDefault(msg, errcode=400): request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') request.write(msg) request.finish() if writeError is None: writeError = writeErrorDefault try: d = _callable(request) except Exception as e: d = defer.fail(e) @d.addCallback def finish(s): try: if s is not None: request.write(s) request.finish() except RuntimeError: # pragma: no cover # this occurs when the client has already disconnected; ignore # it (see #2027) log.msg("http client disconnected before results were sent") @d.addErrback def failHttpRedirect(f): f.trap(Redirect) request.redirect(f.value.url) request.finish() return None @d.addErrback def failHttpError(f): f.trap(Error) e = f.value message = unicode2bytes(e.message) writeError(message, errcode=int(e.status)) @d.addErrback def fail(f): log.err(f, 'While rendering resource:') try: writeError(b'internal error - see logs', errcode=500) except Exception: try: request.finish() except Exception: pass return server.NOT_DONE_YET class RedirectResource(Resource): def __init__(self, master, basepath): super().__init__(master) self.basepath = basepath def render(self, request): redir = self.base_url + self.basepath request.redirect(protect_redirect_url(redir)) return redir buildbot-2.6.0/master/buildbot/www/rest.py000066400000000000000000000511041361162603000205740ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. 
# # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import cgi import datetime import fnmatch import json import re from contextlib import contextmanager from urllib.parse import urlparse from twisted.internet import defer from twisted.python import log from twisted.web.error import Error from buildbot.data import exceptions from buildbot.data import resultspec from buildbot.util import bytes2unicode from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.www import resource from buildbot.www.authz import Forbidden class BadRequest(Exception): pass class BadJsonRpc2(Exception): def __init__(self, message, jsonrpccode): self.message = message self.jsonrpccode = jsonrpccode class ContentTypeParser: def __init__(self, contenttype): self.typeheader = contenttype def gettype(self): mimetype, options = cgi.parse_header( bytes2unicode(self.typeheader)) return mimetype URL_ENCODED = b"application/x-www-form-urlencoded" JSON_ENCODED = b"application/json" class RestRootResource(resource.Resource): version_classes = {} @classmethod def addApiVersion(cls, version, version_cls): cls.version_classes[version] = version_cls version_cls.apiVersion = version def __init__(self, master): super().__init__(master) min_vers = master.config.www.get('rest_minimum_version', 0) latest = max(list(self.version_classes)) for version, klass in self.version_classes.items(): if version < min_vers: continue child = klass(master) child_path = 'v{}'.format(version) child_path = unicode2bytes(child_path) self.putChild(child_path, child) if version == latest: self.putChild(b'latest', child) def render(self, request): request.setHeader(b"content-type", JSON_ENCODED) min_vers = self.master.config.www.get('rest_minimum_version', 0) api_versions = dict(('v%d' % v, '%sapi/v%d' % (self.base_url, v)) for v in self.version_classes if v > min_vers) data = json.dumps(dict(api_versions=api_versions)) return unicode2bytes(data) JSONRPC_CODES = dict(parse_error=-32700, invalid_request=-32600, method_not_found=-32601, invalid_params=-32602, internal_error=-32603) class V2RootResource(resource.Resource): # For GETs, this API follows http://jsonapi.org. The getter API does not # permit create, update, or delete, so this is limited to reading. # # Data API control methods can be invoked via a POST to the appropriate # URL. 
These follow http://www.jsonrpc.org/specification, with a few # limitations: # - params as list is not supported # - rpc call batching is not supported # - jsonrpc2 notifications are not supported (you always get an answer) # rather than construct the entire possible hierarchy of Rest resources, # this is marked as a leaf node, and any remaining path items are parsed # during rendering isLeaf = True # enable reconfigResource calls needsReconfig = True @defer.inlineCallbacks def getEndpoint(self, request, method, params): # note that trailing slashes are not allowed request_postpath = tuple(bytes2unicode(p) for p in request.postpath) yield self.master.www.assertUserAllowed(request, request_postpath, method, params) ret = yield self.master.data.getEndpoint(request_postpath) return ret @contextmanager def handleErrors(self, writeError): try: yield except exceptions.InvalidPathError as e: msg = unicode2bytes(e.args[0]) writeError(msg or b"invalid path", errcode=404, jsonrpccode=JSONRPC_CODES['invalid_request']) return except exceptions.InvalidControlException as e: msg = unicode2bytes(str(e)) writeError(msg or b"invalid control action", errcode=501, jsonrpccode=JSONRPC_CODES["method_not_found"]) return except BadRequest as e: msg = unicode2bytes(e.args[0]) writeError(msg or b"invalid request", errcode=400, jsonrpccode=JSONRPC_CODES["method_not_found"]) return except BadJsonRpc2 as e: msg = unicode2bytes(e.message) writeError(msg, errcode=400, jsonrpccode=e.jsonrpccode) return except Forbidden as e: # There is nothing in jsonrc spec about forbidden error, so pick # invalid request msg = unicode2bytes(e.message) writeError( msg, errcode=403, jsonrpccode=JSONRPC_CODES["invalid_request"]) return except Exception as e: log.err(_why='while handling API request') msg = unicode2bytes(repr(e)) writeError(repr(e), errcode=500, jsonrpccode=JSONRPC_CODES["internal_error"]) return # JSONRPC2 support def decodeJsonRPC2(self, request): # Verify the content-type. Browsers are easily convinced to send # POST data to arbitrary URLs via 'form' elements, but they won't # use the application/json content-type. 
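# --- Illustrative sketch (not part of the original source) -------------------
# A request accepted by the checks below must be POSTed with
# "Content-Type: application/json" and a body shaped like
# {"jsonrpc": "2.0", "method": ..., "id": ..., "params": {...}}.
# The method name and params used here are hypothetical; requests.post()
# with json= sets the required content type automatically.
def _example_jsonrpc2_request(url):
    import requests
    body = {"jsonrpc": "2.0", "method": "force", "id": 1,
            "params": {"builderid": 1, "reason": "example"}}
    return requests.post(url, json=body)
# ------------------------------------------------------------------------------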
if ContentTypeParser(request.getHeader(b'content-type')).gettype() != "application/json": raise BadJsonRpc2('Invalid content-type (use application/json)', JSONRPC_CODES["invalid_request"]) try: data = json.loads(bytes2unicode(request.content.read())) except Exception as e: raise BadJsonRpc2("JSON parse error: %s" % (str(e),), JSONRPC_CODES["parse_error"]) if isinstance(data, list): raise BadJsonRpc2("JSONRPC batch requests are not supported", JSONRPC_CODES["invalid_request"]) if not isinstance(data, dict): raise BadJsonRpc2("JSONRPC root object must be an object", JSONRPC_CODES["invalid_request"]) def check(name, types, typename): if name not in data: raise BadJsonRpc2("missing key '%s'" % (name,), JSONRPC_CODES["invalid_request"]) if not isinstance(data[name], types): raise BadJsonRpc2("'%s' must be %s" % (name, typename), JSONRPC_CODES["invalid_request"]) check("jsonrpc", (str,), "a string") check("method", (str,), "a string") check("id", (str, int, type(None)), "a string, number, or null") check("params", (dict,), "an object") if data['jsonrpc'] != '2.0': raise BadJsonRpc2("only JSONRPC 2.0 is supported", JSONRPC_CODES['invalid_request']) return data["method"], data["id"], data['params'] @defer.inlineCallbacks def renderJsonRpc(self, request): jsonRpcReply = {'jsonrpc': "2.0"} def writeError(msg, errcode=399, jsonrpccode=JSONRPC_CODES["internal_error"]): if isinstance(msg, bytes): msg = bytes2unicode(msg) if self.debug: log.msg("JSONRPC error: %s" % (msg,)) request.setResponseCode(errcode) request.setHeader(b'content-type', JSON_ENCODED) if "error" not in jsonRpcReply: # already filled in by caller jsonRpcReply['error'] = dict(code=jsonrpccode, message=msg) data = json.dumps(jsonRpcReply) data = unicode2bytes(data) request.write(data) with self.handleErrors(writeError): method, id, params = self.decodeJsonRPC2(request) jsonRpcReply['id'] = id ep, kwargs = yield self.getEndpoint(request, method, params) userinfos = self.master.www.getUserInfos(request) if 'anonymous' in userinfos and userinfos['anonymous']: owner = "anonymous" else: owner = userinfos['email'] params['owner'] = owner result = yield ep.control(method, params, kwargs) jsonRpcReply['result'] = result data = json.dumps(jsonRpcReply, default=toJson, sort_keys=True, separators=(',', ':')) request.setHeader(b'content-type', JSON_ENCODED) if request.method == b"HEAD": request.setHeader(b"content-length", unicode2bytes(str(len(data)))) request.write(b'') else: data = unicode2bytes(data) request.write(data) # JSONAPI support def decodeResultSpec(self, request, endpoint): reqArgs = request.args def checkFields(fields, negOk=False): for field in fields: k = bytes2unicode(field) if k[0] == '-' and negOk: k = k[1:] if k not in entityType.fieldNames: raise BadRequest("no such field '{}'".format(k)) entityType = endpoint.rtype.entityType limit = offset = order = fields = None filters, properties = [], [] for arg in reqArgs: argStr = bytes2unicode(arg) if arg == b'order': order = tuple([bytes2unicode(o) for o in reqArgs[arg]]) checkFields(order, True) elif arg == b'field': fields = reqArgs[arg] checkFields(fields, False) elif arg == b'limit': try: limit = int(reqArgs[arg][0]) except Exception: raise BadRequest('invalid limit') elif arg == b'offset': try: offset = int(reqArgs[arg][0]) except Exception: raise BadRequest('invalid offset') elif arg == b'property': try: props = [] for v in reqArgs[arg]: if not isinstance(v, (bytes, str)): raise TypeError( "Invalid type {} for {}".format(type(v), v)) props.append(bytes2unicode(v)) except 
Exception: raise BadRequest( 'invalid property value for {}'.format(arg)) properties.append(resultspec.Property(arg, 'eq', props)) elif argStr in entityType.fieldNames: field = entityType.fields[argStr] try: values = [field.valueFromString(v) for v in reqArgs[arg]] except Exception: raise BadRequest( 'invalid filter value for {}'.format(argStr)) filters.append(resultspec.Filter(argStr, 'eq', values)) elif '__' in argStr: field, op = argStr.rsplit('__', 1) args = reqArgs[arg] operators = (resultspec.Filter.singular_operators if len(args) == 1 else resultspec.Filter.plural_operators) if op in operators and field in entityType.fieldNames: fieldType = entityType.fields[field] try: values = [fieldType.valueFromString(v) for v in reqArgs[arg]] except Exception: raise BadRequest( 'invalid filter value for {}'.format(argStr)) filters.append(resultspec.Filter(field, op, values)) else: raise BadRequest( "unrecognized query parameter '{}'".format(argStr)) # if ordering or filtering is on a field that's not in fields, bail out if fields: fields = [bytes2unicode(f) for f in fields] fieldsSet = set(fields) if order and {o.lstrip('-') for o in order} - fieldsSet: raise BadRequest("cannot order on un-selected fields") for filter in filters: if filter.field not in fieldsSet: raise BadRequest("cannot filter on un-selected fields") # build the result spec rspec = resultspec.ResultSpec(fields=fields, limit=limit, offset=offset, order=order, filters=filters, properties=properties) # for singular endpoints, only allow fields if not endpoint.isCollection: if rspec.filters: raise BadRequest("this is not a collection") return rspec def encodeRaw(self, data, request): request.setHeader(b"content-type", unicode2bytes(data['mime-type']) + b'; charset=utf-8') request.setHeader(b"content-disposition", b'attachment; filename=' + unicode2bytes(data['filename'])) request.write(unicode2bytes(data['raw'])) return @defer.inlineCallbacks def renderRest(self, request): def writeError(msg, errcode=404, jsonrpccode=None): if self.debug: log.msg("REST error: %s" % (msg,)) request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') msg = bytes2unicode(msg) data = json.dumps(dict(error=msg)) data = unicode2bytes(data) request.write(data) with self.handleErrors(writeError): ep, kwargs = yield self.getEndpoint(request, bytes2unicode(request.method), {}) rspec = self.decodeResultSpec(request, ep) data = yield ep.get(rspec, kwargs) if data is None: msg = ("not found while getting from {} with " "arguments {} and {}").format(repr(ep), repr(rspec), str(kwargs)) msg = unicode2bytes(msg) writeError(msg, errcode=404) return if ep.isRaw: self.encodeRaw(data, request) return # post-process any remaining parts of the resultspec data = rspec.apply(data) # annotate the result with some metadata meta = {} if ep.isCollection: offset, total = data.offset, data.total if offset is None: offset = 0 # add total, if known if total is not None: meta['total'] = total # get the real list instance out of the ListResult data = data.data else: data = [data] typeName = ep.rtype.plural data = { typeName: data, 'meta': meta } # set up the content type and formatting options; if the request # accepts text/html or text/plain, the JSON will be rendered in a # readable, multiline format. 
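# --- Illustrative sketch (not part of the original source) -------------------
# The two json.dumps() calls below differ only in presentation; with made-up
# data the compact and readable forms look like this:
def _example_compact_vs_readable():
    import json
    data = {'builders': [{'builderid': 1}], 'meta': {'total': 1}}
    compact = json.dumps(data, sort_keys=True, separators=(',', ':'))
    readable = json.dumps(data, sort_keys=True, indent=2)
    return compact, readable   # single-line string vs indented multiline string
# ------------------------------------------------------------------------------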
if b'application/json' in (request.getHeader(b'accept') or b''): compact = True request.setHeader(b"content-type", b'application/json; charset=utf-8') else: compact = False request.setHeader(b"content-type", b'text/plain; charset=utf-8') # set up caching if self.cache_seconds: now = datetime.datetime.utcnow() expires = now + datetime.timedelta(seconds=self.cache_seconds) expiresBytes = unicode2bytes( expires.strftime("%a, %d %b %Y %H:%M:%S GMT")) request.setHeader(b"Expires", expiresBytes) request.setHeader(b"Pragma", b"no-cache") # filter out blanks if necessary and render the data if compact: data = json.dumps(data, default=toJson, sort_keys=True, separators=(',', ':')) else: data = json.dumps(data, default=toJson, sort_keys=True, indent=2) if request.method == b"HEAD": request.setHeader(b"content-length", unicode2bytes(str(len(data)))) else: data = unicode2bytes(data) request.write(data) def reconfigResource(self, new_config): # buildbotURL may contain reverse proxy path, Origin header is just # scheme + host + port buildbotURL = urlparse(unicode2bytes(new_config.buildbotURL)) origin_self = buildbotURL.scheme + b"://" + buildbotURL.netloc # pre-translate the origin entries in the config self.origins = [] for o in new_config.www.get('allowed_origins', [origin_self]): origin = bytes2unicode(o).lower() self.origins.append(re.compile(fnmatch.translate(origin))) # and copy some other flags self.debug = new_config.www.get('debug') self.cache_seconds = new_config.www.get('json_cache_seconds', 0) def render(self, request): def writeError(msg, errcode=400): msg = bytes2unicode(msg) if self.debug: log.msg("HTTP error: %s" % (msg,)) request.setResponseCode(errcode) request.setHeader(b'content-type', b'text/plain; charset=utf-8') if request.method == b'POST': # jsonRPC callers want the error message in error.message data = json.dumps(dict(error=dict(message=msg))) data = unicode2bytes(data) request.write(data) else: data = json.dumps(dict(error=msg)) data = unicode2bytes(data) request.write(data) request.finish() return self.asyncRenderHelper(request, self.asyncRender, writeError) @defer.inlineCallbacks def asyncRender(self, request): # Handle CORS, if necessary. origins = self.origins if origins is not None: isPreflight = False reqOrigin = request.getHeader(b'origin') if reqOrigin: err = None reqOrigin = reqOrigin.lower() if not any(o.match(bytes2unicode(reqOrigin)) for o in self.origins): err = b"invalid origin" elif request.method == b'OPTIONS': preflightMethod = request.getHeader( b'access-control-request-method') if preflightMethod not in (b'GET', b'POST', b'HEAD'): err = b'invalid method' isPreflight = True if err: raise Error(400, err) # If it's OK, then let the browser know we checked it out. The # Content-Type header is included here because CORS considers # content types other than form data and text/plain to not be # simple. 
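# --- Illustrative sketch (not part of the original source) -------------------
# reconfigResource() above turns each entry of the allowed_origins www setting
# into a regex with fnmatch.translate(); this is the kind of match the Origin
# header has to pass before the headers below are sent.  The origin and
# pattern values here are examples only.
def _example_origin_allowed(origin="https://ci.example.com"):
    import fnmatch
    import re
    pattern = re.compile(fnmatch.translate("https://*.example.com".lower()))
    return bool(pattern.match(origin.lower()))
# ------------------------------------------------------------------------------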
request.setHeader(b"access-control-allow-origin", reqOrigin) request.setHeader(b"access-control-allow-headers", b"Content-Type") request.setHeader(b"access-control-max-age", b'3600') # if this was a preflight request, we're done if isPreflight: return b"" # based on the method, this is either JSONRPC or REST if request.method == b'POST': res = yield self.renderJsonRpc(request) elif request.method in (b'GET', b'HEAD'): res = yield self.renderRest(request) else: raise Error(400, b"invalid HTTP method") return res RestRootResource.addApiVersion(2, V2RootResource) buildbot-2.6.0/master/buildbot/www/service.py000066400000000000000000000347341361162603000212710ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import calendar import datetime import os from binascii import hexlify import jwt from twisted.application import strports from twisted.cred.portal import IRealm from twisted.cred.portal import Portal from twisted.internet import defer from twisted.python import components from twisted.python import log from twisted.python.logfile import LogFile from twisted.web import guard from twisted.web import resource from twisted.web import server from zope.interface import implementer from buildbot.plugins.db import get_plugins from buildbot.util import bytes2unicode from buildbot.util import service from buildbot.util import unicode2bytes from buildbot.www import auth from buildbot.www import avatar from buildbot.www import change_hook from buildbot.www import config as wwwconfig from buildbot.www import rest from buildbot.www import sse from buildbot.www import ws # as per: http://security.stackexchange.com/questions/95972/what-are-requirements-for-hmac-secret-key # we need 128 bit key for HS256 SESSION_SECRET_LENGTH = 128 SESSION_SECRET_ALGORITHM = "HS256" class BuildbotSession(server.Session): # We deviate a bit from the twisted API in order to implement that. # We keep it a subclass of server.Session (to be safe against isinstance), # but we re implement all its API. # But as there is no support in twisted web for clustered session management, this leaves # us with few choice. expDelay = datetime.timedelta(weeks=1) def __init__(self, site, token=None): """ Initialize a session with a unique ID for that session. """ self.site = site assert self.site.session_secret is not None, "site.session_secret is not configured yet!" # Cannot use super() here as it would call server.Session.__init__ # which we explicitly want to override. 
However, we still want to call # server.Session parent class constructor components.Componentized.__init__(self) if token: self._fromToken(token) else: self._defaultValue() def _defaultValue(self): self.user_info = {"anonymous": True} def _fromToken(self, token): try: decoded = jwt.decode(token, self.site.session_secret, algorithms=[ SESSION_SECRET_ALGORITHM]) except jwt.exceptions.ExpiredSignatureError as e: raise KeyError(str(e)) except Exception as e: log.err(e, "while decoding JWT session") raise KeyError(str(e)) # might raise KeyError: will be caught by caller, which makes the token invalid self.user_info = decoded['user_info'] def updateSession(self, request): """ Update the cookie after session object was modified @param request: the request object which should get a new cookie """ # we actually need to copy some hardcoded constants from twisted :-( # Make sure we aren't creating a secure session on a non-secure page secure = request.isSecure() if not secure: cookieString = b"TWISTED_SESSION" else: cookieString = b"TWISTED_SECURE_SESSION" cookiename = b"_".join([cookieString] + request.sitepath) request.addCookie(cookiename, self.uid, path=b"/", secure=secure) def expire(self): # caller must still call self.updateSession() to actually expire it self._defaultValue() def notifyOnExpire(self, callback): raise NotImplementedError( "BuildbotSession can't support notify on session expiration") def touch(self): pass @property def uid(self): """uid is now generated automatically according to the claims. This should actually only be used for cookie generation """ exp = datetime.datetime.utcnow() + self.expDelay claims = { 'user_info': self.user_info, # Note that we use JWT standard 'exp' field to implement session expiration # we completely bypass twisted.web session expiration mechanisms 'exp': calendar.timegm(datetime.datetime.timetuple(exp))} return jwt.encode(claims, self.site.session_secret, algorithm=SESSION_SECRET_ALGORITHM) class BuildbotSite(server.Site): """ A custom Site for Buildbot needs. Supports rotating logs, and JWT sessions """ def __init__(self, root, logPath, rotateLength, maxRotatedFiles): super().__init__(root, logPath=logPath) self.rotateLength = rotateLength self.maxRotatedFiles = maxRotatedFiles self.session_secret = None def _openLogFile(self, path): self._nativeize = True return LogFile.fromFullPath( path, rotateLength=self.rotateLength, maxRotatedFiles=self.maxRotatedFiles) def getResourceFor(self, request): request.responseHeaders.removeHeader('Server') return server.Site.getResourceFor(self, request) def setSessionSecret(self, secret): self.session_secret = secret def makeSession(self): """ Generate a new Session instance, but not store it for future reference (because it will be used by another master instance) The session will still be cached by twisted.request """ return BuildbotSession(self) def getSession(self, uid): """ Get a previously generated session. @param uid: Unique ID of the session (a JWT token). @type uid: L{bytes}. @raise: L{KeyError} if the session is not found. 
""" return BuildbotSession(self, uid) class WWWService(service.ReconfigurableServiceMixin, service.AsyncMultiService): name = 'www' def __init__(self): super().__init__() self.port = None self.port_service = None self.site = None # load the apps early, in case something goes wrong in Python land self.apps = get_plugins('www', None, load_now=True) @property def auth(self): return self.master.config.www['auth'] @defer.inlineCallbacks def reconfigServiceWithBuildbotConfig(self, new_config): www = new_config.www self.authz = www.get('authz') if self.authz is not None: self.authz.setMaster(self.master) need_new_site = False if self.site: # if config params have changed, set need_new_site to True. # There are none right now. need_new_site = False else: if www['port']: need_new_site = True if need_new_site: self.setupSite(new_config) if self.site: self.reconfigSite(new_config) yield self.makeSessionSecret() if www['port'] != self.port: if self.port_service: yield defer.maybeDeferred(self.port_service.disownServiceParent) self.port_service = None self.port = www['port'] if self.port: port = self.port if isinstance(port, int): port = "tcp:%d" % port self.port_service = strports.service(port, self.site) # monkey-patch in some code to get the actual Port object # returned by endpoint.listen(). But only for tests. if port == "tcp:0:interface=127.0.0.1": if hasattr(self.port_service, 'endpoint'): old_listen = self.port_service.endpoint.listen @defer.inlineCallbacks def listen(factory): port = yield old_listen(factory) self._getPort = lambda: port return port self.port_service.endpoint.listen = listen else: # older twisted's just have the port sitting there # as an instance attribute self._getPort = lambda: self.port_service._port yield self.port_service.setServiceParent(self) if not self.port_service: log.msg("No web server configured on this master") yield super().reconfigServiceWithBuildbotConfig(new_config) def getPortnum(self): # for tests, when the configured port is 0 and the kernel selects a # dynamic port. This will fail if the monkeypatch in reconfigService # was not made. return self._getPort().getHost().port def configPlugins(self, root, new_config): known_plugins = set(new_config.www.get('plugins', {})) | set(['base']) for key, plugin in list(new_config.www.get('plugins', {}).items()): log.msg("initializing www plugin %r" % (key,)) if key not in self.apps: raise RuntimeError( "could not find plugin %s; is it installed?" 
% (key,)) app = self.apps.get(key) app.setMaster(self.master) app.setConfiguration(plugin) root.putChild(unicode2bytes(key), app.resource) if not app.ui: del new_config.www['plugins'][key] for plugin_name in set(self.apps.names) - known_plugins: log.msg("NOTE: www plugin %r is installed but not " "configured" % (plugin_name,)) def setupSite(self, new_config): self.reconfigurableResources = [] # we're going to need at least the base plugin (buildbot-www) if 'base' not in self.apps: raise RuntimeError("could not find buildbot-www; is it installed?") root = self.apps.get('base').resource self.configPlugins(root, new_config) # / root.putChild(b'', wwwconfig.IndexResource( self.master, self.apps.get('base').static_dir)) # /auth root.putChild(b'auth', auth.AuthRootResource(self.master)) # /avatar root.putChild(b'avatar', avatar.AvatarResource(self.master)) # /api root.putChild(b'api', rest.RestRootResource(self.master)) # /ws root.putChild(b'ws', ws.WsResource(self.master)) # /sse root.putChild(b'sse', sse.EventResource(self.master)) # /change_hook resource_obj = change_hook.ChangeHookResource(master=self.master) # FIXME: this does not work with reconfig change_hook_auth = new_config.www.get('change_hook_auth') if change_hook_auth is not None: resource_obj = self.setupProtectedResource( resource_obj, change_hook_auth) root.putChild(b"change_hook", resource_obj) self.root = root rotateLength = new_config.www.get( 'logRotateLength') or self.master.log_rotation.rotateLength maxRotatedFiles = new_config.www.get( 'maxRotatedFiles') or self.master.log_rotation.maxRotatedFiles httplog = None if new_config.www['logfileName']: httplog = os.path.abspath( os.path.join(self.master.basedir, new_config.www['logfileName'])) self.site = BuildbotSite(root, logPath=httplog, rotateLength=rotateLength, maxRotatedFiles=maxRotatedFiles) self.site.sessionFactory = None # Make sure site.master is set. It is required for poller change_hook self.site.master = self.master # convert this to a tuple so it can't be appended anymore (in # case some dynamically created resources try to get reconfigs) self.reconfigurableResources = tuple(self.reconfigurableResources) def resourceNeedsReconfigs(self, resource): # flag this resource as needing to know when a reconfig occurs self.reconfigurableResources.append(resource) def reconfigSite(self, new_config): root = self.apps.get('base').resource self.configPlugins(root, new_config) new_config.www['auth'].reconfigAuth(self.master, new_config) cookie_expiration_time = new_config.www.get('cookie_expiration_time') if cookie_expiration_time is not None: BuildbotSession.expDelay = cookie_expiration_time for rsrc in self.reconfigurableResources: rsrc.reconfigResource(new_config) @defer.inlineCallbacks def makeSessionSecret(self): state = self.master.db.state objectid = yield state.getObjectId( "www", "buildbot.www.service.WWWService") def create_session_secret(): # Bootstrap: We need to create a key, that will be shared with other masters # and other runs of this master # we encode that in hex for db storage convenience return bytes2unicode(hexlify(os.urandom(int(SESSION_SECRET_LENGTH / 8)))) session_secret = yield state.atomicCreateState(objectid, "session_secret", create_session_secret) self.site.setSessionSecret(session_secret) def setupProtectedResource(self, resource_obj, checkers): @implementer(IRealm) class SimpleRealm: """ A realm which gives out L{ChangeHookResource} instances for authenticated users. 
""" def requestAvatar(self, avatarId, mind, *interfaces): if resource.IResource in interfaces: return (resource.IResource, resource_obj, lambda: None) raise NotImplementedError() portal = Portal(SimpleRealm(), checkers) credentialFactory = guard.BasicCredentialFactory('Protected area') wrapper = guard.HTTPAuthSessionWrapper(portal, [credentialFactory]) return wrapper def getUserInfos(self, request): session = request.getSession() return session.user_info def assertUserAllowed(self, request, ep, action, options): user_info = self.getUserInfos(request) return self.authz.assertUserAllowed(ep, action, options, user_info) buildbot-2.6.0/master/buildbot/www/sse.py000066400000000000000000000111741361162603000204140ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json import uuid from twisted.python import log from twisted.web import resource from twisted.web import server from buildbot.data.exceptions import InvalidPathError from buildbot.util import bytes2unicode from buildbot.util import toJson from buildbot.util import unicode2bytes class Consumer: def __init__(self, request): self.request = request self.qrefs = {} def stopConsuming(self, key=None): if key is not None: self.qrefs[key].stopConsuming() else: for qref in self.qrefs.values(): qref.stopConsuming() self.qrefs = {} def onMessage(self, event, data): request = self.request key = [bytes2unicode(e) for e in event] msg = dict(key=key, message=data) request.write(b"event: " + b"event" + b"\n") request.write( b"data: " + unicode2bytes(json.dumps(msg, default=toJson)) + b"\n") request.write(b"\n") def registerQref(self, path, qref): self.qrefs[path] = qref class EventResource(resource.Resource): isLeaf = True def __init__(self, master): super().__init__() self.master = master self.consumers = {} def decodePath(self, path): for i, p in enumerate(path): if p == b'*': path[i] = None return path def finish(self, request, code, msg): request.setResponseCode(code) request.setHeader(b'content-type', b'text/plain; charset=utf-8') request.write(msg) return def render(self, request): command = b"listen" path = request.postpath if path and path[-1] == b'': path = path[:-1] if path and path[0] in (b"listen", b"add", b"remove"): command = path[0] path = path[1:] if command == b"listen": cid = unicode2bytes(str(uuid.uuid4())) consumer = Consumer(request) elif command in (b"add", b"remove"): if path: cid = path[0] path = path[1:] if cid not in self.consumers: return self.finish(request, 400, b"unknown uuid") consumer = self.consumers[cid] else: return self.finish(request, 400, b"need uuid") pathref = b"/".join(path) path = self.decodePath(path) if command == b"add" or (command == b"listen" and path): options = request.args for k in options: if len(options[k]) == 1: options[k] = options[k][1] try: d = self.master.mq.startConsuming( consumer.onMessage, tuple([bytes2unicode(p) for p 
in path])) @d.addCallback def register(qref): consumer.registerQref(pathref, qref) d.addErrback(log.err, "while calling startConsuming") except NotImplementedError: return self.finish(request, 404, b"not implemented") except InvalidPathError: return self.finish(request, 404, b"not implemented") elif command == b"remove": try: consumer.stopConsuming(pathref) except KeyError: return self.finish(request, 404, b"consumer is not listening to this event") if command == b"listen": self.consumers[cid] = consumer request.setHeader(b"content-type", b"text/event-stream") request.write(b"") request.write(b"event: handshake\n") request.write(b"data: " + cid + b"\n") request.write(b"\n") d = request.notifyFinish() @d.addBoth def onEndRequest(_): consumer.stopConsuming() del self.consumers[cid] return server.NOT_DONE_YET self.finish(request, 200, b"ok") return buildbot-2.6.0/master/buildbot/www/ws.py000066400000000000000000000117021361162603000202500ustar00rootroot00000000000000# This file is part of . Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Team Members import json from autobahn.twisted.resource import WebSocketResource from autobahn.twisted.websocket import WebSocketServerFactory from autobahn.twisted.websocket import WebSocketServerProtocol from twisted.internet import defer from twisted.python import log from buildbot.util import bytes2unicode from buildbot.util import toJson from buildbot.util import unicode2bytes class WsProtocol(WebSocketServerProtocol): def __init__(self, master): super().__init__() self.master = master self.qrefs = {} self.debug = self.master.config.www.get('debug', False) def sendJsonMessage(self, **msg): return self.sendMessage(unicode2bytes(json.dumps(msg, default=toJson, separators=(',', ':')))) def onMessage(self, frame, isBinary): if self.debug: log.msg("FRAME %s" % frame) # parse the incoming request frame = json.loads(bytes2unicode(frame)) _id = frame.get("_id") if _id is None: return self.sendJsonMessage(error="no '_id' in websocket frame", code=400, _id=None) cmd = frame.pop("cmd", None) if cmd is None: return self.sendJsonMessage(error="no 'cmd' in websocket frame", code=400, _id=None) cmdmeth = "cmd_" + cmd meth = getattr(self, cmdmeth, None) if meth is None: return self.sendJsonMessage(error="no such command '%s'" % (cmd, ), code=404, _id=_id) try: return meth(**frame) except TypeError as e: return self.sendJsonMessage(error="Invalid method argument '%s'" % (str(e), ), code=400, _id=_id) except Exception as e: log.err("while calling command %s" % (cmd, )) return self.sendJsonMessage(error="Internal Error '%s'" % (str(e), ), code=500, _id=_id) def ack(self, _id): return self.sendJsonMessage(msg="OK", code=200, _id=_id) def parsePath(self, path): path = path.split("/") return tuple([str(p) if p != "*" else None for p in path]) def isPath(self, path): if not isinstance(path, str): return False return True @defer.inlineCallbacks def cmd_startConsuming(self, path, _id): if 
not self.isPath(path): yield self.sendJsonMessage(error="invalid path format '%s'" % (str(path), ), code=400, _id=_id) return # if it's already subscribed, don't leak a subscription if self.qrefs is not None and path in self.qrefs: yield self.ack(_id=_id) return def callback(key, message): # protocol is deliberately concise in size return self.sendJsonMessage(k="/".join(key), m=message) qref = yield self.master.mq.startConsuming(callback, self.parsePath(path)) # race conditions handling if self.qrefs is None or path in self.qrefs: qref.stopConsuming() # only store and ack if we were not disconnected in between if self.qrefs is not None: self.qrefs[path] = qref self.ack(_id=_id) @defer.inlineCallbacks def cmd_stopConsuming(self, path, _id): if not self.isPath(path): yield self.sendJsonMessage(error="invalid path format '%s'" % (str(path), ), code=400, _id=_id) return # only succeed if path has been started if path in self.qrefs: qref = self.qrefs.pop(path) yield qref.stopConsuming() yield self.ack(_id=_id) return yield self.sendJsonMessage(error="path was not consumed '%s'" % (str(path), ), code=400, _id=_id) def cmd_ping(self, _id): self.sendJsonMessage(msg="pong", code=200, _id=_id) def connectionLost(self, reason): if self.debug: log.msg("connection lost", system=self) for qref in self.qrefs.values(): qref.stopConsuming() self.qrefs = None # to be sure we don't add any more class WsProtocolFactory(WebSocketServerFactory): def __init__(self, master): super().__init__() self.master = master def buildProtocol(self, addr): p = WsProtocol(self.master) p.factory = self return p class WsResource(WebSocketResource): def __init__(self, master): super().__init__(WsProtocolFactory(master)) buildbot-2.6.0/master/docker/000077500000000000000000000000001361162603000160635ustar00rootroot00000000000000buildbot-2.6.0/master/docker/README.md000066400000000000000000000012101361162603000173340ustar00rootroot00000000000000Buildbot-Master docker container ================================ [Buildbot](http://buildbot.net) is a continuous integration framework written and configured in python. You can look at the [tutorial](http://docs.buildbot.net/latest/tutorial/docker.html) to learn how to use it. This container is based on alpine linux, and thus very lightweight. Another version based on ubuntu exists if you need more custom environment. The container expects a /var/lib/buildbot volume to store its configuration, and will open port 8010 for web server, and 9989 for worker connection. It is also expecting a postgresql container attached for storing state. buildbot-2.6.0/master/docker/buildbot.tac000066400000000000000000000011551361162603000203620ustar00rootroot00000000000000import os import sys from twisted.application import service from twisted.python.log import FileLogObserver from twisted.python.log import ILogObserver from buildbot.master import BuildMaster basedir = os.environ.get("BUILDBOT_BASEDIR", os.path.abspath(os.path.dirname(__file__))) configfile = 'master.cfg' # note: this line is matched against to check that this is a buildmaster # directory; do not edit it. 
application = service.Application('buildmaster') application.setComponent(ILogObserver, FileLogObserver(sys.stdout).emit) m = BuildMaster(basedir, configfile, umask=None) m.setServiceParent(application) buildbot-2.6.0/master/docker/start_buildbot.sh000077500000000000000000000034751361162603000214540ustar00rootroot00000000000000#!/bin/sh # startup script for purely stateless master # we download the config from an arbitrary curl accessible tar.gz file (which github can generate for us) B=`pwd` if [ -z "$BUILDBOT_CONFIG_URL" ] then if [ ! -f "$B/master.cfg" ] then echo No master.cfg found nor $$BUILDBOT_CONFIG_URL ! echo Please provide a master.cfg file in $B or provide a $$BUILDBOT_CONFIG_URL variable via -e exit 1 fi else BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config} mkdir -p $B/$BUILDBOT_CONFIG_DIR # if it ends with .tar.gz then its a tarball, else its directly the file if echo "$BUILDBOT_CONFIG_URL" | grep '.tar.gz$' >/dev/null then until curl -sL $BUILDBOT_CONFIG_URL | tar -xz --strip-components=1 --directory=$B/$BUILDBOT_CONFIG_DIR do echo "Can't download from \$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" sleep 1 done ln -sf $B/$BUILDBOT_CONFIG_DIR/master.cfg $B/master.cfg if [ -f $B/$BUILDBOT_CONFIG_DIR/buildbot.tac ] then ln -sf $B/$BUILDBOT_CONFIG_DIR/buildbot.tac $B/buildbot.tac fi else until curl -sL $BUILDBOT_CONFIG_URL > $B/master.cfg do echo "Can't download from $$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" done fi fi # copy the default buildbot.tac if not provided by the config if [ ! -f $B/buildbot.tac ] then cp /usr/src/buildbot/docker/buildbot.tac $B fi # Fixed buildbot master not start error in docker rm -f $B/twistd.pid # wait for db to start by trying to upgrade the master until buildbot upgrade-master $B do echo "Can't upgrade master yet. Waiting for database ready?" sleep 1 done # we use exec so that twistd use the pid 1 of the container, and so that signals are properly forwarded exec twistd -ny $B/buildbot.tac buildbot-2.6.0/master/docs/000077500000000000000000000000001361162603000155445ustar00rootroot00000000000000buildbot-2.6.0/master/docs/Makefile000066400000000000000000000133211361162603000172040ustar00rootroot00000000000000all: docs.tgz .PHONY: tutorial manual VERSION := $(shell if [ -n "$$VERSION" ]; then echo $$VERSION; else PYTHONPATH=..:$${PYTHONPATH} python -c 'from buildbot import version; print(version)'; fi) TAR_VERSION := $(shell tar --version) TAR_TRANSFORM := $(if $(filter bsdtar,$(TAR_VERSION)),-s /^html/$(VERSION)/,--transform s/^html/$(VERSION)/) docs.tgz: clean html singlehtml sed -e 's!href="index.html#!href="#!g' < _build/singlehtml/index.html > _build/html/full.html tar -C _build $(TAR_TRANSFORM) -zcf $@ html # -- Makefile for Sphinx documentation -- # You can set these variables from the command line. SPHINXOPTS = -q -W SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean towncrier html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* towncrier: if command -v towncrier >/dev/null 2>&1 ;\ then \ cd ../../; towncrier --draft |grep 'No significant changes.' || yes n | towncrier ;\ fi html: conf.py towncrier $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: conf.py towncrier $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: conf.py towncrier $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: conf.py towncrier $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: conf.py towncrier $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: conf.py towncrier $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: conf.py towncrier $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BuildbotTutorial.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BuildbotTutorial.qhc" devhelp: conf.py towncrier $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/BuildbotTutorial" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BuildbotTutorial" @echo "# devhelp" epub: conf.py towncrier $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: conf.py towncrier $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: conf.py towncrier $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: conf.py towncrier $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: conf.py towncrier $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: conf.py towncrier $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: conf.py towncrier $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." spelling: conf.py towncrier $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Spelling check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/spelling/output.txt." doctest: conf.py towncrier $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." buildbot-2.6.0/master/docs/_images/000077500000000000000000000000001361162603000171505ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_images/full_logo.png000066400000000000000000000221721361162603000216440ustar00rootroot00000000000000PNG  IHDR1y- sBIT|d pHYs+tEXtSoftwarewww.inkscape.org< IDATx]w|TU63o&I)iREeQ"kkYA\u)]P\g)BB $Nʔ73!~>~>[ν{=s"d,1GeM6Jem э?"*~7 kV{ s%KOky?BuhG;ڊi(Jw]xdϲaQݯl5ԘڽYs`Ο=O0юv\q4i 7[nD!7"&̸vhD i9]@DdIE7(m~:5W 0  3ne)3c uMaS2@dPUVo,u6[c!kG;.4&s4 )-=<:-itE*ú%][>=`k ~E iG;bҿy9*Ufu/8P{HԇrjPO`9rc^w&?i~PbӠR503O5ԍP$L&Ӓ:dREj5PeUUu Y~+<<͸8 YUU|D$HMMuxf:}4+IR_h8c6z[]]]q\2QQQ Ƭ׿_ Lߨ]5kNfeeI9mUUIQJ+}QLr555tFgjܹsjjjBPd~oݪ*,XO=m4SΝ/dڴi6nx ^:EQ}y_(KPNLɸ\!#9- ^:"N{%D~:]K! )M+կ?~MҰw0exD } SUq*d]LؿuVo8ADTTݭ[7SNNi6{KuIIIHIIAIIx߄Rvڵ޽{7S+Xf;BDnY&n_#2LR'ec4mΟ*oDa=ޠdk`y+>}gNH5DF3gΨNZ`Oq1ׯ7zNm>y`Q$KQs _IM6^|!K"}iH0/=k&@/=-=q=t'+}J:fzć2'('ދXr]ݜX{7WZܹs ^YUl%l :,δq9r +kt`z/ċiO}HƷN-)k Qj6K^=13e(ou!NdMo q$ɳG4-#_$yn4,A/ t:opMӟҤ(㸜ґeJ4199YYkjjcǎ1$I:!E:˲%Iҹsg.>>tTee%1=0;,eiQaJ955Uu: TUUNp83N;j8N^" 0O$IwXl`0M3J:s5lJ.c!yÐP Ⱦ?_io6/XeyϞ=sW.33S.9 /t)E%3>|RX"7~ԤI.tmm-|͠/(HQg#Fp d,#??ϗ_~㸒`{fĉ &u:(Ջm۶]q,)hdfMvv63n8SrroGmܸ:It:p(z- $ [Z9r$rssiU!"=Ș-fh]]U'`=Ȓ¨.aщYKrO1I#s3Ŀ澽%0/OvUy!T+? 
<5; iZ53JgNp(j<0fϞmt+jHjRN1gCǎ 05jnzp:cS:n#hr_JDi&ʀp!'EQLWy1]g6p $o*;~][8J< @o ˲FM<$ |$d?:b(Zc5cqaՁeYtqqq3gK D޽ ;vDCC9łlٹsg̘1ôdɒc[ccN󯁖߄`Xl۷O]r%G Nt, 2e!UeqӦM"0[EQ,|G@vv`$Ina0{8|'G:I( z>GQӧO7B;PZZ:OE7($%%E\H7N~ BKЍEdI%ļ4,lUؿZY`y,˫eGw{>w}Չ1#g#:n _p6E^w/KJJBR%Ͱj*$b57eYk̘1^bX^a<.-`x~~[o#v!m޼(DQ,=fѢEk͛g~C= 5htbttwoo%KO8VpunҥK>nf( &M 裏^w:]4L&S~Qi5}45Ԁқۺjۂ`O8ur]_y6ƁsZՠ]e&25bΜ9hX, .awN:IފUV5 0 :8O?IVknvZ }Hڷ~u#!!F&ѣG{EEE8}t ,3&,t8hW$EL3hB^( PE@ye"[՞9.<ϟa-++|q)]gƍؽ{7*+/zt a:uò[7ꁦ!^uee%.A Ir?b$->tk̤\Ẍ oƼ<'GdYܹsD}鍬,$mGP ݳEMHXNrM"o[+vy8ݪp}IRIһQPPlݺ{|H4,)={ӦM37 `ƌAK iWzWBBjXQQt>wm \8ٳa$OxΒ$4MFDxKPeee(-y:窚߿,:GZzE(Er=Nę^*I u\,;Vuߏ(/;AH8tЏk֬8L& >  b<թEQNo%M3TUU9[MDg20Lh|444P*ZنpPq zӭ}Oi򋱓ѲB=:R?ϏҔC,'lmXp2DQ~*Ȋ"BjmQIDpXX,E$I^RY@QTBX=g5FGGay ,@HUU},Wq:͞WxxVEQɾbf(exgO  _ 7[VJ]cv+U&Pů[K"R:]s(ޯKQToG%DQ,Sll,dYn$!MAб ˲ZHB X__/<_p%[B!tA e9-:[gU[[+ p<6\HĥSq~rШ;{-q#kXD̏?8j⃿n&,;VHa$ڵW__a]5i9i(5xK# u333,%& -仺=z Cח8q+gϞ,Nih_ۡCl$R]>imb: Iubju#aCowh9A)<쇝qԴ5鰁҇CtZEEfjh3dѸeمw}w3W"YeTMSSS30`a|I|0!!j}b PRR+ ĉY70#}=bA=(^h`X2qD#ǡWy{z˰,q7Պr~<^HZ DyZbOvOytCs>u9*n-$7Q59[#Ftt:0e5,˾u}5c6F 0w%-\wudNNN`8hšz#eΜ93ԩSMXH; áZo.3 $Iɞ63gzw4M0ȃnAzzz7Zlڴ#]} h4n4ǭ*\hBeL ĔL<&e'(A#H$ʪܝbt^DzO T%\'룻1UA_C}C/8B;E+--_~?[VVv;EQDZZ177Eeeefp8O_~hb=#mΝ˫`4l|e1o<ӏ?8av;Ç{444`}O>& 6/v37|33vXjر<(>(o(_YY9bxmTɓpGbٲevAn&pۻw޽{{ٚÇC>Z|H|-{i9?iCRj$wFBkBf8^P$qVj1q6l{9Gaa*9Peyyyy+8_'V+y{eee(my/xjmgϞd:UUuVy A]*Gؼy3~GUUUXpA$W\\W^I ͆իW ~=SYQoٲEI͎bؘf%Jpx䅕Ty%s:@YFDD,NNNA v;XUUвIJ$Iev IZ8x𠣠 ph4~vakʣYHHHj^@ȡ&[mtXT ; \c(Sf;'jQ.SS\qwDV# 6T-M ٯ ')vHJ*A*>}@2͌֨,Xଥ"9hh;_%'ᑎG7o2|I+8^'rl) VSjND/M,X?Bme06)nڍ+hŇmrMއvc ƅXJ].n:w80_-4e w @ Ζ8Oe{Y33]\ F|(.mtS(*)ܩVW(I"ӨN#t=_= :U 2#g]&5&G\)bi<ա/.p/x'wICJ _vJve+ ~<4+v9a?]m`Ji,m3r'@;A\HPs]v4]i#M4uʖ*vy\Ѵy6QY\Œij4#p64⢡MX743ڄVjs>Lt=,WA8- 4":ʎg.8߷ꑈ=Qd3|RNtLӐ稈NCx]@-zh0#]$4c DBc;4fZg=cЎOu_ m qw.z?jknws1qBx 3x`d<m p4?Ww.G\`+uѻEnWYs}/sA4WOw/0>j۽]Wh5xUW=r+; 4gχ1DIИ-$ea?W }ׇ'zųC PUHBHJ'+En66ɻIDATB2H29o$h#hIm]Js#ۍ"h!W7hʍ|4h+hj}*qљ13DC6λ@{NA> S"\- j`GY??C[E7YИW=_x}h} m9L+ߖmϚh\U-\~`;o!AQ5nnj 镸Оϓ.pnE@c@OiJf&h+ot+:iJhKVMQQ sA3㟸?m3 >oJ-*@c|, 9pBh3Mɰc{ 93#JSoΪBAl*~/ Ж̫s[;GI")=-{hqeشd?{IENDB`buildbot-2.6.0/master/docs/_images/full_logo.svg000066400000000000000000000565441361162603000216710ustar00rootroot00000000000000 image/svg+xml buildbot-2.6.0/master/docs/_images/header-text-transparent.png000066400000000000000000000336301361162603000244340ustar00rootroot00000000000000PNG  IHDRDٗsRGBgAMA a cHRMz&u0`:pQ< pHYs  ~tIME9'dqtEXtCommentCreated with The GIMPd%ntEXtSoftwarePaint.NET v3.5.5I6IDATx^ՕW 0"0AdLT (I4yg'zrNJBA!16ذkcaAzpH6sWU{~5h 8{SYx_2/%%sk1 +~w9cwoFh DWƌ؃sy,ޓ(>aqڤY-LkJLI1K3LZ|I41ML- Zy~2wNʎ@.&n<M^4sGZM̯JZ1aq;IN.01=!פehBM[84m(ny8f7gQqѢsj`mI#d AQvJ欤<2CsR MjlZ@}%^ke*izT7Cp 1SLT;r7.~sg!ր'!|Z`\ D})`AeRO5K奲2u}$.!xQQT q֪jl~m)9/ɾy-_ 1<&Cf%-E&G\ֱ8rx,A{\m0:ʼ-z IK2B'ηY.rm< =h l7kdz|A BZ`-6y饦0~VGYye&[fd[Mcki%Hg d9I+@D(Wi0ͱ9E-Lo,9=zA5@Fro3/‰f&+<`nKy3(U]J hX׫mY6V5.bg+laΔ*g&i_R MYr.9%.o%ŏc/Z]?Z ۢL@-55_mE= 3+:Geq-  ʕ )B =񮠗":E9Ra敁eYab'̲dȩjck,̎wwޙ:239PY`E1t4XUV.ʪ2UELee.$PWf=``*uUC.sEmYwmB+A|Le{ՋWEd;ʢ(KrM@,<o$W.7-7'ֽa6=C̦mY~fY:S_iaOX-ͣ@tЂPeMR2e?IM99g{ gzʎ-ΪPdU:S|v)!Xֳ<,e_fh`{7}ϙzlxmooy̷_/7ϼl4uŭ 8ׁ2@POqSKM,+%fowr @BL¬j LU^RZTP*,@i]`-R2ĵ$k7E?hzoY`M3 +~jOO[/¼O,o4ļ.p_{m)-/>sֺ>⢲6oò,-A]JZ~\vrRsg=a\HU]xւX!hCZgj M@- $*MWzbM4gt@znw['d†w~gU s?6{'P v]Y1`Wb~U&_L/s@[י>D%R_dsr?~_ (/̫|S-PQU恑L0*Vh90(.płE&6%} mI0{&Y/7Ui{~f#h_y]=WO RG_6/{<'͚~SGƬNUW`Zߙ@}yUVu3]zRFk@^V 5Mk(mZWh*:6*cۢv;K\_fj MlL , fkjbųLYRt= ny7 Ro>)5}V&)wѼr_ګfê]ͲV]Vgڋ*Ls~bjSj;uק21}9Xh\XURXi✟=X7ޑ1J1+j2\iJ$*BJkUIHE5f]i6, c_qQWDJX(۬]M$7/?캗ýMwUi*(7"S+Դش2RPjj|BRQ2ĸL9O;UUJU*Kg͌}b8^9 ( 
,(iKE-ĚLFQkZNʻdhs|Y[!kLbJ@mŲxhS[4.)3e6i^/M4O1S)[hQՖRӔW"+E6Njv̔ @ZUkRZ6]vs0Sn5=^n+7صպ %-vחjyuؖTa1;;/_mhI8+q'L|r̙3Sǜ|Q#Gr,6=[TB -ږe٤WFQLQ<)ռbrFSuR[Ư..jΞ4mpf5pmޑ;rUu[7mRwTUPLQ#Yj\_nI6J2ܗcōM4NR\bK]--50B~csv `Me[gamlԵ!Ȕx2_q}Qb+̼O&Ÿ6в>G5f3'Nh] ͂;Zkat]F 7[t f f][nOt`dok8b3fAFsϗkjʤXƴ0)@a}Ae8$@kYk-ICI)<3ܫ6p =aTyp{mI*ZT&Av(i{M], fq[[of[%Dx 5v)wMMl䈑fg:3:o~R)f |/w-[J:f><[{؝{~:떙>^cmS N\dTN&w[͂YX2ӮcN:ygHGaN88]t-bu^Yl*-2 *ju}Z ̔R/Ŝ3qʆaw'4jRnBb*jP~l! (@qQ&Xv]\zs[̑Go{;#Gc4b2ru1b/yuĨѯLj=ȣL=uK4-05D6PVoeA)[Ldcrͤ6gMR;nl] zl)'V+7ttMr /1+ڭ,CUc(lsE]@e>~{wZ{y#=h.hpenTU F`|M] l_oY~)ŝfKISY.qaF|We}Cv.^O;G"{T',4!pLNV/;pǿns6Jz2*2\c~^a:j[[kԖiW\)RWF@GN;L>,Yİq|6-J Kr1|S+qKsimY)xYEX.-o5qKr̤s2L|fu9ȕ7p6uuumVAf…WL;ٿQG,[c~|wBbZ[[?Kc;xQzUW8S__}_{ׇ\p;wLٴ4ʾ>A Ua&V^Y`*ZI,+({,R[AI&),6WNAݬZŽ)fν&qDzjYY(8*Y'VU{C2=mUprMmIy6~={•2iPyg(7/ſ{퍕&??ߠXc_r%ن(==ݜs9 ]Gyq=襤$?5:1G~.(.[YOv{slv iH"2 F_n]cMVvIU[p;Z5rU\\M5KC5Bj*[/ԕv]\خ9.nR%PkK^n*ĎwUT)ī(o ^f:ŜyOr@jyɁwnn{;8='%ɓ*Gnz/ZE*fcc7nsm'Fj+w;D@ζu_R_7G*&tmog(ǑF"C7Z .[/oM}eڑDU B6xZ& }6Xiz`JeV0,qw`&Ε6UVP-WFwvjR3fg̽O,꼵ryw緵%HWH髖1J|IUCP֗;]Vu2L8mY Üw5f+!i璓]=øuِ̪;spȑ#Q]2 5wkx #9b; ,?"E;Xp?~89SMllYhucpY$5 o3@ƨ݁eRX*xumAm<[1Ƹ̨mRur*k[ vͷT/3M}(Q@|].E|eoU귴 }U5+wdKiMaY[md}WMe6̲fsϛj?ٜvUm6ԕ}0jԨ&1͡ {qDž+s< oq^t`V/Gןn%x4538>COȂ~MA׸+/w@n,[la\ d$R^UZ2u,I|gVpkXnW`TYwpS _c.k[ AUR`q9X,x).pOTK@!.%%qsgptD:kwx 9/:{ˣXyg{?=E, -mUOEWVn +{fe3e>(lW)JmA(ig`@Jgxk/j Mkߥڊ_lr rYpL@&ƭ- മv=w%Zw B(u>*//wuCʤ!,rv'\* ܸ;k؉?=6)'_*\\d)/o^c&# E?mmvl*#xy6^;r;PY@$E<%porr(O" $sX0HA7*Gs0yCc Ⱦ.KʍW\wb\bFeQ*(6ߒ|]yiloIC͂ a}d#ύFI  Rlk8%$~ |ТkF!]%;8qrBlYyOf-MzQX)N*#p7ºU1mCv8e]FۺZZWX7 W[iv,@,%ehx^p`mHUPh;Xn6RcmblNz)ܖKݶ].U '`oțxz [;g 0ln7 n9ꫯfi;,u111I>b<01qot1bA|?h"됲w e>E,J/A4@Ї->Nտ[*%ϟoaph6i~9IʍK +pQY=nSŭ⶗7dX0vP`]xJt1*Xt$PUvĪ,g;e6'f\o_ k3%M7͗;|C8u{F ^hfϞm\HJ'$$ MCa$.x]7p"of`R঍,,OIݯVݠAz8 %rߌP'@r P`lA~6CL1cew1J-V.>S엨|tIZV,#x&MU^B)+Y f|=\FD9]?N%H(pB]WT]f6n-dZv71VAĽ@h'Z%seMmլ1uST\1Ā| IMi84m'h>m4kĆȶȨ~s]4l!KYbMcdvA^d9uwP.r_җ ʓq>=o@뤡Qpe&+z5\~LWl_=&Oܼ‹7_tݟ~k3tW&N,skIz[8N7on%9Dn`w1wGD:r; ,!1`DŽ*OxžĴnYs Zϛ6![o"&`=Jӳe{ۻsij "|pgGZKAE+PVkqa)|]9{絎r,ƣpC`*ST6GS2YpgN6\xֹ/fʮ],Э(/2e+d8uÿ28/K!q `'{͵ΕeG)[Wk(!uTkvF`J۹{ں?xMDljgB,P_&ק$ס6= !:)PvZ:1WU/ e^gDeQMܒUUgRF} lwNUms-]+WvciNwt),_N? 
ؽ}z: {j]I6 ӱ z4u+ kJNC<=H7쩲dnJB}˼i/V:㐁ٟU/X[LN}=٤{LvVZLKog͵* d@廅;laB-GE^yTpY@]<媙fgiй2bdȎ; zcp8y9 XOn]E<ê/2ԑ@UUU9]5Q{W@]7;=%AnV7(qWdeRd2n˟KLį.cbjP[;:J@ƹ&Cfެ,G5\2> ('%N6 6"Z1@LmPZD1!^f!tŗN7 Y&e 2bZ"cS>NB7z]XZ/j&Z0 L2dAXm{@<*jLy^ r9J<orJ@Ͽ^eŪ5\d2Ylzٹce`y˃HR1,u^7}*rvK~ p6I'NwL1s\Z7ڊ4ȡĞcL0lFo ŀ{hr@ oALmNޝldݲeO^XR@kX\d7<_>zQشz *)n.pgI%QU\Ntlzԕy`QKN*:mYSbinܸй<jh2d dNE)D_YJYkF\/l\ˍ>$p#o Zs7Gnȇ̶`*@~F{P| 6Ӌncm#^ʴZT(%[ f~uDŽJB}҇Rv{dKG2eeı2tq2hǩçAM7+n|LQOtscŭ,c|$v m|0c UoAn.4O3 Ϲ/Ѿ6L5 G oq!B5 s-[d<ֺ?[5$ 6A2pSPJȋCOt<SOKq4Z^һu?F9h^IKTGЈ{Lt)1nkgt#tsIF}t{I%[!C3V-+;6i39N#K#@ : Ny'`KQ)dIL05e ̳uƵfHQ(/\g\4H .v.> įJYV$K-&/;R<ǬUt~S(cRWA.p";7c eTpi[Bzo-# skD8nS9+jJ2XCh(1Sveh!K#ss[}d'̱Ps|xĜ'NP/,g߀zŭO?#7IþyЃs^/^`9*22dJɘ9ۃQ۰?[GJ&ɜT92B &SH%>F':l)oKjtczy eQqE<Ǿ܁=.҈St!]?V6 ==e}d<n3:ybVPbBt]|Dٖ.E]2(17O)R͵+3{օt󣺣edqQ/f2 Kd| Cq# e(%e(640F“ͦW@y4ibjѣ5 0xcn3 ,j@'iKπE/;07Ha[4 s'&Fdkfg5 hP`2͌y[@&~$>"ͣ@3r2`Q7}DP!G@2ǜ﬋v|.hw[ zIENDB`buildbot-2.6.0/master/docs/_images/success_normal.png000066400000000000000000000037501361162603000227030ustar00rootroot00000000000000PNG  IHDRTjsBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org<eIDATXXiTW@B-X2ZSZZ3*V a ȾDd4"BA VăEajUhG8LgL̏gnLC9x9wZXs9 r 9SB?}9x0BƿV<# ,U~ F?wU 9p\ wןgCL-5AI,9i zXuyЇd)w# ;y9Q“/$H8iMO }W'=-{XXjIu-|9Ź4({ ^H!*=}gYtr${mqA=98tdch(_LP!!opZ\A1]E\笀(lpDXrD|j5vL˯=.nvQF5T;"OY|iBܩuZubBS- ZstE5e9^?E> W;povӷRYkhӧ42{ G+ BZ}59ԮjLuܛ@W&.ca_jظO# `b ƪq7 -[O^=Ͼb7S/s'ѭǟLRE[H E@:s(/ Q5f5f_`zpn{Ń;p XMITP{~~v&wYS~h?/ձ~'_9nMB{Wca_j1ՇQ`q*|{i`nާvF.|;™jIߊ'OGxֻZ|TF<|4 IZob2]ZjT!~!~`ٸq~v1R Z;+_ک=;;&6=3&HYe1:p4Tgr㏻⢪ na,O xL͌gP";ʅf8hLRUu:udrO~g:)lc0}&d|ЕVܸ5Ze 376i6Pʵnt">4#-H[&`̣9q+ظ#>(u,":)BqE2S'*ݽMoߧpQcp cC~1+q5xIx4n*rjGF̖줼WsGD\R2Wb|tVјNHe qA}h& šغQ?$rjlB1ڒGa#YBI(ʦ|Rv8n$VȫO}lr Mlᱷ!P(guHFc=H9!_(1fUJx]f/0dwb7-Vl `Ún [:ˑ?|Sgs^&6o_]DPg\ ߗ{d;=CKm_6~%AsC5|[ {bٿ"?|6Ya_zas$]0R /IENDB`buildbot-2.6.0/master/docs/_static/000077500000000000000000000000001361162603000171725ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_static/icon.png000066400000000000000000000517241361162603000206410ustar00rootroot00000000000000PNG  IHDR,,y}ubKGD IDATxuxՇ3ZВ, 1DZ8ܠmh_M4fj8 98qL%[`ɲ,b\-}H;3ռ}qzY (((( oRETh9 \`,SPPdK7}vPR/pW,UPPVh@=;( ö+((8!^`!@7XDV8BB##Fu#7Gy(x`0 XL\0,2)d;_?}i?g[(K$s  #ȝLθ-{슥o[cۤ#jG}(Kd"8Y @Oed5zQɃ:/܉j.N:kQ+y6:ujyGwc鴡{vN҃&oR Z`MfNyȈ|2r'[@Va.0ULe^մKuߺ CBqX !oSـљ eO$=2s]`h ]K2Dǥ8{Iay;شKC.̞J'>Xj qId3n&Y^n'#BiR8#;xGFP ɑ3B]`O%(+pFXs:5 rzC#F;QfSNԦ_mw_MRݥIZ5Qo#9 B5s5VS;7 6z֪' wFR9uɀ6RZ5|%uRei88,!ߜ\$| QI^rXī^C-Rv~A78,$QcfSzm'od_QL݈<& +f+vGr| \tyΪC(0& =kI @2LS8,bp9MCU R3$ᜱ3<o7!`PYa #;x.O>"0"'l_=o?s\iBig L_$wI³%µBid zT`g <%I弥 >=ȕrx֪BqXHΛLPp`& +2n p [kLo AHkqipI=v&9֧t5cP^Y[ 0uఎ$ LWIVnS'o>u6/;i("V"Srco*6_5>;B:%IXg@O͞gu9phZ!)5S̝BF"b\ca 0,=kk !دHKfT2O<OuR}XRJWBF|țLVTFfV2 EI:^}z:갊QRA!dN&3o*yIZy TW#W(Yâ8ڳVr=A,Z|$ +<WR{T\Qz\гaԅr_Q 0&:4Ջ垶͗ǡɎ y  g}U=;u @:Z!o/znyϕ;#L a-A/mDE@Q59[9a~؋O y 3ԫXA]yJ waiz?8/;?)X1zmrX3^93QJB1vJVJCi͒ ta0f|oءvҲ'po#Y6DK*2)xI+ |,U_V߳` &kT/£̛"խ^U_֩ 9f)DA!\}ǫ?DCnCЉz*/Kb:8 f((xN| ϓ;rC<r CgPԜs}޳JAèTj'C7gdOL}aI)AP5}=|-r뽧?~+T*9 `\1#=ks:A>(iN9 ԹgpM-F0Y:,SpMBãLzԨ!KPJ:5zdHSZc/F`N6lV(%轄jaߞ-TRW]NC]͍56ъbnav;vjVVg`Jph #H˙DvTFx 3Zky+,"A|޳V _tXwlvY,^2gxa)ھ]SQ :;{:JJDT#24cf-+8AgG =~=%pM0"қfdN`cٽmk~ 6yۤT*#-`g3v\lg¶u2p-`Uk+ښVVc0xϪ߽öuQsէj⓳zYXXptƧo<ޑ; Z%Ɨ8~;=1V6/{憚AL܈č#69yJFkc5MWspvj*>ĔQ|-Jصy9o>u&I9IvZ_rX'K{6$每~%s|ʗ=F[G:}0);3 r*WSYMX̦iy *nGQ 8ٳV/9b@rܩWrzbͲH k$)}"j:Un+1wl8"* {1Ǹ*."K HpoUU8+xQiz74F:oq-ݝn | H/"5k1 @hiGR6O<88̍CB#mu۳cG;4FM\!8yC\l{ԥR<4~hu>UV^{;xˁ=M= 򥹇5~ =~=M}:gE7:6 Rsf?4܃[聊"~MbRIJAK's`9:d]K=8SӁ/܂/8l}EL9);|__v n'!ome/wdc7.ɩο,ˏ^M[KTw3BOGXWlMLs%s||o$*9S&,:3*i>M:[DX-f.đ$)NKMYȮ?.;T==dw5lXd1${Vm-L@SlXyn}F2r 
9C#8}{vα[6݄qbz6,:Dn?$r9ҧIeZ#u\b9wx*,f9cgzB` f̓,NC~nC+M۳YݍZ)#Cf݋)/,ٟ=O,I#>m"Wb5wˊ6Bބ9^?BiM+H2tX@Ph8 8CT0'm<}TJvTL\p O*%EmF&6f>6H"ۻSg;tb M=v"u`*90d3o DGU$ )nŇhO0̻ɝv,~ad?ڊ-t4wقn#;e JE !P$3-~[tZ9Wއ$V7sxW1HP,wHC$}+ݵHI&Qy9vh8AS+``ZH=bщ1XG=(/y9FԮ蘻!RGQ hA̻q2I/W(dv`|φ / 3o,G'jW>RrjxVkhktyWL=,A+&$wlvnQJv98X7 4 @/{%w#}FPXb]JE_@֐w,{[feW9a=X£7x Lg{c!1ya7Eo)j;ϤC̸yW3\/X ZcD*w-ljg` f%&XµL5 @v:>mX{+VMw'܈S;IȘ¾aN=tp1q#HV` `֩/A}M!R`[_xdžPidžپ{Q{tRSO;*젼|͌3ॻhi+1//0yir;\AJ` ؖy/v}P83;*ݮ|5cmV_@r+om:O;,jؙhuzYzfETL;oG$y};o QI暪~mQ4uim&-\;y6o_EѶ_EO%1{v;O^*cθ^[;.jWSi\s=BRɟ8σ&x,9 Lb‚?o( LaE;ψ?k1 \O*wq1rsـ^E<-/8hAر{*bh݉ƑS;G-:czc:c8ZMXD8?Q]YAgG*@VV}=eBҧb_e6\溽t4Z:[lmZCo CNht*# IXL1#R '²/n1y멛_y4gX!)DE'{Cy)>q *Kw'vTSLbjhk.f5@W{-EH㲈1c1'Xx|gQ^5U%o"#GQ*)5H loaOGM;cH+Nr5Ԕ~A\}۷9#F>iĤmWLE>|.e%fh )% ˟4/`k a!8)x JM:t--}W6vE4,x[$f"9g.qh=|v.W{MU %I%|M9WSK4.~Q{能6jkbWT\*Ju@%B : I7!EQ!=Z ;Ag #eR|Ix{־CwgO^+6*vpX)Ğ :Qcf߬_:C(iNBŁ1ĹhU/qIAHxrT]m͟If٤䟀ƃ;]Se,im>DXDgׯ3LGom)-}ҙ R#̦jE'AmBHHu=D ~$? ]*ewR?)1DfI2&Mɺzvߜ{=bS_[IM^ 5l5U{흉R?lt) Kfe! ,=A7mpnz_'Zٻ]6}5|i/Z}0cG-[,Ua\1; `AJEceo-.n:)]~,Rv6'K/G-p0A?"$VMo(=!mFY$rX]mo_E8a\rw,@pZGIKdLocRǝLJ (~ʊ0=B!ub#p'H+[6f5S]z",.qb􏟿;sL].wkh_/IB\tw'h,\ٓ:U5@|\ \sEtw4(1ش.7(ehݫ\F߰hJsB* 0ۥEfhq̎ej hO`9+G!zuՂ+?Խ]\FBRvN5a(1 `] ǺT@OTlq x@\\0EXeGk#*y^kfƮ-aC;8`̬JӡTj-).!Mla|u/ݝm@$pGK@BqɿEW[?]??bvg&gXR֭o֗ٷwXDK&-D&lwRU^IUj>"D+CJ}dvM%5sM}77a6ڇ6/i CVBQ4ZgvE|t$ջcɱQʁ* `.B CuWlt3 P_nhxZ4(pXAQC)Ο8oR^Q8z vW<Ƕoj0ـ z|T_91R<^tBu@"Q{ptpn}֯Ml^mMBPo!Rap#6 ;x__D3046X[ IDATKtʒg>F墥.viypXT/U]P!41r_V6|r3%O` G7)|l!-ƇfX/~:R8N]8GIL&&^6¯/Dd sW ? wJ<0v_ q!Z Κپ>6 ,U4dM] w.kcC&; #HpdʠȦ6|'Z,ngeM7ppwJeayc 4n{v,Zt!qN]kؤ〠.1|Qͦ6|q; Ro@ C`DVMC4H(DTWA]s \"DX%84Q\|Aꩩb -l_iNBЂrN[a0|0CmfT_n jy-xή-X˟av&EYlZͽ쒺>sD8!ͧU?c4VZRU^HsCCQN_K2'Gh~ .Y sMOP$1PU+J9,[ "H5F0D^aE#vqs\x lv>eoK]!$ܽV+ !m~w:ho:.+"/#+J,fΏZk.;(YĕhE>+uS+;ލ%9K &,.҄eZ*/ċU,+tp /H֨C;zzaEuvsˋ_h/x#,ɯ0pOv5EX\t+@jXذSLuR+;Tp?kkHmҋXpJH*5`QbF!sx eYDP!;mTppk/JW=KwW}V`V:?lbώբ`MuK43yqN/N4wj=HH^ \n\BaҕϊlUzCz=;mwRkЇJ W9, u.ocKXLm%>U\' sՓT vK7wAab8D;#jv965ƎCcw]jwP9fɪtVpazYD&t9SoX3&{c)=8>_KWh [8D#YL~IA㦊Rm1uީT a)Ȍ)n3EKyHf=+:j )C/HJCKa (+A(zCӆx9&4<cpX_<)[xU3.)]R#,E߇Mwȷ[FX2T%]匛Vɝ}=Ak J pe`c.)h)$6OJLN.sXqw'3'ץ2`=lg]iaO35jj E(pJ1OCQRtHXѮW%$KG?nԲ_gCqX~)c LPTN,qmQH@LMylQі `@;pwH-.7jPc ER3gܽu6kB4zq#Ȅ3o0R#u /$$3ocXs<5 y.!?HEQ^T-:@/£.:ܘzRbiR@!ԱqF_cZٽu=8N>݆E,ui`K!?i^Ƙ OR.9 zͨD$LxY&:Uj t5K52`바~X d{ ѱqIVk$~e֯p8`bȨ3{πg JѦ{rk<($ 9G*-j ځo{8q( zp',"/7@"ձq b0|֯L-ۭ`>Df* }@I{e^2HI!k!nwt^1*JYxeWťR` *uX I#s[Ups".度gTw.E]E}i밙EE @,/A!ph ̈xwJ- ڻ:JC$W/}u@ Đ<$6=l#;~6sZ`Yîp-J0sy3\s8- A"Jǝzհ)%,.VW=QSU ez "oq B1XtRg*ƚJE+{N+ B&<T*S?)#>9+n}^p*B5\yUW'$y aYA}H:Δ!O crHy+jxfKw':ep =gW9]/@`G%y*$CYrK'I"v-EA}TvY:196\aSkXD܉F'=D{D] ==oz p<gV,V"ŵ{}XZ_&lsѕ(y9.*(A !Z1wJk~1j 5B$ʠJI)8 Ak)BC/#ܳ;x4zf)+cJ H5~[.Q3!t2wILM:R8}6L}aR:fB%(+O!BRs)m|> ],3!4δ \;ش4RUKޗX_wVc.`ryTKLVSU=m@uw՗R:Dc8(I8R";zS䰢prkQjmj@pKflޙDJhn83_H u7??)*gw ! "v"(.ZsK*6*6a[oo@T|&\"` ]߻JS亿IDcBTg~0'!S( wÌ=jbbբ>-]XL5FPV]IL#*4ěOU?N4CTݓF[}G 2- rM;,nǧ* Gٻs ݦ޻*O9.iod3 $!R5DF?BZciZ/*hٵg6+YǠ)Fs" #()\'&!L{bEI](qE`*2.?*Z%m'|:;zC͕=jTvWvBӲJwo R3$bڼiokr{z-$GVnTjf,8n~F8Ȱnѫa|’ dm-W0jEt}]s}kO\/* bw j EZ^(_Ax0>&rpa VB٤8P憃*bGM- K7<$=:Qit͹j<b dpӃqu$*p"rf%(Bʏ2a}x %ML{+Mdg!\ȢO8);]HS7>-ڪ7I'ep/'* g/Fk#.pAE3a̓~J"9m4y0z\FMT]Lɮ"gRidVv^ǒ6.*V8/e_DKS.;Owhuǘ.cώՔox`]Z!!9|27q.ᑮ+p2Q(-'5EGnooX͇ßVKM|RI:@8ʨ*47Ը*"cLj|RI(&x?ڌ1d[ni݆9,FٷS9:_E~?+JuwwGxd9c{lv?t7"&k1w IT*AJ","£≌I$,2N)率N#Rdj/QD='>mQ1blb{A XǏ? sgcs'BF;5F6nHK߈춀[tz:;Uj ᒵ:-ԭhwy%tuz2px: ;Q&#AcBSꌛ -ٿp-OwK)ul6 ݭv;K vX*'/!WR:#`̋].ի0(,Q>bd瘚ޓ!9zzx^'|4&_%`ū_Ϛex2ߠ͢xCX͵K<@iݮOi( 渆Ѥ̸F"fWQL-{-Jѣ5FȞ#S <\ii K3iػڝHP a#Hw'?* ; %E! 
}ڃ}1ٵ ;L-1 4~,*UkD΢zT4*Ks+L ǯ܃!>f փ[&ŻZ Ne ÉΎʊ6 ,!Ҵ 18zbQlh KSQQg\ȴᓱZ]MeTNJZɬ.e h6qIDATJk!JS TWT-&aɒ}-W-.3`xC7s:Ò?FBwSQ +>Koz1 *"6eox*몵 a#MHL)CGLAFj)>#\ *EbHH\ajl#ٛ9+<}VLlV+*vSce7b0P i&M"z"q:o0#/n=| Ьv-^pY|;vht$\12V moħWM֛m|fʊ6Q^ҢXkAcAxTbr m>Pevsp뛎e@"rvXF8v5z&\BH[5@ꋿGɾXˋd )]mTlcՔmt;V*pbF2̅.[l(I]^ Hz_XA0avSUFMDoJ䜂IݮOpThm:s\}3W$-Mu]JIP("ӎb%z_(E@[ 3m~Sa[DIDuM]صLy,T =wʊ6QS%4*mp,cX@Xr19ݎW*'"X#V,,CmBǒr̟Z(j1γT_q^% mTR^,=sI$Z>4ЄDg/"(ƷNkjwV ځ3%-U밚;H1*0F1rTzLv|1k8;|Aٹ6`QkB T8brNDf:?O@ؕ(v;.‡&ђ\cǒٱҢ$a]p,yDe.Xe/e&@ 6)A0 Q6+5^KtI^1?NBkv85?.7>IDIɰaUj-xB ׳}Z;:+ rXc*N;x&; %bt`hTo~Imǯ+yxBbZ(ryJDhxG-F&vo ~+pcgӾ_t50b\#eM_v[3ʇ/ESAN</Y>L]ۻIjCHB& ҿI.v8`Y2]{Ð9MҤQk%:|0n\tchu$a@%(z#}6m՛!p' p "HDVحTԶ2YӸg1`Vw$ R5DMT|"F4,Ba_C$su:>s@qXׅ[vI&bm/a~+Q0K*ɵGFaEf̧LTS !C @rXo MXڝeɾ\uK'gzw% ! u!|rG7rXAt6F_>ߔd5^ԣRw&#}WL;~#L޳}5eś0w;FRЇ%0+wrXjAdJ)>K~s/ܚw[Q}6j#O=$ ,$aCD*>ɇ<RKt@RW_k­To|]Za|ɷ&Ik!*J8IZ#ƨL’&u<:t4| zayuӄXZ0E XM-$hRgLlV>zkeLݒ$Ic\ *x<ɍZkO%yUh6`jfߪNw6~M.ݵCI"%b,"FF3RS c#jaA?['_.d`)=MG0w6HF,R?$>Є;8u͂fqR)IYU(MJQ_x!IQRoHT7jKK)K5 MbcvoIp gÙ'3c̽3EIs/PKL+zZ#,P_ R-;Զ-&=Vݐa랏q>3=qe_z^,O^o l|*Mgh l 4A=B}v@52׀sUZ#ubMu?[U)`$CϜ?Ŧ }nQ|#|\'ݰG>Ǿ=۸Uxm$F*2H,t3tZUPV1w~8Eb!&~ۍ[EMXFAe>UȮڅE]F[6<;r=~$[vVYAu+{xpGV+3uE7n1 7"CB}FyӏjהAn,gSqmL_;ˮƎ=\ʼnIChP5JpNXo$`97|xh¶ #=ݸU2CBbnL;ݛ{,w,_bƭc1qv?xB*EUeLߤHV}&=/i :*[wPzxpަR jAVet>_!qk2:nzco'}PAVAQhDI_sL;i8]mkzf*bЋ ׂb}$`U8Ei+?e'ö]jzM1P;K㸜bp!ߢ$`UT_XfP})Z}&T,D,ԧ2ݩbH Hx}㿨 od~^a[_H,I1" R HFN}$Aw:< X3'Li bA1@ոy;Nؼꋫ43v  + X-y1&ϰӼo,=֜NʬHX7 Xbƭs7ƭ uȅI*WWűǼErR QyOR\N7%on~?ФCb uEH.* 4n-Gꐿ1吀}K,<ՃG QA}]Dne: nW{zxAByUɭ LqJbr6PWo^!*@j 8 Jxե%7@ /"@zAa*`pu{F'NIENDB`buildbot-2.6.0/master/docs/_static/icon.svg000066400000000000000000000022341361162603000206440ustar00rootroot00000000000000 buildbot-2.6.0/master/docs/_templates/000077500000000000000000000000001361162603000177015ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_templates/localtoc.html000066400000000000000000000001421361162603000223640ustar00rootroot00000000000000

{{ _('Table Of Contents') }}

{{ toctree(collapse=True, maxdepth=-1, titles_only=True) }} buildbot-2.6.0/master/docs/_themes/000077500000000000000000000000001361162603000171705ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_themes/qtile/000077500000000000000000000000001361162603000203065ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_themes/qtile/README000066400000000000000000000002771361162603000211740ustar00rootroot00000000000000Qtile theme was originally created for Qtile project (http://qtile.org). Copyright Aldo Cortesi (http://corte.si/) and other Qtile contributors (https://github.com/qtile/qtile/contributors) buildbot-2.6.0/master/docs/_themes/qtile/layout.html000066400000000000000000000001451361162603000225110ustar00rootroot00000000000000{% extends "basic/layout.html" %} {% block relbar2 %}{% endblock %} {% block footer %}{% endblock %} buildbot-2.6.0/master/docs/_themes/qtile/localtoc.html000066400000000000000000000001201361162603000227650ustar00rootroot00000000000000{% if display_toc %}

{{ _('Contents') }}

{{ toc }} {% endif %} buildbot-2.6.0/master/docs/_themes/qtile/sidebar_intro.html000066400000000000000000000006621361162603000240240ustar00rootroot00000000000000

Qtile Documentation

buildbot-2.6.0/master/docs/_themes/qtile/sidebar_links.html000066400000000000000000000006131361162603000240050ustar00rootroot00000000000000

Useful Links

buildbot-2.6.0/master/docs/_themes/qtile/static/000077500000000000000000000000001361162603000215755ustar00rootroot00000000000000buildbot-2.6.0/master/docs/_themes/qtile/static/qtile.css_t000066400000000000000000000062221361162603000237520ustar00rootroot00000000000000{% set page_width = '960px' %} {% set sidebar_width = '220px' %} @import url("http://fonts.googleapis.com/css?family=Inconsolata|Open+Sans:400italic,600italic,700italic,400,600,700"); @import url("basic.css"); /* Layout */ html { font: 62.5% 'Open Sans', sans-serif; } body { background: -webkit-gradient(linear, 50% 0%, 50% 175, color-stop(0%, #ccc), color-stop(100%, #f6f6f6)); background: -webkit-linear-gradient(top, #ccc 0%, #f6f6f6 175px); background: -moz-linear-gradient(top, #ccc 0%, #f6f6f6 175px); background: -o-linear-gradient(top, #ccc 0%, #f6f6f6 175px); background: linear-gradient(top, #ccc 0%, #f6f6f6 175px); background-color: #f6f6f6; background-repeat: no-repeat; font-size: 1.5em; margin: 0; padding: 0; /* text-shadow: 1px 1px rgba(0, 0, 0, 0.1); */ } .toc-backref { text-decoration: none; } h1, h2, h3, h4, h5, h6 { /* border-bottom-style: dotted */ border-bottom-style: solid; border-bottom-width: thin } .admonition { border: #ddd 1px solid; border-left-width: 5px; background-color: #f8f8f8; } .note { border-left-color: blue; } .warning { border-left-color: red; } div.document { margin: 0px auto; width: {{ page_width }}; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 {{ sidebar_width }}; } div.sphinxsidebarwrapper { padding: 20px 5px 0px 5px; } div.sphinxsidebar { width: {{ sidebar_width }}; } div.related { display: none; } /* Style */ a { color: #369; text-decoration: underline; } a:hover, a:focus, a:active { color: #c63; } div.sphinxsidebar .logo { margin-top: 0px; } div.sphinxsidebar .logo a { color: #000; font-weight: normal; text-decoration: none; } div.sphinxsidebar .logo a:hover, div.sphinxsidebar .logo a:focus, div.sphinxsidebar .logo a:active { color: #036; } div.sphinxsidebar input[type="submit"] { padding: 2px 5px; } div.sphinxsidebar ul { padding: 0; } div.sphinxsidebar ul ul { padding: 0 0 0 10px; } div.body { /*-webkit-border-top-left-radius: 5px; -webkit-border-top-right-radius: 5px; -moz-border-radius-topleft: 5px; -moz-border-radius-topright: 5px; border-top-left-radius: 5px; border-top-right-radius: 5px; background: #fff;*/ line-height: 1.4; padding: 20px; } div.body > :first-child, div.body > .section > :first-child { margin-top: 0px; } div.body > :last-child { margin-top: 0px; } div.footer { background: #15181a; color: #fff; font-size: .8em; text-align: center; padding: 10px 0px; } div.footer a { color: #eee; } /* Code highlighting */ tt { font-size: 1.1em; } div.highlight { background: #fff; border: #ccc 1px solid; border-left-width: 3px; font-size: 1.1em; padding: 10px; } div.highlight pre { margin: 0px; } /* Homepage */ ul.screenshots { list-style-type: none; margin-left: 30px; padding: 0px; } ul.screenshots li { display: inline; } ul.screenshots li a { text-decoration: none; } p.biglink:first-child { margin-top: 0px; } /* Other */ dt tt.literal { font-style: italic; } buildbot-2.6.0/master/docs/_themes/qtile/theme.conf000066400000000000000000000001071361162603000222550ustar00rootroot00000000000000[theme] inherit = basic stylesheet = qtile.css [options] touch_icon = 
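The qtile theme files above (theme.conf inheriting Sphinx's basic theme, plus the qtile.css_t stylesheet template) only take effect when the documentation build points Sphinx at this _themes directory. A minimal sketch of the relevant settings, mirroring the values that appear in master/docs/conf.py later in this archive:

# Sketch: selecting the bundled "qtile" theme from a Sphinx conf.py.
# These two settings mirror master/docs/conf.py; paths are relative to the docs directory.
html_theme = 'qtile'            # name matches _themes/qtile/theme.conf
html_theme_path = ['_themes']   # directory containing the custom theme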
buildbot-2.6.0/master/docs/bbdocs/000077500000000000000000000000001361162603000170005ustar00rootroot00000000000000buildbot-2.6.0/master/docs/bbdocs/__init__.py000066400000000000000000000000001361162603000210770ustar00rootroot00000000000000buildbot-2.6.0/master/docs/bbdocs/ext.py000066400000000000000000000352271361162603000201630ustar00rootroot00000000000000# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from docutils import nodes from docutils.parsers.rst import Directive from sphinx import addnodes from sphinx.domains import Domain from sphinx.domains import Index from sphinx.domains import ObjType from sphinx.roles import XRefRole from sphinx.util import ws_re from sphinx.util.docfields import DocFieldTransformer from sphinx.util.docfields import Field from sphinx.util.docfields import TypedField from sphinx.util.nodes import make_refnode class BBRefTargetDirective(Directive): """ A directive that can be a target for references. Attributes: @cvar ref_type: same as directive name @cvar indextemplates: templates for main index entries, if any """ has_content = False name_annotation = None required_arguments = 1 optional_arguments = 0 final_argument_whitespace = True option_spec = {} domain = 'bb' def run(self): self.env = env = self.state.document.settings.env # normalize whitespace in fullname like XRefRole does fullname = ws_re.sub(' ', self.arguments[0].strip()) targetname = '%s-%s' % (self.ref_type, fullname) # keep the target; this may be used to generate a BBIndex later targets = env.domaindata['bb']['targets'].setdefault(self.ref_type, {}) targets[fullname] = env.docname, targetname # make up the descriptor: a target and potentially an index descriptor node = nodes.target('', '', ids=[targetname]) ret = [node] # add the target to the document self.state.document.note_explicit_target(node) # append the index node if necessary entries = [] for tpl in self.indextemplates: colon = tpl.find(':') if colon != -1: indextype = tpl[:colon].strip() indexentry = tpl[colon + 1:].strip() % (fullname,) else: indextype = 'single' indexentry = tpl % (fullname,) entries.append( (indextype, indexentry, targetname, targetname, None)) if entries: inode = addnodes.index(entries=entries) ret.insert(0, inode) # if the node has content, set up a signature and parse the content if self.has_content: descnode = addnodes.desc() descnode['domain'] = 'bb' descnode['objtype'] = self.ref_type descnode['noindex'] = True signode = addnodes.desc_signature(fullname, '') if self.name_annotation: annotation = "%s " % self.name_annotation signode += addnodes.desc_annotation(annotation, annotation) signode += addnodes.desc_name(fullname, fullname) descnode += signode contentnode = addnodes.desc_content() self.state.nested_parse(self.content, 0, contentnode) DocFieldTransformer(self).transform_all(contentnode) descnode += contentnode ret.append(descnode) return ret @classmethod def 
resolve_ref(cls, domain, env, fromdocname, builder, typ, target, node, contnode): """ Resolve a reference to a directive of this class """ targets = domain.data['targets'].get(cls.ref_type, {}) try: todocname, targetname = targets[target] except KeyError: env.warn(fromdocname, "Missing BB reference: bb:%s:%s" % (cls.ref_type, target), node.line) return None return make_refnode(builder, fromdocname, todocname, targetname, contnode, target) def make_ref_target_directive(ref_type, indextemplates=None, **kwargs): """ Create and return a L{BBRefTargetDirective} subclass. """ class_vars = dict(ref_type=ref_type, indextemplates=indextemplates) class_vars.update(kwargs) return type("BB%sRefTargetDirective" % (ref_type.capitalize(),), (BBRefTargetDirective,), class_vars) class BBIndex(Index): """ A Buildbot-specific index. @cvar name: same name as the directive and xref role @cvar localname: name of the index document """ def generate(self, docnames=None): content = {} idx_targets = self.domain.data['targets'].get(self.name, {}) for name, (docname, targetname) in idx_targets.items(): letter = name[0].upper() content.setdefault(letter, []).append( (name, 0, docname, targetname, '', '', '')) content = [(l, sorted(content[l], key=lambda tup: tup[0].lower())) for l in sorted(content.keys())] return (content, False) @classmethod def resolve_ref(cls, domain, env, fromdocname, builder, typ, target, node, contnode): """ Resolve a reference to an index to the document containing the index, using the index's C{localname} as the content of the link. """ # indexes appear to be automatically generated at doc DOMAIN-NAME todocname = "bb-%s" % target node = nodes.reference('', '', internal=True) node['refuri'] = builder.get_relative_uri(fromdocname, todocname) node['reftitle'] = cls.localname node.append(nodes.emphasis(cls.localname, cls.localname)) return node def make_index(name, localname): """ Create and return a L{BBIndex} subclass, for use in the domain's C{indices} """ return type("BB%sIndex" % (name.capitalize(),), (BBIndex,), dict(name=name, localname=localname)) class BBDomain(Domain): name = 'bb' label = 'Buildbot' object_types = { 'cfg': ObjType('cfg', 'cfg'), 'sched': ObjType('sched', 'sched'), 'chsrc': ObjType('chsrc', 'chsrc'), 'step': ObjType('step', 'step'), 'reporter': ObjType('reporter', 'reporter'), 'configurator': ObjType('configurator', 'configurator'), 'worker': ObjType('worker', 'worker'), 'cmdline': ObjType('cmdline', 'cmdline'), 'msg': ObjType('msg', 'msg'), 'event': ObjType('event', 'event'), 'rtype': ObjType('rtype', 'rtype'), 'rpath': ObjType('rpath', 'rpath'), } directives = { 'cfg': make_ref_target_directive('cfg', indextemplates=[ 'single: Buildmaster Config; %s', 'single: %s (Buildmaster Config)', ]), 'sched': make_ref_target_directive('sched', indextemplates=[ 'single: Schedulers; %s', 'single: %s Scheduler', ]), 'chsrc': make_ref_target_directive('chsrc', indextemplates=[ 'single: Change Sources; %s', 'single: %s Change Source', ]), 'step': make_ref_target_directive('step', indextemplates=[ 'single: Build Steps; %s', 'single: %s Build Step', ]), 'reporter': make_ref_target_directive('reporter', indextemplates=[ 'single: Reporter Targets; %s', 'single: %s Reporter Target', ]), 'configurator': make_ref_target_directive('configurator', indextemplates=[ 'single: Configurators; %s', 'single: %s Configurators', ]), 'worker': make_ref_target_directive('worker', indextemplates=[ 'single: Build Workers; %s', 'single: %s Build Worker', ]), 'cmdline': make_ref_target_directive('cmdline', 
indextemplates=[ 'single: Command Line Subcommands; %s', 'single: %s Command Line Subcommand', ]), 'msg': make_ref_target_directive('msg', indextemplates=[ 'single: Message Schema; %s', ], has_content=True, name_annotation='routing key:', doc_field_types=[ TypedField('key', label='Keys', names=('key',), typenames=('type',), can_collapse=True), Field('var', label='Variable', names=('var',)), ]), 'event': make_ref_target_directive('event', indextemplates=[ 'single: event; %s', ], has_content=True, name_annotation='event:', doc_field_types=[ ]), 'rtype': make_ref_target_directive('rtype', indextemplates=[ 'single: Resource Type; %s', ], has_content=True, name_annotation='resource type:', doc_field_types=[ TypedField('attr', label='Attributes', names=('attr',), typenames=('type',), can_collapse=True), ]), 'rpath': make_ref_target_directive('rpath', indextemplates=[ 'single: Resource Path; %s', ], name_annotation='path:', has_content=True, doc_field_types=[ TypedField('pathkey', label='Path Keys', names=('pathkey',), typenames=('type',), can_collapse=True), ]), 'raction': make_ref_target_directive('raction', indextemplates=[ 'single: Resource Action; %s', ], name_annotation='POST with method:', has_content=True, doc_field_types=[ TypedField('body', label='Body keys', names=('body',), typenames=('type',), can_collapse=True), ]), } roles = { 'cfg': XRefRole(), 'sched': XRefRole(), 'chsrc': XRefRole(), 'step': XRefRole(), 'reporter': XRefRole(), 'configurator': XRefRole(), 'worker': XRefRole(), 'cmdline': XRefRole(), 'msg': XRefRole(), 'event': XRefRole(), 'rtype': XRefRole(), 'rpath': XRefRole(), 'index': XRefRole() } initial_data = { 'targets': {}, # type -> target -> (docname, targetname) } indices = [ make_index("cfg", "Buildmaster Configuration Index"), make_index("sched", "Scheduler Index"), make_index("chsrc", "Change Source Index"), make_index("step", "Build Step Index"), make_index("reporter", "Reporter Target Index"), make_index("configurator", "Configurator Target Index"), make_index("worker", "Build Worker Index"), make_index("cmdline", "Command Line Index"), make_index("msg", "MQ Routing Key Index"), make_index("event", "Data API Event Index"), make_index("rtype", "REST/Data API Resource Type Index"), make_index("rpath", "REST/Data API Path Index"), make_index("raction", "REST/Data API Actions Index"), ] def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode): if typ == 'index': for idx in self.indices: if idx.name == target: break else: raise KeyError("no index named '%s'" % target) return idx.resolve_ref(self, env, fromdocname, builder, typ, target, node, contnode) elif typ in self.directives: dir = self.directives[typ] return dir.resolve_ref(self, env, fromdocname, builder, typ, target, node, contnode) def setup(app): app.add_domain(BBDomain) buildbot-2.6.0/master/docs/bbdocs/highlighterrors.py000066400000000000000000000057521361162603000225670ustar00rootroot00000000000000 import sys import textwrap from pkg_resources import parse_version # Monkey-patch Sphinx to treat unhiglighted code as error. import sphinx import sphinx.highlighting from sphinx.errors import SphinxWarning # Versions of Sphinx below changeset 1860:19b394207746 (before v0.6.6 release) # won't work due to different PygmentsBridge interface. 
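# --- added illustrative sketch; not part of the original extension ---
# highlighterrors monkey-patches Sphinx's PygmentsBridge so that a literal block
# which Pygments fails to highlight is reported (or raised as an error) instead of
# being silently rendered verbatim.  The patch is only installed when Sphinx is at
# least the version named below; that gate reduces to a pkg_resources comparison,
# roughly:
def _sketch_sphinx_is_supported(installed_version, minimum='0.6.6'):
    # parse_version is imported at the top of this module
    return parse_version(installed_version) >= parse_version(minimum)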
required_sphinx_version = '0.6.6' sphinx_version_supported = \ parse_version(sphinx.__version__) >= parse_version(required_sphinx_version) # This simple monkey-patch allows either fail on first unhighlighted block or # print all unhighlighted blocks and don't fail at all. # First behaviour is useful for testing that all code is highlighted, second --- # for fixing lots of unhighlighted code. fail_on_first_unhighlighted = True class UnhighlightedError(SphinxWarning): pass # PygmentsBridge.unhighlighted() added in Sphinx in changeset 574:f1c885fdd6ad # (0.5 release). def patched_unhighlighted(self, source): indented_source = ' ' + '\n '.join(source.split('\n')) if fail_on_first_unhighlighted: msg = textwrap.dedent("""\ Block not highlighted: %s If it should be unhighlighted, please specify explicitly language of this block as "none": .. code-block:: none ... If this block is Python example, then it probably contains syntax errors, such as unmatched brackets or invalid indentation. Note that in most places you can use "..." in Python code as valid anonymous expression. """) % indented_source raise UnhighlightedError(msg) else: msg = textwrap.dedent("""\ Unhighlighted block: %s """) % indented_source sys.stderr.write(msg.encode('ascii', 'ignore')) return orig_unhiglighted(self, source) # Compatible with PygmentsBridge.highlight_block since Sphinx' # 1860:19b394207746 changeset (v0.6.6 release) def patched_highlight_block(self, *args, **kwargs): try: return orig_highlight_block(self, *args, **kwargs) except UnhighlightedError as ex: msg = ex.args[0] if 'warn' in kwargs: kwargs['warn'](msg) raise def setup(app): global orig_unhiglighted, orig_highlight_block if sphinx_version_supported: orig_unhiglighted = sphinx.highlighting.PygmentsBridge.unhighlighted orig_highlight_block = sphinx.highlighting.PygmentsBridge.highlight_block sphinx.highlighting.PygmentsBridge.unhighlighted = patched_unhighlighted sphinx.highlighting.PygmentsBridge.highlight_block = patched_highlight_block else: msg = textwrap.dedent("""\ WARNING: Your Sphinx version %s is too old and will not work with monkey-patch for checking unhighlighted code. Minimal required version of Sphinx is %s. Check disabled. """) % (sphinx.__version__, required_sphinx_version) sys.stderr.write(msg) buildbot-2.6.0/master/docs/bbdocs/test/000077500000000000000000000000001361162603000177575ustar00rootroot00000000000000buildbot-2.6.0/master/docs/bbdocs/test/__init__.py000066400000000000000000000000001361162603000220560ustar00rootroot00000000000000buildbot-2.6.0/master/docs/buildbot.1000066400000000000000000000204071361162603000174350ustar00rootroot00000000000000.\" This file is part of Buildbot. Buildbot is free software: you can .\" redistribute it and/or modify it under the terms of the GNU General Public .\" License as published by the Free Software Foundation, version 2. .\" .\" This program is distributed in the hope that it will be useful, but WITHOUT .\" ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS .\" FOR A PARTICULAR PURPOSE. See the GNU General Public License for more .\" details. .\" .\" You should have received a copy of the GNU General Public License along with .\" this program; if not, write to the Free Software Foundation, Inc., 51 .\" Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
.\" .\" Copyright Buildbot Team Members .TH BUILDBOT "1" "August 2010" "Buildbot" "User Commands" .SH NAME buildbot \- a tool for managing buildbot master instances .SH SYNOPSIS .SS General Invocation .PP .B buildbot [ .BR "global options" ] .I command [ .BR "command options" ] .PP .B buildbot .I command .BR \-h | \-\-help .SS Command Options .PP .B buildbot create-master [ .BR \-q | \-\-quiet ] [ .BR \-f | \-\-force ] [ .BR \-r | \-\-relocatable ] [ .BR \-n | \-\-no-logrotate ] [ .BR \-s | \-\-log-size .I SIZE ] [ .BR \-l | \-\-log-count .I COUNT ] [ .BR \-c | \-\-config .I CONFIG ] [ .BR \-\-db .I DATABASE ] [ .I PATH ] .PP .B buildbot upgrade-master [ .BR \-q | \-\-quiet ] [ .BR \-r | \-\-replace ] [ .BR \-\-db .I DATABASE ] [ .I PATH ] .PP .B buildbot [ .BR \-\-verbose ] { .BR start | stop | restart | sighup | reconfig } [ .I PATH ] .PP .B buildbot sendchange [ .BR \-m | \-\-master .I MASTER ] [ .BR \-u | \-\-username .I USERNAME ] [ .BR \-R | \-\-repository .I REPOSITORY ] [ .BR \-P | \-\-project .I PROJECT ] [ .BR \-b | \-\-branch .I BRANCH ] [ .BR \-C | \-\-category .I CATEGORY ] [ .BR \-r | \-\-revision .I REVISION ] [ .BR \-\-revision-file .I REVISIONFILE ] [ .BR \-p | \-\-property .I PROPERTY ] [ .BR \-c | \-\-comments .I MESSAGE ] [ .BR \-F | \-\-logfile .I LOGFILE ] [ .BR \-w | \-\-when .I TIMESTAMP ] .IR FILES ... .PP .B buildbot try [ .BR \-\-wait ] [ .BR \-n | \-\-dry-run ] [ .BR \-\-get-builder-names ] [ .BR \-c | \-\-connect {ssh|pb} ] [ .BR \-\-tryhost .I HOSTNAME ] [ .BR \-\-trydir .I PATH ] [ .BR \-m | \-\-master .I MASTER ] [ .BR \-u | \-\-username .I USERNAME ] [ .BR \-\-passwd .I PASSWORD ] [ .BR \-\-diff .I DIFF ] [ .BR \-\-patchlevel .I PATCHLEVEL ] [ .BR \-\-baserev .I BASEREV ] [ .BR \-\-vc {cvs|svn|tla|baz|darcs|p4} ] [ .BR \-\-branch .I BRANCH ] [ .BR \-b | \-\-builder .I BUILDER ] [ .BR \-\-properties .I PROPERTIES ] [ .BR \-\-try-topfile .I FILE ] [ .BR \-\-try-topdir .I PATH ] .PP .B buildbot tryserver [ .BR \-\-jobdir .I PATH ] .PP .B buildbot checkconfig [ .I CONFIGFILE ] .PP .B buildbot [ .BR \-\-verbose ] { .BR start | stop | restart | sighup | reconfig } [ .I PATH ] .PP .B buildbot [ .BR \-\-verbose ] { .BR \-\-help | \-\-version } .SH DESCRIPTION The `buildbot' command-line tool can be used to start or stop a buildmaster and to interact with a running buildmaster instance. Some of its subcommands are intended for buildmaster admins, while some are for developers who are editing the code that the buildbot is monitoring. .SH OPTIONS .SS Commands .TP .BR create-master Create and populate a directory for a new buildmaster .TP .BR upgrade-master Upgrade an existing buildmaster directory for the current version .TP .BR start Start a buildmaster .TP .BR stop Stop a buildmaster .TP .BR restart Restart a buildmaster .TP .BR sighup | reconfig Send SIGHUP signal to buildmaster to make it re-read the config file .TP .BR sendchange Send a change to the buildmaster .TP .BR try Run a build with your local changes. This command requires in-advance configuration of the buildmaster to accept such build requests. Please see the documentation for details about this command. .TP .BR tryserver buildmaster-side \'try\' support function, not for users .TP .BR checkconfig Validate buildbot master config file. .SS Global options .TP .BR \-h | \-\-help Print the list of available commands and global options. All subsequent commands are ignored. .TP .BR --version Print Buildbot and Twisted versions. All subsequent commands are ignored. .TP .BR --verbose Verbose output. 
.SS create-master command options .TP .BR \-q | \-\-quiet Do not emit the commands being run .TP .BR \-f | \-\-force Re-use an existing directory (will not overwrite master.cfg file) .TP .BR \-r | \-\-relocatable Create a relocatable buildbot.tac .TP .BR \-n | \-\-no-logrotate Do not permit buildmaster rotate logs by itself. .TP .BR \-c | \-\-config Set name of the buildbot master config file to .IR CONFIG . Default file name is master.cfg. .TP .BR \-s | \-\-log-size Set size at which twisted lof file is rotated to .I SIZE bytes. Default value is 1000000 bytes. .TP .BR \-l | \-\-log-count Limit the number of kept old twisted log files to .IR COUNT . All files are kept by default. .TP .BR \-\-db Set the database connection for storing scheduler/status state to .IR DATABASE . Default value is .BR "sqlite:///state.sqlite" . .TP .I PATH Directory where buildbot master files will be stored. .SS upgrade-master command options .TP .BR \-q | \-\-quiet Do not emit the commands being run. .TP .BR \-r | \-\-replace Replace any modified files without confirmation. .TP .BR \-\-db Set the database connection for storing scheduler/status state to .IR DATABASE . Default value is .BR "sqlite:///state.sqlite" . .TP .I PATH Directory where buildbot master files are stored. .SS sendchange command options .TP .B \-\-master Set the location of buildmaster's PBChangeSource to attach to in form .IR HOST : PORT . .TP .BR \-u | \-\-username Set committer's username to .IR USERNAME . .TP .BR \-R | \-\-repository Set repository URL to .IR REPOSITORY . .TP .BR \-P | \-\-project Set project specifier to .IR PROJECT . .TP .BR \-b | \-\-branch Set branch name to .IR BRANCH . .TP .BR \-c | \-\-category Set category of repository to .IR CATEGORY . .TP .BR \-r | \-\-revision Set revision being built to .IR REVISION . .TP .BR \-\-revision-file Use .I REVISIONFILE file to read revision spec data from. .TP .BR \-p | \-\-property Set property for the change to .IR PROPERTY . It should be in format .IR NAME : VALUE . .TP .BR \-m | \-\-comments Set log message to .IR MESSAGE . .TP .BR \-F | \-\-logfile Set logfile to .IR LOGFILE . .TP .BR \-w | \-\-when Set timestamp used as the change time to .IR TIMESTAMP . .TP .I FILES Lis of files have been changed. .SS try command options .TP .BR \-\-wait Wait until the builds have finished. .TP .BR \-n | \-\-dry-run Gather info, but don't actually submit. .TP .BR \-\-get-builder-names Get the names of available builders. Doesn't submit anything. Only supported for 'pb' connections. .TP .BR \-c | \-\-connect Connection type. Can be either \'ssh\' or \'pb\'. .TP .BR \-\-tryhost Set the hostname (used by ssh) for the buildmaster to .IR HOSTNAME . .TP .BR \-\-trydir Specify trydir (on the tryhost) where tryjobs are deposited. .TP .BR \-m | \-\-master Set the location of the buildmaster's try scheduler in form .IR HOST : PORT .TP .BR \-u | \-\-username Set the username performing the trial build to .IR USERNAME . .TP .BR \-\-passwd Set password for PB authentication to .IR PASSWORD . .TP .BR \-\-diff Use .I DIFF file to use as a patch instead of scanning a local tree. Use \'-\' for stdin. .TP .BR \-\-patchlevel Specify the patchlevel to apply with. Defaults to 0. See .BR patch for details. .TP .BR \-\-baserev Use .I BASEREV revision instead of scanning a local tree. .TP .BR \-\-vc Specify version control system in use. Possible values: cvs, svn, tla, baz, darcs, p4. .TP .BR \-\-branch Specify the branch in use, for VC systems that can't figure it out themselves. 
.TP .BR \-b | \-\-builder Run the trial build on the specified Builder. Can be used multiple times. .TP .BR \-\-properties Specify the set of properties made available in the build environment in format .IR prop1 = value1 , prop2 = value2 ... .TP .BR \-\-try-topfile Specify name of a file at the top of the tree. This option is used to find the top. Only needed for SVN and CVS. .TP .BR \-\-try-topdir Specify the path to the top of the working copy. Only needed for SVN and CVS. .SS tryserver command options .TP .BR \-\-jobdir The jobdir (maildir) for submitting jobs .SH FILES .TP master.cfg Buildbot master configuration file .SH "SEE ALSO" .BR buildbot-worker (1), .BR patch (1) buildbot-2.6.0/master/docs/conf.py000077500000000000000000000265721361162603000170620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Buildbot documentation build configuration file, created by # sphinx-quickstart on Tue Aug 10 15:13:31 2010. # # This file is exec()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import pkg_resources import sys import textwrap # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(1, os.path.dirname(os.path.abspath(__file__))) try: from buildbot.util.raml import RamlSpec from buildbot.reporters.telegram import TelegramContact except ImportError: sys.path.insert(2, os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir)) from buildbot.util.raml import RamlSpec from buildbot.reporters.telegram import TelegramContact # -- General configuration ----------------------------------------------- try: import sphinxcontrib.blockdiag assert sphinxcontrib.blockdiag except ImportError: raise RuntimeError("sphinxcontrib.blockdiag is not installed. " "Please install documentation dependencies with `pip install buildbot[docs]`") try: pkg_resources.require('docutils>=0.8') except pkg_resources.ResolutionError: raise RuntimeError("docutils is not installed or has incompatible version. " "Please install documentation dependencies with `pip install buildbot[docs]`") # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.extlinks', 'bbdocs.ext', 'bbdocs.highlighterrors', 'sphinxcontrib.blockdiag', 'sphinxcontrib.jinja', ] todo_include_todos = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Buildbot' copyright = u'Buildbot Team Members' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. 
if 'VERSION' in os.environ: version = os.environ['VERSION'] else: gl = {'__file__': '../buildbot/__init__.py'} with open('../buildbot/__init__.py') as f: exec(f.read(), gl) version = gl['version'] # The full version, including alpha/beta/rc tags. release = version # blocksiag/seqdiag blockdiag_html_image_format = 'svg' blocdiag_transparency = True # add a loud note about python 2 rst_prolog = textwrap.dedent("""\ .. caution:: Buildbot no longer supports Python 2.7 on the Buildbot master. """) # add a loud note for anyone looking at the latest docs if release == 'latest': rst_prolog += textwrap.dedent("""\ .. caution:: This page documents the latest, unreleased version of Buildbot. For documentation for released versions, see http://docs.buildbot.net/current/. """) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', 'release-notes/*.rst'] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = False # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'trac' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] intersphinx_mapping = { 'python': ('https://python.readthedocs.io/en/latest/', None), 'sqlalchemy': ('https://sqlalchemy.readthedocs.io/en/latest/', None), } extlinks = { 'pull': ('https://github.com/buildbot/buildbot/pull/%s', 'pull request '), 'issue': ('https://github.com/buildbot/buildbot/issues/%s', 'issue # '), # deprecated. Use issue instead, and point to Github 'bug': ('http://trac.buildbot.net/ticket/%s', 'bug #'), # Renders as link with whole url, e.g. # :src-link:`master` # renders as # "https://github.com/buildbot/buildbot/blob/master/master". # Explicit title can be used for customizing how link looks like: # :src-link:`master's directory ` 'src-link': ('https://github.com/buildbot/buildbot/tree/master/%s', None), # "pretty" reference that looks like relative path in Buildbot source tree # by default. 'src': ('https://github.com/buildbot/buildbot/tree/master/%s', ''), 'contrib-src': ('https://github.com/buildbot/buildbot-contrib/tree/master/%s', ''), } # Sphinx' link checker. linkcheck_ignore = [ # Local URLs: r'^http://localhost.*', # Available only to logged-in users: r'^https://github\.com/settings/applications$', # Sites which uses SSL that Python 2 can't handle: r'^https://opensource\.org/licenses/gpl-2.0\.php$', r'^https://docs\.docker\.com/engine/installation/$', # Looks like server doesn't like user agent: r'^https://www\.microsoft\.com/en-us/download/details\.aspx\?id=17657$', # Example domain. r'^https?://(.+\.)?example\.org', # Anchor check fails on rendered user files on GitHub, since GitHub uses # custom prefix for anchors in user generated content. 
    r'https://github\.com/buildbot/guanlecoja-ui/tree/master#changelog',
    r'http://mesosphere.github.io/marathon/docs/rest-api.html#post-v2-apps',
]
linkcheck_timeout = 10
linkcheck_retries = 3
linkcheck_workers = 20

# -- Options for HTML output ---------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'qtile'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {'stickysidebar': 'true'}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = os.path.join('_images', 'full_logo.png')

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large or a png.
html_favicon = os.path.join('_static', 'icon.png')

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': ['searchbox.html', 'localtoc.html', 'relations.html', 'sourcelink.html']
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

html_use_index = True
html_use_modindex = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'Buildbotdoc'


# -- Options for LaTeX output --------------------------------------------

latex_elements = {}
# The paper size ('letter' or 'a4').
latex_elements['papersize'] = 'a4'

# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '11pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'Buildbot.tex', u'Buildbot Documentation',
     u'Brian Warner', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = os.path.join('_images', 'header-text-transparent.png')

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# Three possible values for this option (see sphinx config manual) are:
# 1. 'no' - do not display URLs (default)
# 2. 'footnote' - display URLs in footnotes
# 3. 'inline' - display URLs inline in parentheses
latex_show_urls = 'inline'

# Additional stuff for the LaTeX preamble.
# latex_preamble = ''

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True


# -- Options for manual page output --------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'buildbot', u'Buildbot Documentation',
     [u'Brian Warner'], 1)
]

jinja_contexts = {
    "data_api": {'raml': RamlSpec()},
    "telegram": {'commands': TelegramContact.describe_commands()},
}

# Spell checker.
try:
    import enchant  # noqa # pylint: disable=unused-import
except ImportError as ex:
    print("enchant module import failed:\n"
          "{0}\n"
          "Spell checking disabled.".format(ex),
          file=sys.stderr)
else:
    extensions.append('sphinxcontrib.spelling')
    spelling_show_suggestions = True
buildbot-2.6.0/master/docs/developer/000077500000000000000000000000001361162603000175315ustar00rootroot00000000000000buildbot-2.6.0/master/docs/developer/_images/000077500000000000000000000000001361162603000211355ustar00rootroot00000000000000buildbot-2.6.0/master/docs/developer/_images/stats-service.png000066400000000000000000001647771361162603000244510ustar00rootroot00000000000000
[binary PNG image data omitted: stats-service.png, the stats service diagram referenced by the developer documentation]