pybit-1.0.0/0000755000175000017500000000000012146006064012536 5ustar neilneil00000000000000pybit-1.0.0/setup.cfg0000644000175000017500000000007312146006064014357 0ustar neilneil00000000000000[egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 pybit-1.0.0/db/0000755000175000017500000000000012146006064013123 5ustar neilneil00000000000000pybit-1.0.0/db/README.txt0000644000175000017500000000036512045012715014623 0ustar neilneil00000000000000To populate or reset the database use: $ psql --user postgres --host catbells pybit Then do: pybit=# \i /software/trunk/packages/PyBit/db/schema.sql pybit=# \i /software/trunk/packages/PyBit/db/populate.sql pybit-1.0.0/db/populate.sql0000644000175000017500000000302112107360611015467 0ustar neilneil00000000000000INSERT INTO arch (name) VALUES ('armel'), ('i386'); INSERT INTO distribution(name) VALUES ('Debian'); INSERT INTO suite(name) VALUES ('chickpea'), ('development'), ('illgill'); INSERT INTO format(name) VALUES ('deb'); INSERT INTO suitearches(suite_id, arch_id, master_weight) VALUES ( (SELECT id FROM suite WHERE name='development'), (SELECT id FROM arch WHERE name='armel'), 0 ); INSERT INTO suitearches(suite_id, arch_id,master_weight) VALUES ( (SELECT id FROM suite WHERE name='development'), (SELECT id FROM arch WHERE name='i386'), 1 ); INSERT INTO suitearches(suite_id, arch_id,master_weight) VALUES ( (SELECT id FROM suite WHERE name='chickpea'), (SELECT id FROM arch WHERE name='armel'), 0 ); INSERT INTO suitearches(suite_id, arch_id,master_weight) VALUES ( (SELECT id FROM suite WHERE name='chickpea'), (SELECT id FROM arch WHERE name='i386'), 1 ); INSERT INTO suitearches(suite_id, arch_id,master_weight) VALUES ( (SELECT id FROM suite WHERE name='illgill'), (SELECT id FROM arch WHERE name='armel'), 0 ); INSERT INTO suitearches(suite_id, arch_id,master_weight) VALUES ( (SELECT id FROM suite WHERE name='illgill'), (SELECT id FROM arch WHERE name='i386'), 1 ); INSERT INTO buildenv(name) VALUES ('squeeze'); INSERT INTO buildenv(name) VALUES ('wheezy'); INSERT INTO buildclients(name) VALUES ('build_client_pyarm01'), ('build_client_arm02'), ('build_client_buildbox'); INSERT INTO status(name) VALUES ('Waiting'), ('Blocked'), ('Cancelled'), ('Building'), ('Failed'), ('Uploaded'), ('Done'); pybit-1.0.0/db/schema.sql0000644000175000017500000001642412107360611015111 0ustar neilneil00000000000000-- -------------------------------------------------- -- Generated by Enterprise Architect Version 8.0.864 -- Created On : Monday, 08 October, 2012 -- DBMS : PostgreSQL -- -------------------------------------------------- CREATE OR REPLACE FUNCTION make_plpgsql() RETURNS VOID LANGUAGE SQL AS $$ CREATE LANGUAGE plpgsql; $$; SELECT CASE WHEN EXISTS( SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql' ) THEN NULL ELSE make_plpgsql() END; DROP FUNCTION make_plpgsql(); -- Drop Tables, Stored Procedures and Views DROP TABLE IF EXISTS Arch CASCADE ; DROP TABLE IF EXISTS BuildClients CASCADE ; DROP TABLE IF EXISTS Distribution CASCADE ; DROP TABLE IF EXISTS Format CASCADE ; DROP TABLE IF EXISTS Job CASCADE ; DROP TABLE IF EXISTS JobStatus CASCADE ; DROP TABLE IF EXISTS Package CASCADE ; DROP TABLE IF EXISTS PackageInstance CASCADE ; DROP TABLE IF EXISTS Status CASCADE ; DROP TABLE IF EXISTS Suite CASCADE ; DROP TABLE IF EXISTS SuiteArches CASCADE ; DROP TABLE IF EXISTS Blacklist CASCADE ; DROP TABLE IF EXISTS BuildRequest CASCADE ; DROP TABLE IF EXISTS BuildEnv CASCADE ; DROP TABLE IF EXISTS BuildEnvSuiteArch CASCADE ; DROP TABLE IF EXISTS schema_version CASCADE ; 
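-- Illustrative note (not part of the original schema file): a Job points at a
-- PackageInstance and, optionally, the BuildClient that took it, and every state
-- change is appended to JobStatus. Assuming the tables defined below, a query to
-- list each job's status history, newest first, could look like:
--   SELECT j.id, p.name, p.version, s.name AS status, js.time
--   FROM job j
--   JOIN packageinstance pi ON pi.id = j.packageinstance_id
--   JOIN package p ON p.id = pi.package_id
--   JOIN jobstatus js ON js.job_id = j.id
--   JOIN status s ON s.id = js.status_id
--   ORDER BY js.time DESC;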
-- Create Tables - Changed to add NOT NULLs CREATE TABLE schema_version ( id SERIAL PRIMARY KEY NOT NULL ); -- The schema version has to be updated every time we change the schema. -- Make sure to create an update script in vN.sql in updates to allow -- existing databases to be upgraded INSERT INTO schema_version(id) VALUES (5); CREATE TABLE Arch ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; CREATE TABLE BuildClients ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; CREATE TABLE Distribution ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; COMMENT ON TABLE Distribution IS 'For example this could be Debian Squeeze, Ubuntu, Windows 7' ; CREATE TABLE Format ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; COMMENT ON TABLE Format IS 'This is the format of the package instance, for example MSI, deb, rpm, tarball' ; CREATE TABLE Job ( id SERIAL PRIMARY KEY NOT NULL, PackageInstance_id bigint NOT NULL, BuildClient_id bigint ) ; COMMENT ON TABLE Job IS 'A Job is how we build a given package instance, multiple jobs may be submitted for a single package.' ; CREATE TABLE JobStatus ( id SERIAL PRIMARY KEY NOT NULL, Job_id bigint NOT NULL, Status_id bigint NOT NULL, time timestamp NOT NULL DEFAULT now() ) ; CREATE TABLE Package ( id SERIAL PRIMARY KEY NOT NULL, Version text NOT NULL, Name text NOT NULL ) ; CREATE TABLE Blacklist ( id SERIAL PRIMARY KEY NOT NULL, field text NOT NULL, regex text NOT NULL ); COMMENT ON TABLE Blacklist IS 'Blacklist is used internally by submitjob to determine if certain packages are centrally blacklisted using regexes' ; CREATE TABLE PackageInstance ( id SERIAL PRIMARY KEY NOT NULL, Package_id bigint NOT NULL, BuildEnv_id bigint, Arch_id bigint NOT NULL, Suite_id bigint NOT NULL, Dist_id bigint NOT NULL, Format_id bigint NOT NULL, master boolean NOT NULL DEFAULT false -- Master tell us if this instance is the first submitted for a given package, this information is acted on by certain build clients ) ; COMMENT ON COLUMN PackageInstance.master IS 'Master tell us if this instance is the first submitted for a given package, this information is acted on by certain build clients' ; CREATE TABLE Status ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; CREATE TABLE Suite ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; CREATE TABLE SuiteArches ( id SERIAL PRIMARY KEY NOT NULL, Suite_id bigint NOT NULL, Arch_id bigint NOT NULL, master_weight bigint NOT NULL DEFAULT 0 ) ; CREATE TABLE BuildRequest ( id SERIAL PRIMARY KEY NOT NULL, job bigint NOT NULL, method text NOT NULL, uri text NOT NULL, vcs_id text NOT NULL, buildenv_name text DEFAULT '' ); CREATE TABLE BuildEnv ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ) ; CREATE TABLE BuildEnvSuiteArch ( id SERIAL PRIMARY KEY NOT NULL, BuildEnv_id BIGINT NOT NULL, SuiteArch_id BIGINT NOT NULL, FOREIGN KEY (BuildEnv_id) REFERENCES BuildEnv (id), FOREIGN KEY (SuiteArch_id) REFERENCES SuiteArches (id) ) ; COMMENT ON TABLE BuildRequest IS 'BuildRequest is used to log build details so they can in future be requeued.' 
; -- Create Indexes ALTER TABLE Arch ADD CONSTRAINT UQ_Arch_id UNIQUE (id) ; ALTER TABLE BuildClients ADD CONSTRAINT UQ_BuildClients_id UNIQUE (id) ; ALTER TABLE Distribution ADD CONSTRAINT UQ_Distribution_id UNIQUE (id) ; ALTER TABLE Format ADD CONSTRAINT UQ_Format_id UNIQUE (id) ; ALTER TABLE Job ADD CONSTRAINT UQ_Job_id UNIQUE (id) ; ALTER TABLE JobStatus ADD CONSTRAINT UQ_JobStatus_id UNIQUE (id) ; ALTER TABLE Package ADD CONSTRAINT UQ_Package_id UNIQUE (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT UQ_PackageInstance_id UNIQUE (id) ; ALTER TABLE Status ADD CONSTRAINT UQ_Status_id UNIQUE (id) ; ALTER TABLE Suite ADD CONSTRAINT UQ_Suite_id UNIQUE (id) ; ALTER TABLE SuiteArches ADD CONSTRAINT UQ_Stuite_Arches_id UNIQUE (id) ; ALTER TABLE Blacklist ADD CONSTRAINT UQ_Blacklist_id UNIQUE (id) ; ALTER TABLE BuildRequest ADD CONSTRAINT UQ_BuildRequest_id UNIQUE (id) ; -- Create Constraints for uniqueness of some fields ALTER TABLE Arch ADD CONSTRAINT UQ_Arch_name UNIQUE (name) ; ALTER TABLE BuildClients ADD CONSTRAINT UQ_BuildClients_name UNIQUE (name) ; ALTER TABLE Distribution ADD CONSTRAINT UQ_Distribution_name UNIQUE (name) ; ALTER TABLE Format ADD CONSTRAINT UQ_Format_name UNIQUE (name) ; ALTER TABLE Package ADD CONSTRAINT UQ_Package_name_version UNIQUE (name,version) ; ALTER TABLE Status ADD CONSTRAINT UQ_Status_name UNIQUE (name) ; ALTER TABLE Suite ADD CONSTRAINT UQ_Suite_name UNIQUE (name) ; -- Create Foreign Key Constraints ALTER TABLE Job ADD CONSTRAINT FK_Job_BuildClients FOREIGN KEY (BuildClient_id) REFERENCES BuildClients (id) ; ALTER TABLE Job ADD CONSTRAINT FK_Job_PackageInstance FOREIGN KEY (PackageInstance_id) REFERENCES PackageInstance (id) ; ALTER TABLE JobStatus ADD CONSTRAINT FK_JobStatus_Job FOREIGN KEY (Job_id) REFERENCES Job (id) ; ALTER TABLE JobStatus ADD CONSTRAINT FK_JobStatus_Status FOREIGN KEY (Status_id) REFERENCES Status (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_Arch FOREIGN KEY (Arch_id) REFERENCES Arch (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_Distribution FOREIGN KEY (Dist_id) REFERENCES Distribution (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_Format FOREIGN KEY (Format_id) REFERENCES Format (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_Package FOREIGN KEY (Package_id) REFERENCES Package (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_Suite FOREIGN KEY (Suite_id) REFERENCES Suite (id) ; ALTER TABLE SuiteArches ADD CONSTRAINT FK_SuiteArches_Suite FOREIGN KEY (Suite_id) REFERENCES Suite (id) ; ALTER TABLE SuiteArches ADD CONSTRAINT FK_SuiteArches_Arch FOREIGN KEY (Arch_id) REFERENCES Arch (id) ; ALTER TABLE BuildRequest ADD CONSTRAINT FK_BuildRequest_Job FOREIGN KEY (job) REFERENCES Job (id) ; ALTER TABLE PackageInstance ADD CONSTRAINT FK_PackageInstance_BuildEnv FOREIGN KEY (BuildEnv_id) REFERENCES BuildEnv (id) ; pybit-1.0.0/db/updates/0000755000175000017500000000000012146006064014570 5ustar neilneil00000000000000pybit-1.0.0/db/updates/v4.sql0000644000175000017500000000114012106502745015641 0ustar neilneil00000000000000create or replace function run_update() returns void as $$ begin if (SELECT count(*) FROM schema_version WHERE id=3) then CREATE TABLE BuildEnv ( id SERIAL PRIMARY KEY NOT NULL, Name text NOT NULL ); CREATE TABLE BuildEnvSuiteArch ( id SERIAL PRIMARY KEY NOT NULL, BuildEnv_id BIGINT NOT NULL, SuiteArch_id BIGINT NOT NULL, FOREIGN KEY (BuildEnv_id) REFERENCES BuildEnv (id), FOREIGN KEY (SuiteArch_id) REFERENCES 
SuiteArches (id) );
ALTER TABLE BuildRequest ADD buildenv_name text DEFAULT '';
UPDATE schema_version SET id=4;
end if;
end;
$$ LANGUAGE plpgsql;
SELECT run_update();

pybit-1.0.0/db/updates/v5.sql
create or replace function run_update() returns void as $$
begin
if (SELECT count(*) from schema_version WHERE id=4) then
ALTER TABLE PackageInstance ADD COLUMN buildenv_id bigint;
UPDATE schema_version SET id=5;
end if;
end;
$$ LANGUAGE plpgsql;
SELECT run_update();

pybit-1.0.0/db/updates/v3.sql
CREATE OR REPLACE FUNCTION make_plpgsql() RETURNS VOID LANGUAGE SQL AS $$ CREATE LANGUAGE plpgsql; $$;
SELECT CASE WHEN EXISTS( SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql' ) THEN NULL ELSE make_plpgsql() END;
DROP FUNCTION make_plpgsql();
create or replace function run_update() returns void as $$
begin
if (SELECT count(*) from schema_version WHERE id=2) then
CREATE TABLE BuildRequest (
 id SERIAL PRIMARY KEY NOT NULL,
 job bigint NOT NULL,
 method text NOT NULL,
 uri text NOT NULL,
 vcs_id text NOT NULL,
 FOREIGN KEY (job) REFERENCES Job (id)
);
UPDATE schema_version SET id=3;
end if;
end;
$$ LANGUAGE plpgsql;
SELECT run_update();

pybit-1.0.0/db/updates/v1.sql
CREATE OR REPLACE FUNCTION make_plpgsql() RETURNS VOID LANGUAGE SQL AS $$ CREATE LANGUAGE plpgsql; $$;
SELECT CASE WHEN EXISTS( SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql' ) THEN NULL ELSE make_plpgsql() END;
DROP FUNCTION make_plpgsql();
create or replace function run_update() returns void as $$
begin
if not exists (SELECT * FROM information_schema.tables WHERE table_catalog = CURRENT_CATALOG AND table_schema = CURRENT_SCHEMA AND table_name = 'schema_version') then
CREATE TABLE schema_version (
 id SERIAL PRIMARY KEY NOT NULL
);
insert into schema_version(id) values (1);
ALTER TABLE suitearches ADD COLUMN master_weight bigint NOT NULL DEFAULT 0;
end if;
end;
$$ LANGUAGE plpgsql;
SELECT run_update();

pybit-1.0.0/db/updates/v2.sql
CREATE OR REPLACE FUNCTION make_plpgsql() RETURNS VOID LANGUAGE SQL AS $$ CREATE LANGUAGE plpgsql; $$;
SELECT CASE WHEN EXISTS( SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql' ) THEN NULL ELSE make_plpgsql() END;
DROP FUNCTION make_plpgsql();
create or replace function run_update() returns void as $$
begin
if (SELECT count(*) from schema_version WHERE id=1) then
CREATE TABLE Blacklist (
 id SERIAL PRIMARY KEY NOT NULL,
 field text NOT NULL,
 regex text NOT NULL
);
update schema_version set id =2;
end if;
end;
$$ LANGUAGE plpgsql;
SELECT run_update();

pybit-1.0.0/README.txt
pyBit (πβ or πβιϑ) itself is what it says on the tin, the Python Build Integration Toolkit. The aim was to create a distributed, AMQP-based build system using Python and RabbitMQ. We talk to the queue using python-amqplib, with messages being encoded using python-jsonpickle. python-bottle is used to provide a lightweight HTTP-based API, which the associated (static) web GUI can query using the jQuery JavaScript library.

Design

The system consists of two parts: a single server, and one to many clients. pybit-web is the server part, and pybit-client the client part.
You will also need a PostgreSQL database, which we speak to using python-psycopg2. The front-end (static HTML) web GUI queries the back-end (Python) HTTP API using the jQuery JavaScript library. The HTML side of the web GUI is not otherwise coupled to the system, as it does not speak to the queue or database directly. Likewise, the client only speaks to the controller using the queue and the HTTP API, never the database.

By loosely coupling components, we aim to make it easy to extend the system to support a variety of different configurations and scenarios. We aim to be flexible enough to build any combination of package type (e.g. DEB, RPM) for any architecture, for any system (even, say, .MSI installers for MS Windows). Currently, however, we are mostly concerned with building ARM and i386 packages targeting Debian GNU/Linux 'Wheezy' and above.

Data Exchange

We use a well-known interchange format (JSON, via the JSONPickle library) and present a RESTful HTTP-based API. For example, using the GET verb on http://[server]/job returns a list of all running build jobs, while http://[server]/job/1 returns the specific job instance with the ID 1. Note that we proxy POST and PUT, and map /[object]/[id]/delete to DELETE. This is because HTML forms in most browsers can only do GET and POST (jQuery can do more using its AJAX functions). A small python-requests sketch of these calls appears further down, after the default client configuration example.

Requirements

Note that bottle.py gained a new route rule syntax, amongst other things, in Bottle 0.10.x. However, Debian Squeeze has python-bottle 0.8.x in the repository. Therefore, we no longer support Squeeze. This can be worked around by overriding with a copy of bottle from upstream. Drop us a message if you really want Squeeze support. python-requests is available from squeeze-backports, as is python-psycopg2. Do NOT use the version of psycopg2 from squeeze/main if you intend to use a multi-threaded web server; this is unsupported.

Installation

In /db, there are scripts to create and populate the database manually. However, when installing on Debian or a Debian derivative, we recommend you use dbconfig-common and debconf during the package installation process. See the Debian packaging at https://github.com/codehelp/pybit-debian for more information.

Running the software

The connection string, hostname, etc. are stored in /etc/pybit/configs/*. These should be set during the package installation process, but you may edit and configure these as you wish. Then simply start the server first, then the client(s) second. The client will pick up any suitable queued jobs when it comes up.

For support: IRC - #pybit on irc.oftc.net irc://irc.oftc.net/pybit (or use GitHub issues).

Packaging requirements

* rabbitmq-server http://packages.qa.debian.org/r/rabbitmq-server.html
  o http://www.rabbitmq.com/
* python-amqplib http://packages.qa.debian.org/p/python-amqplib.html
  o http://code.google.com/p/py-amqplib/
* python-debian http://packages.qa.debian.org/p/python-debian.html
* python-jsonpickle http://packages.qa.debian.org/j/jsonpickle.html
* python-bottle http://packages.qa.debian.org/p/python-bottle.html
  o http://bottlepy.org/
* python-psycopg2 http://packages.qa.debian.org/p/psycopg2.html
* python-requests http://packages.qa.debian.org/p/python-requests.html

Debian packaging is now on https://github.com/codehelp/pybit-debian

Useful links:

* http://www.rabbitmq.com/tutorials/tutorial-one-python.html - A RabbitMQ tutorial using Python.
* http://jsonpickle.github.com/ * http://initd.org/psycopg/ * http://www.rabbitmq.com/ * http://bottlepy.org/docs/stable/index.html * http://docs.python-requests.org/en/latest/ pybit-1.0.0/configs/0000755000175000017500000000000012146006064014166 5ustar neilneil00000000000000pybit-1.0.0/configs/web/0000755000175000017500000000000012146006064014743 5ustar neilneil00000000000000pybit-1.0.0/configs/web/web.conf0000644000175000017500000000156512145727142016404 0ustar neilneil00000000000000{ "web": { "app": "wsgiref", "debug": false, "interface": "0.0.0.0", "hostname": "localhost", "port": 8080, "reloader": false, "protocol" : "http://", "jqueryurl" : "ajax.googleapis.com/ajax/libs/jquery/1.8.2/jquery.min.js", "jqueryformurl" : "malsup.github.com/jquery.form.js", "installed_path" : "/usr/share/pybit-web/static", "username" : "admin", "password" : "pass" }, "controller": { "debug": false, "rabbit_url": "localhost:5672", "rabbit_userid": "guest", "rabbit_password": "guest", "rabbit_virtual_host": "/", "rabbit_insist": "False" }, "db": { "debug": false, "hostname": "", "databasename": "pybit", "port": 5432, "user": "postgres", "password" : null } } pybit-1.0.0/configs/watcher/0000755000175000017500000000000012146006064015623 5ustar neilneil00000000000000pybit-1.0.0/configs/watcher/watcher.conf0000644000175000017500000000046512107360611020132 0ustar neilneil00000000000000{ "configured": false, "dryrun": true, "sleeptime": 3, "rules": { "/tmp/watcher/1": { "repobase": "/path/to/repo1", "rule": "squeeze" }, "/tmp/watcher/2": { "repobase": "/path/to/repo2", "rule": "wheezy" } } }pybit-1.0.0/configs/buildd-test.conf0000644000175000017500000000236212145727002017261 0ustar neilneil00000000000000{ "count": 4, "vcs_id1": "8356", "method_type1": "svn", "suite1": "pybit", "package1": "multistrap", "version1": "2.1.20", "architecture1": "i386", "source1": "multistrap", "uri1": "http://www.emdebian.org/svn/current/host/trunk/multistrap/trunk", "pkg_format1": "deb", "distribution1": "debian", "role1": "master", "commands1": "", "vcs_id2": "", "package2": "apt-transport-debtorrent", "method_type2": "svn", "version2": "0.2.2", "architecture2": "i386", "source2": "apt-transport-debtorrent", "suite2": "pybit", "uri2": "svn://svn.debian.org/debtorrent/apt-transport-debtorrent/trunk", "pkg_format2": "deb", "distribution2": "debian", "role2": "slave", "commands2": "", "vcs_id3": "watch-test", "package3": "pybit", "method_type3": "git", "version3": "0.4.3-1", "architecture3": "i386", "source3": "pybit", "suite3": "pybitwheezy", "uri3": "https://github.com/nicholasdavidson/pybit.git", "pkg_format3": "deb", "distribution3": "debian", "commands3": "", "role3": "master", "vcs_id4": "", "package4": "qof", "method_type4": "apt", "version4": "0.8.1-1", "architecture4": "i386", "source4": "qof", "suite4": "pybit", "uri4": "", "pkg_format4": "deb", "distribution4": "Debian", "commands4": "", "role4": "master" } pybit-1.0.0/configs/client/0000755000175000017500000000000012146006064015444 5ustar neilneil00000000000000pybit-1.0.0/configs/client/default_client.conf0000644000175000017500000000051512145727113021302 0ustar neilneil00000000000000{ "clientid": "1", "host_arch": "i386", "use_lvm": true, "suites": ["unstable", "raring"], "distribution": "Debian", "pkg_format": "deb", "buildroot": "/home/buildd/pybit", "host": "localhost", "port": "5672", "userid": "guest", "password": "guest", "vhost": "/", "dput": "-U", "dput_dest": "local", "poll_time": 60 } 
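Example: querying the pybit-web HTTP API described in README.txt above. This is an illustrative sketch only, not a file shipped in the tarball; it assumes the default hostname, port and credentials from configs/web/web.conf, the /job routes described in the README, and that responses are jsonpickle-encoded as the README states.

#!/usr/bin/python
# List all jobs, then fetch job 1, over the RESTful API using HTTP Basic auth.
import requests
import jsonpickle

BASE = "http://localhost:8080"   # web.hostname / web.port from web.conf
AUTH = ("admin", "pass")         # web.username / web.password from web.conf

# GET /job returns the list of build jobs.
response = requests.get(BASE + "/job", auth=AUTH)
response.raise_for_status()
jobs = jsonpickle.decode(response.text)
print jobs

# GET /job/1 returns the job instance with ID 1.
response = requests.get(BASE + "/job/1", auth=AUTH)
response.raise_for_status()
print jsonpickle.decode(response.text)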
pybit-1.0.0/configs/client/dput.cf0000644000175000017500000000025412107360611016731 0ustar neilneil00000000000000[local] fqdn = localhost incoming = /tmp/ login = buildd method = local allow_unsigned_uploads = true pybit-1.0.0/configs/client/client.conf0000644000175000017500000000050212057436607017601 0ustar neilneil00000000000000{ "clientid": "", "host_arch": "", "use_lvm": true, "distribution": "", "pkg_format": "", "buildroot": "/home/buildd/pybit", "host": "localhost", "port": "5672", "suites": ["unstable"], "dry_run": true, "userid": "guest", "password": "guest", "vhost": "/", "dput": "-U", "dput_dest": "", "poll_time": 60 } pybit-1.0.0/configs/web_local.conf0000644000175000017500000000134312145727021016767 0ustar neilneil00000000000000{ "web": { "app": "wsgiref", "debug": true, "hostname": "localhost", "port": 8080, "reloader": false, "protocol": "http://", "jqueryurl" : "ajax.googleapis.com/ajax/libs/jquery/1.8.2/jquery.min.js", "jqueryformurl" : "malsup.github.com/jquery.form.js" }, "controller": { "rabbit_url": "localhost: 5672", "rabbit_userid": "guest", "rabbit_password": "guest", "rabbit_virtual_host": "/", "rabbit_insist": "False", "webserver_url": "localhost:8080" }, "db": { "hostname": "localhost", "databasename": "pybit", "port": 5432, "user": "pybit", "password" : "pybit" } } pybit-1.0.0/PKG-INFO0000644000175000017500000000041212146006064013630 0ustar neilneil00000000000000Metadata-Version: 1.0 Name: pybit Version: 1.0.0 Summary: PyBit buildd integrated toolkit Home-page: https://github.com/nicholasdavidson/pybit.git Author: PyBit Build System user Author-email: codehelp@debian.org License: gpl2 Description: UNKNOWN Platform: UNKNOWN pybit-1.0.0/pybit-watcher0000755000175000017500000001362012145644126015256 0ustar neilneil00000000000000#!/usr/bin/python # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
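# pybit-watcher: watches the incoming directories listed under "rules" in
# watcher.conf using pyinotify. When a .changes file appears in a watched
# path, it waits for "sleeptime" seconds and then runs
# "reprepro -b <repobase> processincoming <rule>" for that path (optionally
# via "su <user> -c"), unless "dryrun" is set, in which case the command is
# only logged.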
# -*- coding: utf-8 -*- import optparse import pyinotify import daemon import pybit import os import sys import logging import signal import subprocess import time from pybit.daemonlogger import LoggingDaemonContext, FileLikeLogger from pyinotify import ProcessEvent META="PYBIT_WATCHER_" PIDFILE = "/var/run/pybit-watcher.pid" def signal_handler(signal, frame): try: print '\nClosing %s' % os.path.basename(__file__) sys.exit (os.EX_OK) except Exception as e: raise Exception('Error in signal handler: ' + str(e)) return def getDaemonLogger (filePath, format = None) : FORMAT = format or '%(asctime)s %(msg)s' logging.basicConfig(filename=filePath, format=FORMAT, level=logging.DEBUG) return logging.getLogger() class EventHandler(pyinotify.ProcessEvent): def process_IN_CREATE(self, event): if os.path.isfile(event.pathname) and event.pathname.endswith(".changes"): logging.debug("Sleeping for %ss" % self.sleeptime) time.sleep(self.sleeptime) if event.path in self.settings['rules'] : rule = self.settings['rules'][event.path] cmd = "reprepro -b %s processincoming %s" % (rule['repobase'], rule['rule']) if ('user' in self.settings and self.settings['user'] != ''): cmd = "su %s -c '%s'" % (self.settings['user'], cmd) if ('dryrun' not in self.settings or self.settings['dryrun'] == False): process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) (stdout, stderr) = process.communicate() if process.returncode: logging.debug(stderr) logging.debug("reprepo command failed with code: %s" % process.returncode) else: logging.debug(cmd) else: logging.debug("dry-run: %s" % cmd) else: logging.debug("couldn't find a rule which matched the watched path.") def __init__(self, settings): self.settings = settings self.sleeptime = 3 if 'sleeptime' in self.settings: self.sleeptime = self.settings['sleeptime'] ProcessEvent.__init__(self) def run(settings): try: signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) except Exception as e: raise Exception('Error configuring signal handler: ' + str(e)) mask = pyinotify.IN_CREATE if 'dryrun' in settings and settings['dryrun'] == True: logging.debug("Starting in dryrun mode") wm = pyinotify.WatchManager() handler = EventHandler(settings) notifier = pyinotify.Notifier(wm, handler) for path in settings['rules'].keys(): wdd = wm.add_watch(path, mask, rec=True) notifier.loop() if __name__ == '__main__': parser = optparse.OptionParser() #options we can override in the config file. 
groupConfigFile = optparse.OptionGroup(parser, "Config File Defaults","All the options which have defaults read from a config file.") groupConfigFile.add_option("--dry-run", dest="dryrun", action="store_true", help="Controls if we simulate or do we actually run.", metavar=META + "DRYRUN") groupConfigFile.add_option("--user", dest="user", help="Which user do we run as?", metavar=META + "USER") groupConfigFile.add_option("--sleeptime", dest="sleeptime", help="The number of seconds we wait after a changes file is created before we run the reprepro command.", metavar=META + "SLEEPTIME") parser.add_option("--conf_file", dest="conf_file", default="watcher/watcher.conf", help="Config file to read settings from, defaults to watcher.conf which will be read from configs/watcher and /etc/pybit/watcher in turn.", metavar=META + "CONF_FILE") parser.add_option("-v", dest="verbose", action="store_true", default=False, help="Turn on verbose messages.", metavar=META+"VERBOSE") parser.add_option("-d", dest="daemonise", action="store_true", default=False, help="Daemonise with output going to /var/log/pybit-watcher", metavar=META+"DAEMONISE") (options, args) = parser.parse_args() context = None if options.daemonise : testLogger = getDaemonLogger('/var/log/pybitwatcher.log') stdoutLogger = getDaemonLogger('/dev/null') stderrLogger = getDaemonLogger('/dev/null') context = LoggingDaemonContext() context.loggers_preserve=[testLogger] context.stdout_logger = stdoutLogger context.stderr_logger = stderrLogger logging.debug ("I: Daemonised") else : # FORMAT = format or '%(asctime)s %(msg)s' logging.basicConfig(level=logging.DEBUG) logging.debug ("I: Not daemonised") (settings, opened_path) = pybit.load_settings(options.conf_file) if settings == {}: logging.debug("Couldn't load configuration from %s" % opened_path) sys.exit(-1) if 'configured' in settings and settings['configured'] == False: logging.debug ("Please configure the Reprepro watcher. 
Edit %s" % opened_path) sys.exit(os.EX_OK) settings = pybit.merge_options(settings, groupConfigFile, options) if options.daemonise: with context : pid = str(os.getpid()) file(PIDFILE, 'w').write(pid) run(settings) else: logging.debug ("I: Running watcher...") run(settings) pybit-1.0.0/pybit.egg-info/0000755000175000017500000000000012146006064015357 5ustar neilneil00000000000000pybit-1.0.0/pybit.egg-info/PKG-INFO0000644000175000017500000000041212146006064016451 0ustar neilneil00000000000000Metadata-Version: 1.0 Name: pybit Version: 1.0.0 Summary: PyBit buildd integrated toolkit Home-page: https://github.com/nicholasdavidson/pybit.git Author: PyBit Build System user Author-email: codehelp@debian.org License: gpl2 Description: UNKNOWN Platform: UNKNOWN pybit-1.0.0/pybit.egg-info/SOURCES.txt0000644000175000017500000000403212146006064017242 0ustar neilneil00000000000000MANIFEST.in README.txt apache_config.txt application.wsgi buildd-test.py makeme pybit-client pybit-watcher pybit_web.py setup.py configs/buildd-test.conf configs/web_local.conf configs/client/client.conf configs/client/default_client.conf configs/client/dput.cf configs/watcher/watcher.conf configs/web/web.conf db/README.txt db/populate.sql db/schema.sql db/updates/v1.sql db/updates/v2.sql db/updates/v3.sql db/updates/v4.sql db/updates/v5.sql examples/procmail-mbox.py hook/changes-debian hook/git-postcommit-debian hook/svn-postcommit-debian pybit/__init__.py pybit/daemonlogger.py pybit/models.py pybit.egg-info/PKG-INFO pybit.egg-info/SOURCES.txt pybit.egg-info/dependency_links.txt pybit.egg-info/top_level.txt pybitclient/README pybitclient/__init__.py pybitclient/apt.py pybitclient/buildclient.py pybitclient/crossdebian.py pybitclient/debianclient.py pybitclient/git.py pybitclient/sbuild-cross.sh pybitclient/sbuild-orig.sh pybitclient/subversion.py pybitweb/__init__.py pybitweb/bottle_basic_auth.py pybitweb/buildd.py pybitweb/controller.py pybitweb/db.py pybitweb/job.py pybitweb/lookups.py pybitweb/package.py pybitweb/packageinstance.py pybitweb/static/arches.htm pybitweb/static/blacklist.htm pybitweb/static/buildd.htm pybitweb/static/dashboard.htm pybitweb/static/dists.htm pybitweb/static/envs.htm pybitweb/static/favicon.ico pybitweb/static/favicon.png pybitweb/static/formats.htm pybitweb/static/index.htm pybitweb/static/job.htm pybitweb/static/lookups.htm pybitweb/static/package.htm pybitweb/static/packageinstance.htm pybitweb/static/statuses.htm pybitweb/static/suites.htm pybitweb/static/bootstrap/css/bootstrap-responsive.css pybitweb/static/bootstrap/css/bootstrap-responsive.min.css pybitweb/static/bootstrap/css/bootstrap.css pybitweb/static/bootstrap/css/bootstrap.min.css pybitweb/static/bootstrap/img/glyphicons-halflings-white.png pybitweb/static/bootstrap/img/glyphicons-halflings.png pybitweb/static/bootstrap/js/bootstrap.js pybitweb/static/bootstrap/js/bootstrap.min.js test/plugintest.py test/pybitclient-test.py test/webapi-test.pypybit-1.0.0/pybit.egg-info/top_level.txt0000644000175000017500000000003312146006064020105 0ustar neilneil00000000000000pybitweb pybitclient pybit pybit-1.0.0/pybit.egg-info/dependency_links.txt0000644000175000017500000000000112146006064021425 0ustar neilneil00000000000000 pybit-1.0.0/setup.py0000644000175000017500000000271712146000037014251 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # setup.py # # Copyright 2012 Neil Williams # Copyright (C) 2006 James Westby # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU 
General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from setuptools import setup setup(name='pybit', version='1.0.0', description='PyBit buildd integrated toolkit', license='gpl2', url='https://github.com/nicholasdavidson/pybit.git', packages=['pybit', 'pybitclient', 'pybitweb',], maintainer='PyBit Build System user', maintainer_email='codehelp@debian.org', include_package_data = True, exclude_package_data = { 'pybitclient' : [ 'sbuild-cross.sh' , 'sbuild-orig.sh', 'README'], 'pybitweb' : [ 'static/*' ] } ) pybit-1.0.0/pybit-client0000755000175000017500000001571712146004377015107 0ustar neilneil00000000000000#!/usr/bin/python # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
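# pybit-client: the build client daemon. It loads its settings from
# client.conf, connects to the AMQP (RabbitMQ) server described by those
# settings via an AMQPConnection, and then runs a PyBITClient instance in a
# loop, waiting for build jobs matching its architecture, distribution,
# package format and suites. It can run in the foreground or daemonise,
# logging to /var/log/pybitclient.log by default.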
# -*- coding: utf-8 -*- from amqplib import client_0_8 as amqp import optparse import tty import sys import signal import jsonpickle import requests import pybit import os import daemon import daemon.pidlockfile import time import logging import lockfile from logging.handlers import WatchedFileHandler from pybitclient import PyBITClient from pybitclient.debianclient import DebianBuildClient from pybitclient.subversion import SubversionClient from pybit.models import AMQPConnection META = "PYBIT_CLIENT_" PIDFILE = "/var/run/pybit-client.pid" def signal_handler(signal, frame): try: print '\nClosing %s' % os.path.basename(__file__) sys.exit (os.EX_OK) except Exception as e: raise Exception('Error in signal handler: ' + str(e)) return def run(settings) : conn_info = AMQPConnection(pybit.get_client_queue(settings['clientid']), settings['host'], settings['userid'], settings['password'], settings['vhost']) with PyBITClient(settings['host_arch'], settings['distribution'], settings['pkg_format'], settings['suites'], conn_info, settings) as build_client: while True: if build_client is not None: build_client.wait() else: sys.exit (os.EX_OSERR) def getDaemonLogger (filePath, format = None) : logger = logging.getLogger() logger.setLevel(logging.DEBUG) try: watchedHandler = WatchedFileHandler(filePath) except Exception as e: return e watchedHandler.setFormatter(logging.Formatter(format or '%(asctime)s %(msg)s')) logger.addHandler(watchedHandler) return (logger, watchedHandler) if __name__ == '__main__': parser = optparse.OptionParser() #options we can override in the config file. groupConfigFile = optparse.OptionGroup(parser, "Config File Defaults","All the options which have defaults read from a config file.") groupConfigFile.add_option("--host_arch", dest="host_arch", help="Architecture to use, defaults to i386", metavar= META + "ARCH") groupConfigFile.add_option("--distribution", help="Distribution to use, defaults to Debian", metavar= META +"DIST") groupConfigFile.add_option("--pkg_format", dest="pkg_format", help="Package type to use, defaults to deb", metavar= META +"FORMAT") groupConfigFile.add_option("--suite", dest="suites", action="append", help="Suite to use, defaults to unstable. 
Can be passed multiple times.", metavar= META +"SUITES") groupConfigFile.add_option("--host", dest="host", help="host to connect to, defaults to localhost.", metavar=META + "HOST") groupConfigFile.add_option("--vhost", dest="vhost", help="vhost to connect to, defaults to localhost.", metavar=META + "VHOST") groupConfigFile.add_option("--userid", dest="userid", help="user id to use for AMQP server, defaults to guest.", metavar=META + "USERID") groupConfigFile.add_option("--port", dest="port", help="port to use for AMQP server, defaults to 5672", metavar=META + "PORT") groupConfigFile.add_option("--password", dest="password", help="password to use for AMQP server, defaults to guest", metavar=META + "PASSWORD") groupConfigFile.add_option("--clientid", dest="clientid", help="id to use for build-client control queue, defaults to 1 but is unique per amqp server.", metavar=META + "CLIENTID") groupConfigFile.add_option("--poll_time", dest="poll_time", help="interval at which to poll the queue, defaults to 60 s.", metavar=META + "POLL_TIME") groupConfigFile.add_option("--log_file", dest="log_file", help="File to log client information.", metavar=META + "LOG_FILE") groupConfigFile.add_option("--pid_file", dest="pid_file", help="File to pid client information.", metavar=META + "PID_FILE") parser.add_option_group(groupConfigFile) parser.add_option("--conf_file", dest="conf_file", default="client/client.conf", help="Config file to read settings from, defaults to client.conf which will be read from configs/client.conf and /etc/pybit/client.conf in turn.", metavar=META + "CONF_FILE") parser.add_option("-v", dest="verbose", action="store_true", default=False, help="Turn on verbose messages.", metavar=META+"VERBOSE") parser.add_option("-d", dest="daemonise", action="store_true", default=False, help="Daemonise with output going to /var/log/pybitclient.log by defalt", metavar=META+"DAEMONISE") (options, args) = parser.parse_args() (settings, opened_path) = pybit.load_settings(options.conf_file) settings = pybit.merge_options(settings, groupConfigFile, options) # Add default settings we want to not overwrite files. 
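# If the configuration file does not define log_file or pid_file, the packaged
# defaults below (/var/log/pybitclient.log and /var/run/pybit-client.pid) are
# used instead.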
if ( not 'log_file' in settings ): settings['log_file'] = "/var/log/pybitclient.log" if ( not 'pid_file' in settings ): settings['pid_file' ] = PIDFILE if options.daemonise : client_logger, watched_file_handler = getDaemonLogger( settings['log_file'] ) if ( isinstance( client_logger, Exception ) ): print( "Fatal error creating client_logger: " + str( client_logger ) ) sys.exit(os.EX_OSERR) lockfile = daemon.pidlockfile.PIDLockFile(settings['pid_file']) if (lockfile.is_locked()): logging.debug("PIDFile %s already locked" % settings['pid_file']) sys.exit(os.EX_OSERR) context = daemon.DaemonContext( working_directory=os.getcwd(), pidfile=lockfile, files_preserve = [ watched_file_handler.stream ], stderr=watched_file_handler.stream, stdout=watched_file_handler.stream) context.signal_map = { signal.SIGTERM: signal_handler, signal.SIGHUP: signal_handler } logging.debug ("Daemonised") else : # FORMAT = format or '%(asctime)s %(msg)s' logging.basicConfig(level=logging.DEBUG) logging.debug ("Not daemonised") if settings == {}: logging.debug ("No settings found - exiting") sys.exit(os.EX_OSERR) if 'configured' in settings and settings['configured'] == False: logging.debug ("Please configure your client.") sys.exit(os.EX_OK) if options.daemonise: # try: with context : logging.debug ("I: Running build client.") run(settings) # except Exception as e: # logging.debug( "Failed to daemonise: " + str(e) ) # sys.exit(os.EX_OSERR) else : print "Hit Ctrl-C to quit." run(settings) pybit-1.0.0/MANIFEST.in0000644000175000017500000000112712145727344014307 0ustar neilneil00000000000000include buildd-test.py include pybitclient/dput.cf pybitclient/sbuild-cross.sh pybitclient/sbuild-orig.sh include configs/* test/* configs/client/* configs/watcher/* configs/web/* include hook/* db/* examples/* include db/updates/* include pybitweb/static/* include pybitweb/static/bootstrap/* include pybitweb/static/bootstrap/css/* include pybitweb/static/bootstrap/img/* include pybitweb/static/bootstrap/js/* include pybitweb/static/js/* include pybit-client include pybit_web.py include pybitclient/README include apache_config.txt include application.wsgi include pybit-watcher include makeme pybit-1.0.0/hook/0000755000175000017500000000000012146006064013476 5ustar neilneil00000000000000pybit-1.0.0/hook/svn-postcommit-debian0000755000175000017500000001121412145706071017651 0ustar neilneil00000000000000#!/bin/sh set -e # svn-postcommit-debian - Debian-based SVN postcommit hook # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
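# pybit-specific behaviour: for every debian/changelog touched by the commit,
# this hook extracts the source package name, version, target suite and
# architecture list with svnlook and dpkg-parsechangelog, then POSTs them with
# curl to the pybit-web vcshook URL (PYBIT_HTTP below) so that a build job is
# queued for the new revision.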
######## Configuration ############ # Network location of the pybit-web server PYBIT_HTTP="http://localhost/job/vcshook" # Anonymous SVN URI stub ANON_SVN="http://localhost/svn" # Distribution DISTRO_NAME="Debian" USERNAME="admin" PASSWORD="pass" ####### End Configuration ######### # POST-COMMIT HOOK # # The post-commit hook is invoked after a commit. Subversion runs # this hook by invoking a program (script, executable, binary, etc.) # named 'post-commit' (for which this file is a template) with the # following ordered arguments: # # [1] REPOS-PATH (the path to this repository) # [2] REV (the number of the revision just committed) # # The default working directory for the invocation is undefined, so # the program should set one explicitly if it cares. # # Because the commit has already completed and cannot be undone, # the exit code of the hook program is ignored. The hook program # can use the 'svnlook' utility to help it examine the # newly-committed tree. # # On a Unix system, the normal procedure is to have 'post-commit' # invoke other programs to do the real work, though it may do the # work itself too. # # Note that 'post-commit' must be executable by the user(s) who will # invoke it (typically the user httpd runs as), and that user must # have filesystem-level permission to access the repository. # # On a Windows system, you should name the hook program # 'post-commit.bat' or 'post-commit.exe', # but the basic idea is the same. # # The hook program typically does not inherit the environment of # its parent process. For example, a common problem is for the # PATH environment variable to not be set to its usual value, so # that subprograms fail to launch unless invoked via absolute path. # If you're having unexpected problems with a hook program, the # culprit may be unusual (or missing) environment variables. # # Here is an example hook script, for a Unix /bin/sh interpreter. # For more examples and pre-written hooks, see those in # /usr/share/subversion/hook-scripts, and in the repository at # http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and # http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ REPOS="$1" REV="$2" METHOD="svn" RES=`svnlook changed ${REPOS} --revision ${REV}|grep "debian/changelog" || true` PKG=`echo "${RES}" | cut -d' ' -f4- || true` if [ ! "${RES}" ]; then exit 0 fi if [ ! "${PKG}" ]; then exit 0 fi for CHANGELOG in $PKG do DEBDIR=`dirname $CHANGELOG` PKGDIR=`dirname $DEBDIR` PKG=`basename $PKGDIR` CHGLG=`echo ${CHANGELOG} | sed -e 's/^. 
//'` TMPFILE=`mktemp` svn cat file://${REPOS}/${CHGLG}@${REV} > ${TMPFILE} VERSION=`dpkg-parsechangelog -l${TMPFILE} | grep '^Version: ' | sed -e 's/^.*: //'` SUITE=`dpkg-parsechangelog -l${TMPFILE} | grep '^Distribution: ' | sed -e 's/^.*: //'` # replace the svndirectory name with the actual source package name, in case it differs PKG_PATH=`dpkg-parsechangelog -l${TMPFILE} | grep '^Source: ' |sed -e 's/^.*: //'` rm ${TMPFILE} CTRL=`echo ${CHGLG} | sed -e 's/changelog$/control/'` LIST=`svn cat file://${REPOS}/${CTRL}@${REV} | grep '^Architecture: '| cut -d':' -f2|sort -u|tr '\n' ','|sed -e 's/ //g'| sed -e 's/,$//'` DATASTR="--data-urlencode method=svn" DATASTR="${DATASTR} --data-urlencode distribution=${DISTRO_NAME}" DATASTR="${DATASTR} --data-urlencode vcs_id=${REV}" DATASTR="${DATASTR} --data-urlencode architecture_list=${LIST}" DATASTR="${DATASTR} --data-urlencode package_version=${VERSION}" DATASTR="${DATASTR} --data-urlencode package=${PKG_PATH}" DATASTR="${DATASTR} --data-urlencode suite=${SUITE}" DATASTR="${DATASTR} --data-urlencode format=deb" DATASTR="${DATASTR} --data-urlencode uri=${ANON_SVN}/${PKGDIR}" /usr/bin/curl -i -X POST ${PYBIT_HTTP} ${DATASTR} --user "${USERNAME}:${PASSWORD}" done pybit-1.0.0/hook/git-postcommit-debian0000644000175000017500000000066712045012715017627 0ustar neilneil00000000000000 # for a client side post-commit hook, each client needs to work in a different branch # otherwise one commit will cancel the build of the previous commit # for a server side post-receive hook, the server needs access to the pybit webapi # http://stackoverflow.com/questions/3616648/git-post-commit-hook-script-on-committed-files # post-commit takes no parameters # git log -1 HEAD # post-receive hook: # https://gist.github.com/585746 pybit-1.0.0/hook/changes-debian0000755000175000017500000000363512146006033016257 0ustar neilneil00000000000000#!/usr/bin/perl use strict; use warnings; use vars qw/ $username $password $pybit_http $distro_name $suite $arch $ver $pkg $sep $curl /; # changes-debian - Debian-based procmail handler for changes files # # Copyright 2013 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. ######## Configuration ############ # Network location of the pybit-web server $pybit_http="http://localhost/job/vcshook"; # Distribution $distro_name="Debian"; $username="admin"; $password="pass"; ####### End Configuration ######### $sep = "--data-urlencode"; while (<>) { m/^Distribution: (\S+)/m and $suite = $1; m/^Architecture: (source)? 
?(\S+)/m and $arch = $2; next if (m/^Version: GnuPG/); m/^Version: (\S+)/m and $ver = $1; m/^Source: (\S+)/m and $pkg = $1; } exit (0) if (not defined $suite or not defined $arch or not defined $pkg); $curl = "/usr/bin/curl -i -X POST $pybit_http "; $curl .= "$sep method=apt $sep distribution=$distro_name "; $curl .= "$sep vcs_id= $sep architecture_list=$arch "; $curl .= "$sep package_version=$ver $sep package=$pkg "; $curl .= "$sep suite=$suite $sep format=deb $sep uri="; $curl .= " --user \"$username:$password\"\n"; system ($curl); pybit-1.0.0/apache_config.txt0000644000175000017500000000103312145727254016054 0ustar neilneil00000000000000 WSGIDaemonProcess pybitweb user=pybitweb group=pybitweb processes=1 threads=5 WSGIProcessGroup pybitweb WSGIScriptAlias / /usr/share/pybit-web/application.wsgi WSGIPassAuthorization On WSGIApplicationGroup %{GLOBAL} WSGIPassAuthorization On Order deny,allow Allow from all LogLevel info ErrorLog ${APACHE_LOG_DIR}/pybit-error.log CustomLog ${APACHE_LOG_DIR}/pybit-access.log combined pybit-1.0.0/pybitclient/0000755000175000017500000000000012146006064015064 5ustar neilneil00000000000000pybit-1.0.0/pybitclient/subversion.py0000644000175000017500000001032112145766622017646 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # subversion.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
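# SubversionClient is the svn transport plugin for the build client. Its
# fetch_source() exports the requested URI (optionally pinned to a revision as
# uri@vcs_id) into a per-suite working directory with "svn export", and
# clean_source() removes the exported tree plus any generated _source.changes
# or .dsc files via "dcmd rm". Both report "success" or an error token back
# over the queue with pybitclient.send_message().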
import os import pybitclient from pybitclient.buildclient import VersionControlHandler import logging class SubversionClient(VersionControlHandler): def fetch_source(self, buildreq, conn_data): retval = None if buildreq.transport.method != self.method: retval = "wrong_method" if not retval : self.workdir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) if (buildreq.transport.vcs_id is not None): command = "svn export %s@%s %s" % (buildreq.transport.uri, buildreq.transport.vcs_id, self.workdir) elif (buildreq.transport.uri is not None): command = "svn export %s %s" % (buildreq.transport.uri, self.workdir) else: logging.warn ("E: Could not fetch source, no method URI found") retval = "unrecognised uri" if not retval : if pybitclient.run_cmd (command, self.settings["dry_run"], None) : retval = "fetch_source" if not retval : retval = "success" pybitclient.send_message (conn_data, retval) if retval == "success": return 0 else : return 1 def get_srcdir (self): return self.workdir def clean_source (self, buildreq, conn_data) : retval = None if buildreq.transport.method != self.method: retval = "wrong_method" if not retval : # look for a _source.changes file generated when we made the .dsc src_chgs = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, ("%s_%s_source.changes" % (buildreq.get_package(), buildreq.get_version() ) ) ) if (os.path.exists (src_chgs)): command = "dcmd rm -f %s" % (src_chgs) if pybitclient.run_cmd (command, self.settings["dry_run"], None): retval = "source-clean-fail" else : # check for just the .dsc src_chgs = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, ("%s_%s.dsc" % (buildreq.get_package(), buildreq.get_version() ) ) ) if (os.path.exists (src_chgs)): command = "dcmd rm -f %s" % (src_chgs) if pybitclient.run_cmd (command, self.settings["dry_run"], None): retval = "source-clean-fail" if not retval : self.cleandir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) command = "rm -rf %s" % (self.cleandir) if pybitclient.run_cmd (command, self.settings["dry_run"], None) : retval = "failed_clean" if not retval : retval = "success" pybitclient.send_message (conn_data, retval) # return the exit value of the process - exit (0) for success. if retval == "success": return 0 else : return 1 def __init__(self, settings): VersionControlHandler.__init__(self, settings) self.method = 'svn'; def createPlugin(settings) : return SubversionClient (settings) pybit-1.0.0/pybitclient/sbuild-cross.sh0000755000175000017500000000175312045012715020040 0ustar neilneil00000000000000#!/bin/sh # Copyright 2012 Neil Williams # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
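# When run by sbuild, this script locates the .dsc named by
# SBUILD_BUILD_DSC_File, unpacks it with "dpkg-source -x", changes into the
# unpacked source directory and runs "embuilddeps -a armel" to install the
# cross build-dependencies before the cross build proper starts.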
set -e DSC=`find / -name ${SBUILD_BUILD_DSC_File}` DIR=`dirname ${DSC}` cd ${DIR} dpkg-source -x ${DSC} cd ${SBUILD_BUILD_DSC_Dir} embuilddeps -a armel pybit-1.0.0/pybitclient/debianclient.py0000644000175000017500000003403212145766615020077 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # debian.py # # Copyright 2012, 2013 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # If using with the git vcs handler, see also http://wiki.debian.org/GitSrc # There could also be merit in renaming this as DebianSbuild and # then supporting DebianSVN and DebianGit which would use # svn-buildpackage & git-buildpackage respectively, instead of sbuild. import os import logging import pybitclient from pybitclient.buildclient import PackageHandler from pybit.models import BuildRequest, checkValue class DebianBuildClient(PackageHandler): dput_cfg = "" #FIXME dput_dest = "" def _overall_success(self, message, conn_data): error = 1 #If we have a message set we send back the message and failure if message : pybitclient.send_message (conn_data, message) else: pybitclient.send_message (conn_data, "success") error = 0 return error def update_environment(self,name,pkg, conn_data): retval = None command = "schroot --directory / -u root -c %s -- apt-get update > /dev/null 2>&1" % (name) if pybitclient.run_cmd (command, self.settings["dry_run"], None) : retval = "build_update" return retval def build_command_handler (self, buildreq, conn_data) : retval = None logfile = self.get_buildlog (self.settings["buildroot"], buildreq) # expect fakeroot debian/rules rules-target package_dir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) parts = buildreq.commands.split(' ') if len(parts) != 3 : retval = "failed-custom-command-len" if retval : return retval # only allow debian/rules targets, specified in full if parts[0] != "fakeroot" or parts[1] != "debian/rules" or parts[2] is None : retval = "failed-custom-command-parts" if retval : return retval # debian/rules targets must be run in the package_dir and # a command passed to schroot needs to be accessible inside the # chroot and therefore copied to ${HOME} so that schroot copies it again, # into the chroot itself. 
orig_sh = "/usr/share/pybitclient/sbuild-orig.sh" command = "(cp %s %s/sbuild-orig.sh ; schroot --directory / -n -u root -c %s -- %s/sbuild-orig.sh %s %s ; rm %s/sbuild-orig.sh)" % (orig_sh, self.settings["buildroot"], buildreq.get_suite(), self.settings["buildroot"], package_dir, parts[2], self.settings["buildroot"]) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "custom-command-error" return retval def orig_source_handler (self, buildreq, conn_data) : retval = None logfile = self.get_buildlog (self.settings["buildroot"], buildreq) srcdir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) version = buildreq.get_version() if '-' not in version : # native package, nothing to do for the orig source. return retval if self.settings["dry_run"] : logging.debug("I: %s (%s) is not a native package - need original source" % (buildreq.get_package(), version)) offset = version.find('-') # strip the debian packaging part of the version string origversion = version[0:offset] origtar = os.path.join (srcdir, "%s_%s.orig.tar.gz" % (buildreq.get_package(), origversion)) if os.path.isfile (origtar) : # have .gz return retval # check for .tar.bz2 origtar = os.path.join (srcdir, "%s_%s.orig.tar.bz2" % (buildreq.get_package(), origversion)) if os.path.isfile (origtar) : # have .bz2 return retval # use a debian/watch file and uscan package_dir = "%s/%s" % (srcdir, buildreq.get_package()) watch = os.path.join (srcdir, package_dir, "debian", "watch") logging.debug ("I: Looking for '%s' as watch file." % watch) if os.path.isfile (watch) or self.settings["dry_run"] : logging.debug ("I: Using '%s' as watch file." % watch) command = "(cd %s ; uscan --destdir ../ --repack --force-download --download-version %s)" % (os.path.join(srcdir, buildreq.get_package()), origversion) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "watch-failed" return retval # fall back to apt-get source else : command = "(cd ../ ; apt-get -d source %s/%s)" % (buildreq.get_package(), buildreq.get_suite()) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): logging.debug("I: apt-get source failed, proceeding anyway incase its an update of a debian package.") return retval def build_master (self, buildreq, conn_data): retval = None logfile = self.get_buildlog (self.settings["buildroot"], buildreq) if (not isinstance(buildreq, BuildRequest)): logging.warn ("E: not able to identify package name.") retval = "misconfigured" return self._overall_success(retval, conn_data) srcdir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) package_dir = "%s/%s" % (srcdir, buildreq.get_package()) # To check the build-dependencies in advance, we need to ensure the # chroot has an update apt-cache, so can't use apt-update option of # sbuild. The alternative is to update the apt-cache twice per build, # once for the dep check and once before the build. The choice depends # on whether two network trips are more efficient than rewriting the # lvm snapshot before even trying to do any build. 
chroot_name = buildreq.get_suite() if (buildreq.get_buildenv() is not None): chroot_name = "%s-%s" % (buildreq.get_buildenv(), buildreq.get_suite()) if self.settings["use_lvm"] : update_name = "%s-source" % chroot_name else : update_name = chroot_name retval = self.update_environment (update_name, buildreq, conn_data) # need an extra uscan stage to deal with non-native packages # this requires the upstream release to be accessible to the client. # i.e. unreleased versions of non-native packages cannot be built this way. # See #18 for the unreleased build support issue. if not retval: if hasattr (buildreq, 'commands') and buildreq.commands : retval = self.build_command_handler (buildreq, conn_data) else : #61 - avoid dependency check if not using lvm if self.settings["use_lvm"] and (os.path.isdir(package_dir) or self.settings["dry_run"]) : control = os.path.join (package_dir, 'debian', 'control') dep_check = "/usr/lib/pbuilder/pbuilder-satisfydepends-classic --control" command = "schroot --directory / -u root -c %s -- %s %s" % (chroot_name, dep_check, os.path.realpath(control)) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "build-dep-wait" if not retval : retval = self.orig_source_handler (buildreq, conn_data) if not retval : dsc_file = "%s/%s_%s.dsc" % (srcdir, buildreq.get_package(), buildreq.get_version()) if not os.path.exists (dsc_file) : command = "(cd %s && dpkg-buildpackage -nc -S -d -uc -us)" % (package_dir) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "build_dsc" if not retval : command = "sbuild -A -n -s -d %s %s/%s_%s.dsc" % (chroot_name, srcdir, buildreq.get_package(), buildreq.get_version()) ret = pybitclient.run_cmd (command, self.settings["dry_run"], logfile) if (ret == 3 or ret == 1): retval = "build-dep-wait" elif (ret): retval = "build_binary" if not retval : changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not self.settings["dry_run"] and not os.path.isfile (changes) : logging.warn("E: build_master: Failed to find %s file." % (changes)) retval = "build_changes" if not retval and checkValue ('debsignkey', self.settings) : command = "debsign -k%s %s" % (self.settings['debsignkey'], changes) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "build_sign" return self._overall_success(retval, conn_data) def upload (self, buildreq, conn_data): retval = None logfile = self.get_buildlog (self.settings["buildroot"], buildreq) changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not os.path.isfile (changes) and not self.settings["dry_run"]: logging.warn("E: upload: Failed to find %s file." 
% (changes)) retval = "upload_changes" if not retval : if (buildreq.get_buildenv() is not None): upload_target = buildreq.get_buildenv() else : upload_target = self.settings["dput"] command = "dput -c %s %s %s %s" % (self.dput_cfg, upload_target, self.settings["dput_dest"], changes) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "upload_fail" if not retval : command = "dcmd rm %s" % (changes) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "post-upload-clean-fail" return self._overall_success(retval, conn_data) def build_slave (self, buildreq, conn_data): retval = None logfile = self.get_buildlog (self.settings["buildroot"], buildreq) srcdir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) package_dir = "%s/%s" % (srcdir, buildreq.get_package()) if os.path.isdir(package_dir) or self.settings["dry_run"]: # need an extra uscan stage to deal with non-native packages # this requires the upstream release to be accessible to the client. # i.e. unreleased versions of non-native packages cannot be built this way. # See #18 for the unreleased build support issue. if hasattr (buildreq, 'commands') and buildreq.commands : retval = self.build_command_handler (buildreq, conn_data) else : retval = self.orig_source_handler (buildreq, conn_data) command = "(cd %s ; dpkg-buildpackage -nc -S -d -uc -us)" % (package_dir) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "build_dsc" chroot_name = buildreq.get_suite() if (buildreq.get_buildenv() is not None): chroot_name = "%s-%s" % (buildreq.get_buildenv(), buildreq.get_suite()) if not retval : command = "sbuild -n --apt-update -d %s %s/%s_%s.dsc" % (chroot_name, srcdir, buildreq.get_package(), buildreq.get_version()) ret = pybitclient.run_cmd (command, self.settings["dry_run"], logfile) if (ret == 3 or ret == 768): retval = "build-dep-wait" elif (ret): retval = "build_binary" if not retval : changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not self.settings["dry_run"] and not os.path.isfile (changes) : logging.warn ("E: build_slave: Failed to find %s file." % (changes)) retval = "build_changes" if not retval and checkValue ('debsignkey', self.settings) : command = "debsign -k%s %s" % (self.settings['debsignkey'], changes) if pybitclient.run_cmd (command, self.settings["dry_run"], logfile): retval = "build_sign" else: retval = "Can't find build dir." #If we have a message set we send back the message and failure return self._overall_success(retval, conn_data) def get_distribution (self) : return 'Debian' def __init__(self, settings): PackageHandler.__init__(self, settings) # Specific buildd options # FIXME: decide how this is managed and packaged # variables to retrieve from the job object later self.dput_cfg = "/etc/pybit/client/dput.cf" if not settings["dry_run"] : os.chdir (settings["buildroot"]) def createPlugin (settings) : return DebianBuildClient (settings) pybit-1.0.0/pybitclient/__init__.py0000644000175000017500000005161312145773156017217 0ustar neilneil00000000000000# Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. import re import os import signal import imp import errno import json import time import logging import jsonpickle from amqplib import client_0_8 as amqp import pybit from pybit.models import TaskComplete, PackageInstance, ClientMessage, BuildRequest, CommandRequest, AMQPConnection,\ CancelRequest from pybitclient.buildclient import PackageHandler, VersionControlHandler import multiprocessing import socket import requests from requests.auth import HTTPBasicAuth class PyBITClient(object): def _clean_current(self): self.vcs_handler = None self.process = None self.current_msg = None self.current_request = None self.overall_success = None self.subprocess_message = None def set_status(self, status, request=None, client=None): if request is None: request = self.current_request if status is not None and request is not None: logging.debug("Marking JOB id: %s as: %s" % (request.get_job_id(), status)) # FIXME: this clears/resets 'cancelled' state payload = {'status': status} if client is not None: payload['client'] = client job_status_url = "http://%s/job/%s" % (request.web_host, request.get_job_id()) try: requests.put(job_status_url, payload, auth=HTTPBasicAuth('admin', 'pass')) except requests.exceptions.ConnectionError: pass else: logging.debug("Couldn't find status or current_request") def get_status(self, request=None): """Get the build request status from the controller via REST Returns the current job status, waiting if the controller cannot be contacted or None if the job doesn't exist """ if request is None: request = self.current_request if request is not None: job_status_get_url = "http://%s/job/%s/status" % (request.web_host, request.get_job_id()) r = requests.get(job_status_get_url) if r.status_code == 200 and r.headers['content-type'] == 'application/json': status_list = jsonpickle.decode(r.content) if len(status_list) > 0: return status_list[-1].status elif r.status_code == 404: return None else: return ClientMessage.waiting def republish_job(self, buildreq): if isinstance(buildreq, BuildRequest): routing_key = pybit.get_build_route_name(buildreq.get_dist(), buildreq.get_arch(), buildreq.get_suite(), buildreq.get_format()) try: msg = jsonpickle.encode(buildreq) self.message_chan.basic_publish(amqp.Message(msg), exchange=pybit.exchange_name, routing_key=routing_key, mandatory=True) except amqp.AMQPConnectionException as e: logging.debug("Couldn't connect to channel. traceback: %s" % e) def wait(self): time.sleep(self.poll_time) if self.state == "IDLE": msg = None if self.message_chan is not None: for suite in self.listen_list: msg = self.message_chan.basic_get(queue=self.listen_list[suite]['queue']) if msg: break if msg is not None: self.message_handler(msg) cmd = None if self.command_chan is not None: cmd = self.command_chan.basic_get(no_ack=True) if cmd is not None: self.command_handler(cmd) def move_state(self, new_state): if new_state in self.state_table: #FIXME: we can stack state handling in here. 
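# Overall state flow, as implemented by the handlers registered in __init__:
#   IDLE -> CHECKOUT -> BUILD -> UPLOAD -> CLEAN -> IDLE
# with CLEAN entered early when checkout or build fails, and FATAL_ERROR
# reached only if the clean step itself fails.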
self.old_state = self.state self.state = new_state if self.state == "CHECKOUT": args = (self.current_request, self.conn_info) self.process = multiprocessing.Process(target=self.vcs_handler.fetch_source, args=args) self.process.start() self.set_status(ClientMessage.building, None, self.conn_info.client_name) elif self.state == "BUILD": # mark this as the moment that the build starts self.current_request.stamp_request() args = (self.current_request, self.conn_info) if self.current_request.job.packageinstance.master is True: self.process = multiprocessing.Process(target=self.format_handler.build_master, args=args) else: self.process = multiprocessing.Process(target=self.format_handler.build_slave, args=args) self.process.start() elif self.state == "CLEAN": args = (self.current_request, self.conn_info) self.process = multiprocessing.Process(target=self.vcs_handler.clean_source, args=args) self.process.start() elif self.state == "UPLOAD": args = (self.current_request, self.conn_info) self.process = multiprocessing.Process(target=self.format_handler.upload, args=args) self.process.start() elif self.state == "IDLE": overall_success = self.overall_success current_msg = self.current_msg current_req = self.current_request subprocess_message = self.subprocess_message self._clean_current() if current_msg is not None: self.message_chan.basic_ack(current_msg.delivery_tag) if overall_success is True: self.set_status(ClientMessage.done, current_req) elif overall_success is False: if subprocess_message == 'build-dep-wait': self.set_status(ClientMessage.blocked, current_req) self.republish_job(current_req) else: self.set_status(ClientMessage.failed, current_req) elif self.state == "FATAL_ERROR": current_req = self.current_request current_msg = self.current_msg self._clean_current() self.message_chan.basic_ack(current_msg.delivery_tag) self.set_status(ClientMessage.failed, current_req) self.republish_job(current_req) logging.debug("Moved from %s to %s" % (self.old_state, self.state)) else: logging.debug("Unhandled state: %s" % new_state) def plugin_handler(self): plugin = None vcs = None client = None plugins = [] plugin_dir = "/var/lib/pybit-client.d/" if not os.path.exists(plugin_dir): plugin_dir = os.path.realpath("./pybitclient/") for name in os.listdir(plugin_dir): if name.endswith(".py"): plugins.append(name.strip('.py')) for name in plugins: if name == "buildclient" or name == "__init__": continue plugin_path = [plugin_dir] fp, pathname, description = imp.find_module(name, plugin_path) try: mod = imp.load_module(name, fp, pathname, description) if not (hasattr(mod, 'createPlugin')): logging.error("Error: plugin path contains an unrecognised module '%s'." % name) return plugin = mod.createPlugin(self.settings) if hasattr(plugin, 'get_distribution') and plugin.get_distribution() is not None: client = plugin elif hasattr(plugin, 'method') and plugin.method is not None: vcs = plugin else: logging.error("Error: plugin path contains a recognised plugin but the plugin API for '%s' is incorrect." % name) return finally: # Since we may exit via an exception, close fp explicitly. 
if fp: fp.close() if client: name = client.get_distribution() if name not in self.distros: self.distros[name] = client if vcs: if vcs.method not in self.handlers: self.handlers[vcs.method] = vcs logging.info("List of available handlers: %s" % list(self.handlers.keys())) logging.info("List of available distributions: %s" % list(self.distros.keys())) def idle_handler(self, msg, decoded): if isinstance(decoded, BuildRequest): self.current_msg = msg self.current_request = decoded try: status = self.get_status() if (status == ClientMessage.waiting or status == ClientMessage.blocked): self.vcs_handler = self.handlers[self.current_request.transport.method] if self.vcs_handler is None: self.overall_success = False self.move_state("IDLE") return self.move_state("CHECKOUT") elif status is None: self.move_state("IDLE") elif status == ClientMessage.cancelled: logging.debug("jobid: %s has been cancelled. Acking." % self.current_request.get_job_id()) self.move_state("IDLE") except Exception as requests.exceptions.ConnectionError: self.overall_success = False self.move_state("IDLE") def fatal_error_handler(self, msg, decoded): logging.debug("Fatal Error handler") def checkout_handler(self, msg, decoded): if isinstance(decoded, TaskComplete): self.process.join() if decoded.success is True: self.move_state("BUILD") else: self.overall_success = False self.move_state("CLEAN") def build_handler(self, msg, decoded): if isinstance(decoded, TaskComplete): self.process.join() if decoded.success is True: self.move_state("UPLOAD") else: self.overall_success = False self.subprocess_message = decoded.message self.move_state("CLEAN") def upload_handler(self, msg, decoded): if isinstance(decoded, TaskComplete): self.overall_success = decoded.success self.subprocess_message = decoded.message self.process.join() self.move_state("CLEAN") def clean_handler(self, msg, decoded): if isinstance(decoded, TaskComplete): self.process.join() if decoded.success is True: self.move_state("IDLE") else: self.overall_success = False self.move_state("FATAL_ERROR") def __init__(self, arch, distribution, pkg_format, suites, conn_info, settings) : self.state_table = {} self.state_table["UNKNOWN"] = self.fatal_error_handler self.state_table["IDLE"] = self.idle_handler self.state_table["FATAL_ERROR"] = self.fatal_error_handler self.state_table["CHECKOUT"] = self.checkout_handler self.state_table["BUILD"] = self.build_handler self.state_table["UPLOAD"] = self.upload_handler self.state_table["CLEAN"] = self.clean_handler self.overall_success = None self.state = "UNKNOWN" self.arch = arch self.distribution = distribution self.pkg_format = pkg_format self.listen_list = dict() self.conn = None self.command_chan = None self.message_chan = None self.settings = settings self.poll_time = 60 self.distros = {} self.handlers = {} if 'poll_time' in self.settings: self.poll_time = self.settings['poll_time'] for suite in suites: route = pybit.get_build_route_name(self.distribution, self.arch, suite, self.pkg_format) queue = pybit.get_build_queue_name(self.distribution, self.arch, suite, self.pkg_format) self.listen_list[suite] = { 'route': route, 'queue': queue} self.plugin_handler() self.conn_info = conn_info if self.distribution in self.distros: self.format_handler = self.distros[self.distribution] logging.info("Using %s build client" % self.distribution) elif self.pkg_format == "deb" and "Debian" in self.distros: self.format_handler = self.distros['Debian'] logging.warning("Using default Debian build client for %s package format" % self.pkg_format) else: 
logging.debug("Empty build client") self.format_handler = None self._clean_current() self.move_state("IDLE") def message_handler(self, msg): build_req = jsonpickle.decode(msg.body) if not isinstance(build_req, BuildRequest): self.message_chan.basic_ack(msg.delivery_tag) return if self.process: logging.debug("Detected a running process") self.state_table[self.state](msg, build_req) def command_handler(self, msg): cmd_req = jsonpickle.decode(msg.body) if (not isinstance(cmd_req, TaskComplete) and not isinstance(cmd_req, CommandRequest)): logging.debug("Can't handle message type.") self.command_chan.basic_ack(msg.delivery_tag) elif isinstance(cmd_req, CommandRequest): if isinstance(cmd_req, CancelRequest): logging.debug("Received CANCEL request for jobid: %s" % cmd_req.get_job_id()) self.set_status(ClientMessage.cancelled, cmd_req) if (self.current_request and self.current_request.get_job_id() == cmd_req.get_job_id() and self.process is not None): #We have to sigint because it's completely unsafe to sigkill an sbuild process. os.kill(self.process.pid, signal.SIGINT) self.process.join() self.process = None self.move_state("IDLE") else: logging.debug("Ignoring cancel request as no current request or id doesn't match.") else: logging.debug("Received COMMAND request for jobid:", cmd_req.get_job_id()) else: self.state_table[self.state](msg, cmd_req) def is_building(self): if self.format_handler.is_building(): # FIXME return True return False def connect(self): try: self.conn = amqp.Connection(host=self.conn_info.host, userid=self.conn_info.userid, password=self.conn_info.password, virtual_host=self.conn_info.vhost, insist=False) self.command_chan = self.conn.channel() self.message_chan = self.conn.channel() self.message_chan.basic_qos(0, 1, False) self.command_chan.exchange_declare(exchange=pybit.exchange_name, type="direct", durable=True, auto_delete=False) except socket.error as e: logging.debug("Couldn't connect rabbitmq server with: %s . Error: %s ." % repr(self.conn_info), str(e)) return False for suite, info in self.listen_list.items(): logging.debug("Creating queue with name:" + info['queue']) try: self.message_chan.queue_declare(queue=info['queue'], durable=True, exclusive=False, auto_delete=False) self.message_chan.queue_bind(queue=info['queue'], exchange=pybit.exchange_name, routing_key=info['route']) except amqp.exceptions.AMQPChannelException: logging.debug("Unable to declare or bind to message channel.") return False logging.debug("Creating private command queue with name:" + self.conn_info.client_name) try: self.command_chan.queue_declare(queue=self.conn_info.client_name, durable=False, exclusive=True, auto_delete=True) self.command_chan.queue_bind(queue=self.conn_info.client_name, exchange=pybit.exchange_name, routing_key=self.conn_info.client_name) except amqp.exceptions.AMQPChannelException: logging.debug("Unable to declare or bind to command channel %s. Does this client already exist?" 
% (self.conn_info.client_name, )) return False return True def disconnect(self): if self.conn: if self.command_chan: #self.command_chan.basic_cancel("build_callback") try: logging.debug("Closing down command channel") self.command_chan.close() except socket.error: pass if self.message_chan: #self.message_chan.basic_cancel("build_callback") try: logging.debug("Closing down message channel") self.message_chan.close() except socket.error: pass try: logging.debug("Closing down rabbitmq connection") self.conn.close() except socket.error: pass def __enter__(self): if self.connect(): return self else: return None def __exit__(self, type, value, traceback): self.disconnect() def run_cmd(cmd, simulate, logfile): """ returns zero on succes or the exit value of the command""" ret = 0 if simulate is True: logging.debug("I: Simulating: %s" % cmd) else: logging.debug("Running: %s" % cmd) if logfile is not None: command = cmd cmd = "%s >> %s 2>&1" % (command, logfile) ret = os.system (cmd) >> 8 if ret: logging.debug("%s returned error: %d" % (cmd, ret)) return ret def send_message(conn_data, msg): conn = None chan = None if conn_data is not None: conn = amqp.Connection(host=conn_data.host, userid=conn_data.userid, password=conn_data.password, virtual_host=conn_data.vhost, insist=True) chan = conn.channel() task = None if msg == "success": task = TaskComplete(msg, True) else: task = TaskComplete(msg, False) if conn and chan: chan.basic_publish(amqp.Message(task.toJson()), exchange=pybit.exchange_name, routing_key=conn_data.client_name) chan.close() conn.close() else: logging.debug("I: Simulating sending message: %s " % msg) def get_settings(path): try: ret = {} if type(path) != str: # passed self or some other object, assume default path = "client.conf" if os.path.isfile(path): pass elif os.path.isfile("/etc/pybit/client/client.conf") : path = "/etc/pybit/client/client.conf" else: return ret except Exception as e: raise Exception('Cannot access path to config file: ' + str(e)) return try: fh = open(path, "r") file_contents = fh.read() return json.loads(file_contents) except IOError as e: raise Exception("Cannot open config file for reading: " + str(e)) return except Exception as e: raise Exception("Unhandled JSON error" + str(e)) return def mkdir_p(path): try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST: pass else: raise Exception("Exception" + str(exc)) return pybit-1.0.0/pybitclient/buildclient.py0000644000175000017500000000520212145774554017752 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # buildclient.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
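# Base classes for the plugin system: VersionControlHandler is subclassed by the
# transport plugins (git.py, apt.py, ...) and PackageHandler by the distribution
# build clients (debianclient.py, crossdebian.py). PyBITClient loads every module
# in the plugin directory that exposes createPlugin() and sorts the results into
# these two families via get_distribution() and the 'method' attribute.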
import os class VersionControlHandler(object): def fetch_source(self): pass def get_srcdir(self): pass def clean_source(self, pkg): pass # support test cases def is_dry_run(self): return self.settings["dry_run"] def __init__(self, settings): self.workdir = "" self.settings = settings if not "dry_run" in self.settings: self.settings["dry_run"] = True if not "buildroot" in self.settings: self.settings["buildroot"] = "/tmp/buildd" class PackageHandler(object): chan = None logdir = None def __init__(self, settings): self.settings = settings if not "dry_run" in self.settings: self.settings["dry_run"] = True if not "buildroot" in self.settings: self.settings["buildroot"] = "/tmp/buildd" self.logdir = os.path.join(self.settings["buildroot"], "logs") if not os.path.isdir(self.logdir) and not self.settings["dry_run"]: os.mkdir(self.logdir) def get_buildlog(self, buildroot, buildreq): logfile = None stamp = buildreq.get_buildstamp() if stamp is not None: log = "%s_%s-%s-%s" % (buildreq.get_package(), buildreq.get_version(), buildreq.get_arch(), buildreq.get_buildstamp()) logfile = os.path.join(self.logdir, log) return logfile def is_dry_run(self): return self.settings["dry_run"] def build_master(self, buildroot): pass def build_slave(self, buildroot): pass def update_environment(self, name, pkg): pass def upload(self, dirname, changes, pkg): pass def get_distribution(self): pass pybit-1.0.0/pybitclient/sbuild-orig.sh0000755000175000017500000000316112053435066017651 0ustar neilneil00000000000000#!/bin/sh # Copyright 2012 Neil Williams # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. set -e DIR=$1 TGT=$2 /usr/lib/pbuilder/pbuilder-satisfydepends-classic --control ${1}/debian/control # non-native packages commonly need a customised command to prepare the # released tarball (which is what dpkg-buildpackage needs) from the # VCS export/clone (from which the tarball was originally built). # Building the tarball from VCS commonly requires the build-dependencies # of the package to be installed, so this needs to happen inside the chroot. # If the custom command is not available in debian/rules, clone the # debian/ directory to a new repo and add it, then add any extra build-deps # to debian/control and point the build at the new repo. Add or fix the # debian/watch file if necessary. 
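# Sketch with a hypothetical target name: if the packaging provides a
# 'get-orig-source' target, the client submits the custom command
# "fakeroot debian/rules get-orig-source" (the only command form that
# build_command_handler accepts) and this script runs it inside the chroot as
#   (cd <package_dir> ; fakeroot debian/rules get-orig-source)
# before building the source package with dpkg-buildpackage below.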
(cd $1 ; fakeroot debian/rules $2 )
(cd $1 ; fakeroot dpkg-buildpackage -nc -S -d -uc -us)
pybit-1.0.0/pybitclient/README0000644000175000017500000000156412145726703015762 0ustar neilneil00000000000000Creating new build environments

mkdir /mnt/pybit
lvcreate -n schroot-pybit -L1G buildd
mke2fs -j /dev/buildd/schroot-pybit
mount /dev/buildd/schroot-pybit /mnt/pybit
debootstrap --include=sudo,fakeroot,build-essential,debfoster,apt,vim --variant=buildd --arch=i386 --keyring=/etc/apt/trusted.gpg wheezy /mnt/pybit http://mirror/debian
umount /mnt/pybit

Under /etc/schroot/chroot.d/ :

[wheezy]
type=lvm-snapshot
device=/dev/buildd/schroot-wheezy
description=Debian Wheezy
priority=5
users=buildd,sbuild
root-users=buildd
source-root-users=root,buildd
aliases=stable,development,default
lvm-snapshot-options=--size 2G

Edit LVM chroot sourced packages with schroot -c wheezy-source:

# schroot -c wheezy-source
(sid)# echo 'APT::Install-Recommends "false";' > /etc/apt/apt.conf.d/50recommends
(sid)# apt-get install build-essential pbuilder devscripts fakeroot
(sid)# apt-get clean
pybit-1.0.0/pybitclient/git.py0000644000175000017500000000620612145766634016234 0ustar neilneil00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# git.py
#
# Copyright 2012 Neil Williams
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
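# GitClient: fetch_source() clones transport.uri into
# <buildroot>/<suite>/git/<package> and, when transport.vcs_id is set, checks out
# that revision or branch; clean_source() removes the working directory again.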
import os import pybitclient from pybitclient.buildclient import VersionControlHandler import logging class GitClient(VersionControlHandler): def fetch_source(self, buildreq, conn_data): retval = None if buildreq.transport.method != "git": retval = "wrong_method" if not retval : self.workdir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) if (buildreq.transport.vcs_id is not None): command = "(git clone %s %s ; cd %s ; git checkout %s)" % (buildreq.transport.uri, self.workdir, self.workdir, buildreq.transport.vcs_id) elif (buildreq.transport.uri is not None): command = "git clone %s %s" % (buildreq.transport.uri, self.workdir) else: logging.warn ("E: Could not fetch source, no method URI found") retval = "unrecognised uri" if not retval : if pybitclient.run_cmd (command, self.settings["dry_run"], None) : retval = "fetch_source" if not retval : retval = "success" pybitclient.send_message (conn_data, retval) if retval == "success": return 0 else : return 1 def get_srcdir (self): return self.workdir def clean_source (self, buildreq, conn_data) : retval = None if buildreq.transport.method != "git": retval = "wrong_method" if not retval : self.cleandir = os.path.join (self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) command = "rm -rf %s*" % (self.cleandir) if pybitclient.run_cmd (command, self.settings["dry_run"], None) : retval = "failed_clean" retval = "success" pybitclient.send_message (conn_data, retval) # return the exit value of the process - exit (0) for success. if retval == "success": return 0 else : return 1 def __init__(self, settings): VersionControlHandler.__init__(self, settings) self.method = "git" def createPlugin (settings) : return GitClient (settings) pybit-1.0.0/pybitclient/crossdebian.py0000644000175000017500000001514212145774317017751 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # crossdebian.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
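# DebianCrossClient: cross-building variant of the Debian client; build_master()
# drives sbuild with --debbuildopt="-a<arch>" and the /usr/bin/sbuild-cross.sh
# setup hook instead of the plain native sbuild invocation used in debianclient.py.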
import os import logging import pybitclient from pybitclient.buildclient import PackageHandler from pybit.models import BuildRequest class DebianCrossClient(PackageHandler): dput_cfg = "" dput_dest = "" def update_environment(self, name, pkg, conn_data): retval = "success" command = "schroot -u root -c %s -- apt-get update > /dev/null 2>&1" % name if pybitclient.run_cmd(command, self.settings["dry_run"], None): retval = "build_update" pybitclient.send_message(conn_data, retval) if retval == "success": return 0 else: return 1 def build_master(self, buildreq, conn_data): retval = None logfile = self.get_buildlog(self.settings["buildroot"], buildreq) if not isinstance(buildreq, BuildRequest): logging.debug("E: not able to identify package name.") retval = "misconfigured" pybitclient.send_message(conn_data, retval) return srcdir = os.path.join(self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) package_dir = "%s/%s" % (srcdir, buildreq.get_package()) command = "(cd %s ; dpkg-buildpackage -S -d -uc -us)" % package_dir if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "build-dep-wait" if not retval: command = "sbuild -n --debbuildopt=\"-a%s\" --setup-hook=\"/usr/bin/sbuild-cross.sh\" --arch=%s -A -s -d %s %s/%s_%s.dsc" % ( buildreq.get_arch(), buildreq.get_arch(), buildreq.get_suite(), srcdir, buildreq.get_package(), buildreq.get_version()) if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "build_binary" if not retval: changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not self.settings["dry_run"] and not os.path.isfile(changes): logging.debug("build_master: Failed to find %s file." % changes) retval = "build_changes" if not retval: retval = "success" pybitclient.send_message(conn_data, retval) if retval == "success": return 0 else: return 1 def upload(self, buildreq, conn_data): retval = None logfile = self.get_buildlog(self.settings["buildroot"], buildreq) changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not os.path.isfile(changes) and not self.settings["dry_run"]: logging.debug("upload: Failed to find %s file." 
% changes) retval = "upload_changes" if not retval: command = "dput -c %s %s %s %s" % (self.dput_cfg, self.settings["dput"], self.settings["dput_dest"], changes) if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "upload_fail" if not retval: command = "dcmd rm %s" % changes if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "post-upload-clean-fail" if not retval: retval = "success" pybitclient.send_message(conn_data, retval) if retval == "success": return 0 else: return 1 def build_slave(self, buildreq, conn_data): retval = None logfile = self.get_buildlog(self.settings["buildroot"], buildreq) srcdir = os.path.join(self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) package_dir = "%s/%s" % (srcdir, buildreq.get_package()) if os.path.isdir(package_dir) or self.settings["dry_run"]: command = "(cd %s ; dpkg-buildpackage -S -d -uc -us)" % package_dir if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "build_dsc" if not retval: command = "sbuild -n --apt-update -d %s %s/%s_%s.dsc" % ( buildreq.get_suite(), srcdir, buildreq.get_package(), buildreq.get_version()) if pybitclient.run_cmd(command, self.settings["dry_run"], logfile): retval = "build_binary" if not retval: changes = "%s/%s_%s_%s.changes" % (self.settings["buildroot"], buildreq.get_package(), buildreq.get_version(), buildreq.get_arch()) if not self.settings["dry_run"] and not os.path.isfile(changes): logging.debug("build_slave: Failed to find %s file." % changes) retval = "build_changes" else: retval = "Can't find build dir." if not retval: retval = "success" pybitclient.send_message(conn_data, retval) if retval == "success": return 0 else: return 1 def get_distribution(self): return 'Debian-Cross' def __init__(self, settings): PackageHandler.__init__(self, settings) # Specific buildd options # FIXME: decide how this is managed and packaged # variables to retrieve from the job object later self.dput_cfg = "/etc/pybit/client/dput.cf" if not settings["dry_run"]: os.chdir(settings["buildroot"]) def createPlugin(settings): return DebianCrossClient(settings) pybit-1.0.0/pybitclient/apt.py0000644000175000017500000001140312145776046016236 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # apt.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
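# AptClient: 'apt' transport that fetches Debian source packages with
# 'apt-get source' instead of a VCS checkout. fetch_source() builds a private
# apt configuration rooted in the per-job workdir (its own sources.list pointing
# at a deb-src mirror, plus lists/ and archives/ directories) so the -o Dir
# overrides keep it separate from the host's apt state.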
import os import logging import pybitclient from pybitclient.buildclient import VersionControlHandler class AptClient(VersionControlHandler): def fetch_source(self, buildreq, conn_data): retval = None command = None if buildreq.transport.method != "apt": retval = "wrong_method" if not retval: self.workdir = os.path.join(self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) if not os.path.isdir(self.workdir): pybitclient.mkdir_p(self.workdir) apt_path = os.path.join(self.workdir, "lists", "partial") pybitclient.mkdir_p(apt_path) apt_path = os.path.join(self.workdir, "archives", "partial") pybitclient.mkdir_p(apt_path) apt_path = os.path.join(self.workdir, "etc", "apt", "preferences.d") pybitclient.mkdir_p(apt_path) apt_path = os.path.join(self.workdir, "sources.list") src_list = os.open(apt_path, os.O_CREAT | os.O_WRONLY) url = "deb-src http://cdn.debian.net/debian %s main " % buildreq.get_suite() os.write(src_list, url) cfg_str = "-o Apt::Get::AllowUnauthenticated=true -o Dir=%s -o Dir::State=%s -o Dir::Etc::SourceList=%s/sources.list -o Dir::Cache=%s" % \ (self.workdir, self.workdir, self.workdir, self.workdir) command = "(cd %s && apt-get %s update 2>/dev/null || true)" % (self.workdir, cfg_str) if not retval: if pybitclient.run_cmd(command, self.settings["dry_run"], None): retval = "update_apt" if buildreq.get_version() is not None: command = "(cd %s/.. && apt-get %s -d source %s=%s )" % (self.workdir, cfg_str, buildreq.get_package(), buildreq.get_version()) else: command = "(cd %s && apt-get %s -d source %s )" % (self.workdir, cfg_str, buildreq.get_package()) if not retval: if pybitclient.run_cmd(command, self.settings["dry_run"], None): retval = "fetch_source" if not retval: retval = "success" pybitclient.send_message(conn_data, retval) # return the exit value of the process - exit (0) for success. 
if retval == "success": return 0 else: return 1 def get_srcdir(self): return self.workdir def clean_source(self, buildreq, conn_data): retval = None if buildreq.transport.method != "apt": retval = "wrong_method" if not retval: src_dir = os.path.join(self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method) src_changes = "%s/%s_%s.dsc" % (src_dir, buildreq.get_package(), buildreq.get_version()) command = "dcmd rm -f %s" % src_changes if not os.path.exists(src_changes): retval = "success" elif pybitclient.run_cmd(command, self.settings["dry_run"], None): retval = "source-clean-fail" if not retval: self.cleandir = os.path.join(self.settings["buildroot"], buildreq.get_suite(), buildreq.transport.method, buildreq.get_package()) command = "rm -rf %s/" % self.cleandir if pybitclient.run_cmd(command, self.settings["dry_run"], None): retval = "failed_clean" if not retval: retval = "success" pybitclient.send_message(conn_data, retval) if retval == "success": return 0 else: return 1 def __init__(self, settings): VersionControlHandler.__init__(self, settings) self.method = "apt" def createPlugin(settings): return AptClient(settings) pybit-1.0.0/application.wsgi0000755000175000017500000000131512106502745015742 0ustar neilneil00000000000000#!/usr/bin/python # Change working directory so relative paths (and template lookup) work again import os os.chdir(os.path.dirname(__file__)) import jsonpickle import optparse import site import pybit import pybitweb import logging import sys from pybitweb.db import Database from pybitweb.controller import Controller (settings, opened_path) = pybit.load_settings("web/web.conf") FORMAT = '%(asctime)s %(filename)s:%(lineno)d %(msg)s' logging.basicConfig( stream=sys.stderr, level=logging.WARN) logging.basicConfig( format=FORMAT ) myDb = Database(settings['db']) # singleton instance buildController = Controller(settings, myDb) # singleton instance application = pybitweb.get_app(settings,myDb,buildController) pybit-1.0.0/pybit_web.py0000755000175000017500000000527612145767605015127 0ustar neilneil00000000000000#!/usr/bin/python # pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from pybitweb.db import Database from pybitweb.controller import Controller import optparse import pybitweb import bottle import pybit import logging import sys META="PYBIT_WEB_" if __name__ == '__main__': parser = optparse.OptionParser() #options we can override in the config file. 
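# Illustrative invocation (the config path shown is just the shipped default):
#   ./pybit_web.py --config web/web.conf -v
# Settings read from the config file are merged with any command-line overrides
# through pybit.merge_options() before the bottle application is started.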
groupConfigFile = optparse.OptionGroup(parser, "Config File Defaults","All the options which have defaults read from a config file.") parser.add_option_group(groupConfigFile) parser.add_option_group(groupConfigFile) parser.add_option("--config", dest="config", default="web/web.conf", help="Config file to read settings from, defaults to web.conf which will be read from configs/ and /etc/pybit/ in turn.", metavar=META + "CONF_FILE") parser.add_option("-v", dest="verbose", action="store_true", default=False, help="Turn on verbose messages.", metavar=META+"VERBOSE") (options, args) = parser.parse_args() (settings, opened_file) = pybit.load_settings(options.config) settings = pybit.merge_options(settings, groupConfigFile, options) FORMAT = '%(asctime)s %(filename)s:%(lineno)d %(msg)s' logging.basicConfig( stream=sys.stderr, level=logging.WARN) logging.basicConfig( format=FORMAT ) myDb = Database(settings['db']) # singleton instance buildController = Controller(settings, myDb) # singleton instance - Needs access to both controller and web settings # try: app = pybitweb.get_app(settings, myDb, buildController) bottle.debug(options.verbose) bottle.run(app=app, server=settings['web']['app'], host=settings['web']['interface'], port=settings['web']['port'], reloader=settings['web']['reloader']) pybit-1.0.0/makeme0000755000175000017500000000004312145727327013733 0ustar neilneil00000000000000#!/bin/bash python setup.py sdist pybit-1.0.0/test/0000755000175000017500000000000012146006064013515 5ustar neilneil00000000000000pybit-1.0.0/test/webapi-test.py0000644000175000017500000000303112145770013016311 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # untitled.py # # Copyright 2012 simonh # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. #from common.models import transport, packageinstance, job import logging import sys import unittest class TestWeb(unittest.TestCase) : def setUp (self): return def test_01 (self) : return if __name__ == '__main__': FORMAT = '%(msg)s' logging.basicConfig(format=FORMAT) logging.basicConfig( stream=sys.stderr ) logging.getLogger( "testCase" ).setLevel( logging.DEBUG ) suite = unittest.TestLoader().loadTestsFromTestCase(TestWeb) unittest.TextTestRunner(verbosity=2).run(suite) runner = unittest.TextTestRunner(verbosity=2) res = runner.run(suite) if not res.wasSuccessful() : sys.exit (1) pybit-1.0.0/test/plugintest.py0000644000175000017500000000725212145770015016276 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # plugintest.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. import imp import os import logging import sys import unittest import pybit handler_api = [ 'clean_source', 'fetch_source', 'get_srcdir', 'is_dry_run', 'method' ] build_api = [ 'build_master', 'build_slave', 'update_environment', 'upload' ] class TestClient(unittest.TestCase) : def setUp (self): return options = {} def test_01_plugin (self) : log = logging.getLogger( "testCase" ) log.debug(" ") plugin = None vcs = None client = None plugins = [] distros = {} handlers = {} plugin_dir = "/var/lib/pybit-client.d/" (settings, opened_path) = pybit.load_settings("client/client.conf") if not os.path.exists (plugin_dir): plugin_dir = os.path.join (os.getcwd(), "pybitclient/") self.assertTrue (os.path.isdir(plugin_dir)) for name in os.listdir(plugin_dir): if name.endswith(".py"): plugins.append(name.strip('.py')) for name in plugins : if (name == "buildclient" or name == "__init__"): continue plugin_path = [ plugin_dir ]; fp, pathname, description = imp.find_module(name, plugin_path) try: mod = imp.load_module(name, fp, pathname, description) self.assertTrue (mod) self.assertTrue (hasattr(mod, 'createPlugin')) plugin = mod.createPlugin(settings) self.assertTrue (plugin) self.assertTrue (hasattr(plugin, 'get_distribution') or hasattr(plugin, 'method')) if (hasattr(plugin, 'get_distribution') and plugin.get_distribution() is not None) : client = plugin elif (hasattr(plugin, 'method') and plugin.method is not None) : vcs = plugin else : self.assertTrue(False) continue finally: # Since we may exit via an exception, close fp explicitly. if fp: fp.close() if client: name = client.get_distribution() if (name not in distros) : distros[name] = client if vcs : if (vcs.method not in handlers) : handlers[vcs.method] = vcs; self.assertTrue (len(handlers.keys()) > 0) self.assertTrue (len(distros.keys()) > 0) def main(): FORMAT = '%(msg)s' logging.basicConfig(format=FORMAT) logging.basicConfig( stream=sys.stderr ) logging.getLogger( "testCase" ).setLevel( logging.DEBUG ) suite = unittest.TestLoader().loadTestsFromTestCase(TestClient) runner = unittest.TextTestRunner(verbosity=2) res = runner.run(suite) if not res.wasSuccessful() : sys.exit (1) return 0 if __name__ == '__main__': main() pybit-1.0.0/test/pybitclient-test.py0000644000175000017500000000702412145770007017401 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # pybitclient-test.py # # Copyright 2012 Neil Williams # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. import os import logging import sys import json import unittest # needs PYTHONPATH=.:.. import pybit from pybitclient.buildclient import PackageHandler, VersionControlHandler from pybitclient.debianclient import DebianBuildClient from pybitclient.subversion import SubversionClient from pybitclient.git import GitClient from pybitclient.apt import AptClient class TestClient(unittest.TestCase) : def setUp (self): return options = {} def test_01_client_config (self) : log = logging.getLogger( "testCase" ) log.debug(" ") conffile = "%s/configs/client/client.conf" % (os.getcwd()); self.assertTrue (os.path.isfile(conffile), "could not find %s" % conffile) log.debug("I: reading %s" % (os.path.relpath(conffile, os.getcwd()))) (self.options, opened_path) = pybit.load_settings(conffile) if not "dry_run" in self.options : msg = "I: asserting dry_run for test cases" log.debug (msg) self.options["dry_run"] = True elif self.options["dry_run"] == False : msg = "I: overriding dry_run for test cases" log.debug (msg) self.options["dry_run"] = True else : msg = "I: dry_run already set." log.debug(msg) def test_02_build_client (self) : log = logging.getLogger( "testCase" ) log.debug(" ") if not "dry_run" in self.options : msg = "I: asserting dry_run for test cases" log.debug (msg) self.options["dry_run"] = True base_client = PackageHandler(self.options) self.assertTrue (base_client) self.assertTrue (base_client.is_dry_run()) deb_client = DebianBuildClient(self.options) self.assertTrue (deb_client) self.assertTrue (deb_client.is_dry_run()) svn_client = SubversionClient(self.options) self.assertTrue (svn_client) self.assertTrue (svn_client.is_dry_run()) apt_client = AptClient(self.options) self.assertTrue (apt_client) self.assertTrue (apt_client.is_dry_run()) git_client = GitClient(self.options) self.assertTrue (git_client) self.assertTrue (git_client.is_dry_run()) def main(): FORMAT = '%(msg)s' logging.basicConfig(format=FORMAT) logging.basicConfig( stream=sys.stderr ) logging.getLogger( "testCase" ).setLevel( logging.DEBUG ) suite = unittest.TestLoader().loadTestsFromTestCase(TestClient) runner = unittest.TextTestRunner(verbosity=2) res = runner.run(suite) if not res.wasSuccessful() : sys.exit (1) return 0 if __name__ == '__main__': main() pybit-1.0.0/examples/0000755000175000017500000000000012146006064014354 5ustar neilneil00000000000000pybit-1.0.0/examples/procmail-mbox.py0000755000175000017500000000476112145773262017524 0ustar neilneil00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # procmail-mbox.py # # Copyright 2013 Neil Williams # Copyright 2002 Noah Spurrier # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
# Derived from http://code.activestate.com/recipes/157437-reading-and-writing-mbox-style-mailbox-files/ # The purpose of this file is to emulate procmail handling for a # subscription to debian-devel-changes to help testing of the # changes-debian hook. Pass a standard mbox as the first argument # to the script. e.g. # ./procmail-mbox.py mbox # This example is hard-coded to use the changes-debian hook # in ../hook/ import mailbox, rfc822 import sys, os, string, re from subprocess import Popen, PIPE, STDOUT def passthrough_filter(msg, document): """If you want to extend this to act as a filter on the changes file being parsed, simply return None here. msg contains the full email, with headers. document is just the .changes file. """ return document def process_mailbox(mailboxname_in, filter_function): """This processes a each message in the 'in' mailbox. Each message is passed to the filter_function.""" # Open the mailbox. mb = mailbox.UnixMailbox(file(mailboxname_in,'r')) msg = mb.next() while msg is not None: # Properties of msg cannot be modified, so we pull out the # document to handle it separately. document = msg.fp.read() document = filter_function(msg, document) if document is not None: p = Popen(['../hook/changes-debian'], stdout=PIPE, stdin=PIPE, stderr=STDOUT) grep_stdout = p.communicate(input=document)[0] print grep_stdout msg = mb.next() def main(): mailboxname_in = sys.argv[1] process_mailbox(mailboxname_in, passthrough_filter) return 0 if __name__ == '__main__': main() pybit-1.0.0/pybitweb/0000755000175000017500000000000012146006064014363 5ustar neilneil00000000000000pybit-1.0.0/pybitweb/controller.py0000644000175000017500000005273612145767740017153 0ustar neilneil00000000000000#!/usr/bin/python # pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
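# Controller: used by the web layer to turn a submitted build into jobs.
# process_job() checks the blacklist, resolves the requested suite/architecture/
# build-environment combinations, creates a package instance and job for each one
# and publishes a BuildRequest message to the matching AMQP build queue (see
# pybit.get_build_route_name / get_build_queue_name).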
from bottle import response from amqplib import client_0_8 as amqp import jsonpickle import os import pybit import logging from pybit.models import BuildRequest, CancelRequest class Controller(object): def get_amqp_channel(self): chan = None try: conn = amqp.Connection(host=self.settings['controller']['rabbit_url'], userid=self.settings['controller']['rabbit_userid'], password=self.settings['controller']['rabbit_password'], virtual_host=self.settings['controller']['rabbit_virtual_host'], insist=self.settings['controller']['rabbit_insist']) try: chan = conn.channel() chan.exchange_declare(exchange=pybit.exchange_name, type="direct", durable=True, auto_delete=False) except amqp.AMQPChannelException: pass except amqp.AMQPConnectionException: pass return chan def __init__(self, settings, db): self.db = db self.settings = settings self.log = logging.getLogger( "controller" ) if (('debug' in self.settings['controller']) and ( self.settings['controller']['debug'])) : self.log.setLevel( logging.DEBUG ) self.log.debug("Controller constructor called.") def process_job(self, dist, architectures, version, name, suite, pkg_format, transport, build_environment = None) : try: # Look at blacklist, dont build excluded package names if self.db.check_blacklist("name",name): return False # Look at blacklist, dont build packages from SVN paths which match the blacklist rule. if self.db.check_blacklist("vcs_uri",transport.uri): return False except Exception as e: print(("Exception checking blacklist " + str(e))) return False try: current_package = self.process_package(name, version) if not current_package.id : return False current_suite = self.db.get_suite_byname(suite)[0] current_dist = self.db.get_dist_byname(dist)[0] current_format = self.db.get_format_byname(pkg_format)[0] build_env_suite_arch = self.process_build_environment_architectures(current_suite,architectures,build_environment) if len(build_env_suite_arch) == 0: self.log.warn("INVALID BUILD ENV SUITE ARCH MAPPINGS FOR %s, %s, %s - CHECK YOU HAVE SOME CONFIGURED.", current_suite,architectures,build_environment) response.status = "500 - Error submitting job" return else: current_build_env = build_env_suite_arch[0].buildenv master_flag = True chan = self.get_amqp_channel() for build_env_suite_arch in build_env_suite_arch : current_arch = build_env_suite_arch.suitearch.arch if current_build_env and current_build_env.name != build_env_suite_arch.get_buildenv_name() : #FIXME #first packageinstance for each build environment should have master flag set master_flag = True current_build_env = build_env_suite_arch.buildenv current_packageinstance = self.process_packageinstance(current_build_env, current_arch, current_package, current_dist, current_format, current_suite, master_flag) if current_packageinstance.id : new_job = self.db.put_job(current_packageinstance,None) if (new_job and new_job.id): # self.log.debug("\nCREATED NEW JOB: %s\n", jsonpickle.encode(new_job)) self.cancel_superceded_jobs(new_job) # NEW STUFF FOR RESUBMITTING JOBS build_request_obj = BuildRequest(new_job,transport, "%s:%s" % (self.settings['web']['hostname'], self.settings['web']['port'])); build_req = jsonpickle.encode(build_request_obj) self.db.log_buildRequest(build_request_obj) #print "SENDING REQUEST WITH DATA", str(build_req) msg = amqp.Message(build_req) msg.properties["delivery_mode"] = 2 routing_key = pybit.get_build_route_name(new_job.packageinstance.get_distribution_name(), new_job.packageinstance.get_arch_name(), new_job.packageinstance.get_suite_name(), 
new_job.packageinstance.get_format_name()) build_queue = pybit.get_build_queue_name(new_job.packageinstance.get_distribution_name(), new_job.packageinstance.get_arch_name(), new_job.packageinstance.get_suite_name(), new_job.packageinstance.get_format_name()) self.add_message_queue(build_queue, routing_key, chan) chan.basic_publish(msg,exchange=pybit.exchange_name,routing_key=routing_key,mandatory=True) #self.log.debug("\n____________SENDING %s ____________TO____________ %s", build_req, routing_key) self.log.debug("SENDING BUILD REQUEST FOR JOB ID %i %s %s %s %s %s %s", new_job.id, new_job.packageinstance.get_distribution_name(), new_job.packageinstance.get_package_version(), new_job.packageinstance.get_distribution_name(), new_job.packageinstance.get_arch_name(), new_job.packageinstance.get_suite_name(), new_job.packageinstance.get_format_name()) else : self.log.warn("FAILED TO ADD JOB") response.status = "404 - failed to add job." return False master_flag = False else : self.log.warn("PACKAGE INSTANCE ERROR") response.status = "404 - failed to add/retrieve package instance." return False except Exception as e: raise Exception('Error submitting job: ' + str(e)) response.status = "500 - Error submitting job" return False return True # success def add_message_queue(self, queue, routing_key, chan): self.log.debug("CREATING %s", chan.queue_declare(queue=queue, durable=True, exclusive=False, auto_delete=False)) self.log.debug("BINDING %s, %s, %s", queue, routing_key, chan.queue_bind(queue=queue, exchange=pybit.exchange_name, routing_key=routing_key)) return def process_build_environment_architectures(self, suite, requested_arches, requested_environment) : self.log.debug("REQUESTED ARCHITECTURES: %s, BUILD ENV: %s", requested_arches, requested_environment) env_arches_to_build = list() supported_build_env_suite_arches = self.db.get_supported_build_env_suite_arches(suite.name) if (len(supported_build_env_suite_arches) == 0): response.status = "404 - no supported build environments arch combinations for this suite." 
else : if ("all" in requested_arches) and ("any" not in requested_arches) : # this is an arch-all request so we only need to build for the first supported arch (for each build env) self.log.debug("ARCH-ALL REQUEST, ONLY BUILD FIRST SUPPORTED ARCH IN EACH BUILD ENV/ARCH COMBINATION MATCHING (%s, %s)", suite.name, requested_environment) supported_build_env_name = "" for build_env_suite_arch in supported_build_env_suite_arches : if ((requested_environment == None) or (requested_environment == build_env_suite_arch.get_buildenv_name())) : if supported_build_env_name != build_env_suite_arch.get_buildenv_name() : supported_build_env_name = build_env_suite_arch.get_buildenv_name() env_arches_to_build.append(build_env_suite_arch) self.log.debug(" ADDING (%s, %s, %s, %i)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight()) else : self.log.debug(" IGNORING (%s, %s, %s, %i)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight()) else : self.log.debug(" IGNORING (%s, %s, %s, %i) DOES NOT MATCH REQUESTED BUILD ENV (%s)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight(), requested_environment) elif ("any" in requested_arches) : self.log.debug("ARCH-ALL-ANY REQUEST, BUILDING FOR ALL SUPPORTED BUILD ENV/ARCH COMBINATIONS MATCHING (%s, %s)", suite.name, requested_environment) for build_env_suite_arch in supported_build_env_suite_arches : if ((requested_environment == None) or (requested_environment == build_env_suite_arch.get_buildenv_name())) : env_arches_to_build.append(build_env_suite_arch) self.log.debug(" ADDING (%s, %s, %s, %i)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight()) else : self.log.debug(" IGNORING (%s, %s, %s, %i) DOES NOT MATCH REQUESTED BUILD ENV (%s)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight(), requested_environment) else : self.log.debug("SPECIFIC ARCH (%s) BUILD ENV (%s) REQUEST...%i SUPPORTED CONFIGURATIONS", requested_arches, requested_environment,len(supported_build_env_suite_arches)) for build_env_suite_arch in supported_build_env_suite_arches : if ((build_env_suite_arch.get_arch_name() in requested_arches) and ((requested_environment is None) or (requested_environment == build_env_suite_arch.get_buildenv_name()))) : env_arches_to_build.append(build_env_suite_arch) self.log.debug(" ADDING (%s, %s, %s, %i)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight()) else : self.log.debug(" IGNORING (%s, %s, %s, %i)", build_env_suite_arch.get_suite_name(), build_env_suite_arch.get_arch_name(), build_env_suite_arch.get_buildenv_name(), build_env_suite_arch.get_master_weight()) return env_arches_to_build def process_package(self, name, version) : # retrieve existing package or try to add a new one package = None matching_package_versions = self.db.get_package_byvalues(name,version) if len(matching_package_versions) > 0 : package = matching_package_versions[0] if package.id : self.log.debug("MATCHING PACKAGE FOUND (%i, %s, %s)", 
package.id, package.name, package.version) else : # add new package to db package = self.db.put_package(version,name) if package.id : self.log.debug("ADDED NEW PACKAGE (%i, %s, %s)", package.id, package.name, package.version) else : self.log.warn("FAILED TO ADD NEW PACKAGE (%s, %s)", package.name, package.version) return package def process_packageinstance(self, build_env, arch, package, dist, pkg_format, suite, master) : # check & retrieve existing package or try to add a new one packageinstance = None if self.db.check_specific_packageinstance_exists(build_env, arch, package, dist, pkg_format, suite) : # retrieve existing package instance from db packageinstance = self.db.get_packageinstance_byvalues(package, build_env, arch, suite, dist, pkg_format)[0] if packageinstance.id : self.log.debug("MATCHING PACKAGE INSTANCE FOUND (%i, MASTER: %s) FOR [%s, %s, %s, %s, %s, %s, %s]", packageinstance.id, packageinstance.master, package.name, package.version, (build_env.name if build_env else "BUILD ENV (NONE)"), arch.name, dist.name, pkg_format.name, suite.name) # Temporarily disable master update for Issue #84, this should not be default behaviour. # if packageinstance.master != master : # self.log.debug("UPDATING PACKAGE INSTANCE MASTER FLAG (%s)", master) # self.db.update_packageinstance_masterflag(packageinstance.id,master) # packageinstance.master = master else : # add new package instance to db packageinstance = self.db.put_packageinstance(package, build_env, arch, suite, dist, pkg_format, master) if packageinstance.id : self.log.debug("ADDED NEW PACKAGE INSTANCE (%i, MASTER: %s) FOR [%s, %s, %s, %s, %s, %s, %s]", packageinstance.id, packageinstance.master, package.name, package.version, (build_env.name if build_env else "BUILD ENV (NONE)"), arch.name, dist.name, pkg_format.name, suite.name) else : self.log.warn("FAILED TO ADD NEW PACKAGE INSTANCE FOR [%s, %s, %s, %s, %s, %s, %s]", package.name, package.version, (build_env.name if build_env else "BUILD ENV (NONE)"), arch.name, dist.name, pkg_format.name, suite.name) return packageinstance def process_cancel(self, job, chan): job_status_history = self.db.get_job_statuses(job.id) last_status = job_status_history[-1].status build_client = job_status_history[-1].buildclient if (len(job_status_history) > 0) and (last_status == "Building") and (build_client != None) : cancel_req = jsonpickle.encode(CancelRequest(job,"%s:%s" % (self.settings['web']['hostname'], self.settings['web']['port']))) msg = amqp.Message(cancel_req) msg.properties["delivery_mode"] = 2 self.log.debug("UNFINISHED JOB ID %i, STATUS: %s, SENDING CANCEL REQUEST TO: %s", job.id, last_status, build_client) chan.basic_publish(msg,exchange=pybit.exchange_name,routing_key=build_client) else : self.log.debug("UNFINISHED JOB ID %i, STATUS: %s, UPDATE STATUS TO 'Cancelled'", job.id, last_status) self.db.put_job_status(job.id, "Cancelled", build_client) return def cancel_superceded_jobs(self, new_job) : # check for unfinished jobs that might be cancellable packageinstance = new_job.packageinstance unfinished_jobs_list = self.db.get_unfinished_jobs() chan = self.get_amqp_channel() for unfinished_job in unfinished_jobs_list: unfinished_job_package_name = unfinished_job.packageinstance.get_package_name() if unfinished_job_package_name == packageinstance.get_package_name() : if new_job.id != unfinished_job.id : unfinished_job_package_version = unfinished_job.packageinstance.get_package_version() command = "dpkg --compare-versions %s '<<' %s" % (packageinstance.get_package_version(), 
unfinished_job_package_version) if (unfinished_job_package_version == packageinstance.get_package_version()) or (os.system (command)) : unfinished_job_dist_id = unfinished_job.packageinstance.distribution.id unfinished_job_arch_id = unfinished_job.packageinstance.arch.id unfinished_job_suite_id = unfinished_job.packageinstance.suite.id if (unfinished_job_dist_id == packageinstance.distribution.id) and (unfinished_job_arch_id == packageinstance.arch.id) and (unfinished_job_suite_id == packageinstance.suite.id) : #check build env... if (((unfinished_job.packageinstance.build_env is None) and (packageinstance.build_env is None)) or (unfinished_job.packageinstance.build_env.id == packageinstance.build_env.id)): self.process_cancel(unfinished_job, chan) # else : # self.log.debug("IGNORING UNFINISHED JOB (%i, %s, %s) BUILD ENV DIFFERS", unfinished_job.id, unfinished_job_package_name, unfinished_job_package_version) # else : # self.log.debug("IGNORING UNFINISHED JOB (%i, %s, %s) DIST/ARCH/SUITE DIFFERS", unfinished_job.id, unfinished_job_package_name, unfinished_job_package_version) # else : # self.log.debug("IGNORING UNFINISHED JOB (%i, %s, %s) VERSION DIFFERS", unfinished_job.id, unfinished_job_package_name, unfinished_job_package_version) # else : # self.log.debug("IGNORING NEW JOB (%i)", unfinished_job.id) # else : # self.log.debug("IGNORING UNFINISHED JOB (%i, %s)", unfinished_job.id, unfinished_job_package_name) return def cancel_all_builds(self): self.log.debug("Cancelling all builds!") # cancels all packages/jobs unfinished_jobs_list = self.db.get_unfinished_jobs() for unfinished_job in unfinished_jobs_list: chan = self.get_amqp_channel() self.process_cancel(unfinished_job, chan) return def cancel_package(self, package_id): # cancels all instances of a package package = self.db.get_package_id(package_id) if not package.id : response.status = "404 - no package matching package_id" else : unfinished_jobs_list = self.db.get_unfinished_jobs() for unfinished_job in unfinished_jobs_list: if (unfinished_job.packageinstance.get_package_name() == package.name) and (unfinished_job.packageinstance.get_package_version() == package.version): chan = self.get_amqp_channel() self.process_cancel(unfinished_job, chan) return def cancel_package_instance(self,job_id): #FIXME: rename... # cancels a specific job/package instance try: if not job_id : response.status = "400 - Required fields missing." return else : job_to_cancel = self.db.get_job(job_id) if not job_to_cancel : response.status = "404 - no job matching id" else : chan = self.get_amqp_channel() self.process_cancel(job_to_cancel, chan) except Exception as e: raise Exception('Error parsing job information: ' + str(e)) response.status = "500 - Error parsing job information" return return def buildd_command_queue_exists(self, build_client): try: self.log.debug("Checking if queue exists: %s", build_client) chan = self.get_amqp_channel() chan.queue_declare(queue=build_client, passive=True, durable=True, exclusive=False, auto_delete=False,) return False except amqp.AMQPChannelException as e: if e.amqp_reply_code == 405: print("405 from buildd_command_queue_exists. Returning True.") return True # Locked i.e. exists elif e.amqp_reply_code == 404: print("404 from buildd_command_queue_exists. Returning False.") return False # doesnt exist else: return False except Exception as e: print(("Error in buildd_command_queue_exists. Returning False." 
+ str(e))) return False; # Error pybit-1.0.0/pybitweb/lookups.py0000755000175000017500000006504112145767757016470 0ustar neilneil00000000000000# pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from bottle import Bottle,response,request import jsonpickle from bottle_basic_auth import requires_auth from psycopg2 import errorcodes def get_arch_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_arch(page = None): #return list of arches if page: arches = app.config['db'].get_arches(page) else: arches = app.config['db'].get_arches() encoded = jsonpickle.encode(arches) response.content_type = "application/json" return encoded @app.route('/count', method='GET') def get_count(): #return count of arches count = app.config['db'].count_arches() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_arch_id(arch_id): # Returns all information about a specific arch res = app.config['db'].get_arch_id(arch_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No arch found with this ID." return @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_arch(): try: # Add a new arch. name = request.forms.get('name') if name: app.config['db'].put_arch(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_arch(arch_id): try: # Deletes a specific arch retval = app.config['db'].delete_arch(arch_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_suitearch_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') def get_suitearch(): try: #return list of suitearch suitearches = app.config['db'].get_suitearches() encoded = jsonpickle.encode(suitearches) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of suitearches count = app.config['db'].count_suitearches() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_suitearch_id(suitearch_id): try: # Returns all information about a specific suitearch res = app.config['db'].get_suitearch_id(suitearch_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No suitearch found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_suitearch(): # Add a new suitearch. suite_id = request.forms.get('suite_id') arch_id = request.forms.get('arch_id') master_weight = request.forms.get('master_weight') if not master_weight or master_weight == "": master_weight = 0 if suite_id and arch_id: app.config['db'].put_suitearch(suite_id,arch_id,master_weight) else: response.status = "400 - Required fields missing." 
return @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_suitearch(suitearch_id): try: # Deletes a specific suitearch retval = app.config['db'].delete_suitearch(suitearch_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_status_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_statuses(page = None): try: #return list of statuses if page: statuses = app.config['db'].get_statuses(page) else: statuses = app.config['db'].get_statuses() encoded = jsonpickle.encode(statuses) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of statuses count = app.config['db'].count_statuses() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_status_id(status_id): try: # Returns all information about a specific status res = app.config['db'].get_status_id(status_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No status found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_status(): try: # Add a new status. name = request.forms.get('name') if name: app.config['db'].put_status(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_status(status_id): try: # Deletes a specific status retval = app.config['db'].delete_status(status_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_dist_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_dists(page = None): try: #return list of distributions if page: dists = app.config['db'].get_dists(page) else: dists = app.config['db'].get_dists() encoded = jsonpickle.encode(dists) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of dists count = app.config['db'].count_dists() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_dist_id(dist_id): try: # Returns all information about a specific dist res = app.config['db'].get_dist_id(dist_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No dist found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_dist(): try: # Add a new dist. name = request.forms.get('name') if name: app.config['db'].put_dist(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_dist(dist_id): try: # Deletes a specific dist retval = app.config['db'].delete_dist(dist_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_format_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_formats(page = None): try: #return list of package formats if page: formats = app.config['db'].get_formats(page) else: formats = app.config['db'].get_formats() encoded = jsonpickle.encode(formats) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of formats count = app.config['db'].count_formats() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_format_id(format_id): try: # Returns all information about a specific format res = app.config['db'].get_format_id(format_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No format found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_format(): try: # Add a new format. name = request.forms.get('name') if name: app.config['db'].put_format(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_format(format_id): try: # Deletes a specific format retval = app.config['db'].delete_format(format_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_suite_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_suites(page = None): try: #return list of suites if page: suites = app.config['db'].get_suites(page) else: suites = app.config['db'].get_suites() encoded = jsonpickle.encode(suites) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of suites count = app.config['db'].count_suites() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_suite_id(suite_id): try: # Returns all information about a specific suite res = app.config['db'].get_suite_id(suite_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No suite found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_suite(): try: # Add a new suite. name = request.forms.get('name') if name: app.config['db'].put_suite(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_suite(suite_id): try: # Deletes a specific suite retval = app.config['db'].delete_suite(suite_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_env_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_build_envs(page = None): try: #return list of environments if page: build_envs = app.config['db'].get_build_envs(page) else: build_envs = app.config['db'].get_build_envs() encoded = jsonpickle.encode(build_envs) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of environments count = app.config['db'].count_build_envs() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_build_env_id(build_env_id): try: # Returns all information about a specific environment res = app.config['db'].get_build_env_id(build_env_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No environment found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_build_env(): try: # Add a new environment. name = request.forms.get('name') if name: app.config['db'].put_build_env(name) else: response.status = "400 - Required fields missing." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_build_env(build_env_id): try: # Deletes a specific environment retval = app.config['db'].delete_build_env(build_env_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_buildenv_suitearch_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') def get_buildenv_suitearch(): try: #return list of buildenv_suitearches buildenv_suitearch = app.config['db'].get_buildenv_suitearches() encoded = jsonpickle.encode(buildenv_suitearch) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of suitearches count = app.config['db'].count_buildenv_suitearch() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_suitearch_id(buildenv_suitearch_id): try: # Returns all information about a specific buildenv_suitearch res = app.config['db'].get_buildenv_suitearch_id(buildenv_suitearch_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No buildenv_suitearch found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_buildenv_suitearch(): # Add a new buildenv_suitearch. buildenv_id = request.forms.get('buildenv_id') suitearch_id = request.forms.get('suitearch_id') if buildenv_id and suitearch_id: app.config['db'].put_buildenv_suitearch(buildenv_id,suitearch_id) else: response.status = "400 - Required fields missing." 
return @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_buildenv_suitearch(buildenv_suitearch_id): try: # Deletes a specific buildenv_suitearch retval = app.config['db'].delete_buildenv_suitearch(buildenv_suitearch_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app def get_blacklist_app(settings, db): app = Bottle() app.config={'settings':settings, 'db':db} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_blacklist(page = None): try: #return full blacklist if page: blacklist = app.config['db'].get_blacklist(page) else: blacklist = app.config['db'].get_blacklist() encoded = jsonpickle.encode(blacklist) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of blacklist rules count = app.config['db'].count_blacklist() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_blacklist_id(blacklist_id): try: # Returns all information about a specific blacklist rule res = app.config['db'].get_blacklist_id(blacklist_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No blacklist found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_blacklist(): try: # Add a new blacklist rule. field = request.forms.get('field') regex = request.forms.get('regex') if field and regex: app.config['db'].put_blacklist(field,regex) else: response.status = "400 - Required fields missing." return except Exception as e: raise Exception('Exception encountered in put_blacklist(): ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_blacklist(blacklist_id): try: # Deletes a specific blacklist rule. retval = app.config['db'].delete_blacklist(blacklist_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(errorcodes.lookup(retval)) else: response.status = "500 " + str(errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app pybit-1.0.0/pybitweb/packageinstance.py0000755000175000017500000001614612145767770020111 0ustar neilneil00000000000000#!/usr/bin/python # pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from bottle import Bottle,response,request import jsonpickle from bottle_basic_auth import requires_auth import psycopg2.errorcodes def get_packageinstance_app(settings, db): app = Bottle() app.config = { 'settings': settings, 'db': db} @app.route('//togglemaster/', method='GET') def update_packageinstance_masterflag(packageinstance_id,master): try: app.config['db'].update_packageinstance_masterflag(packageinstance_id,master) response.status = "202 - Master flag changed." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='GET') @app.route('/page/', method='GET') def get_all_packageinstances(page = None): try: # Returning list of all packageinstances if page: packageinstances = app.config['db'].get_packageinstances(page) else: packageinstances = app.config['db'].get_packageinstances() encoded = jsonpickle.encode(packageinstances) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of packageinstances count = app.config['db'].count_packageinstances() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='GET') def get_packageinstance_id(packageinstance_id): try: # Returns all information about a specific packageinstance res = app.config['db'].get_packageinstance_id(packageinstance_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No packageinstance found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_packageinstance(): try: # Add a new packageinstance. 
package = request.forms.get('package') version = request.forms.get('version') build_env_id = request.forms.get('build_env_id') # If thet chose "Not specified" if (build_env_id == ""): build_env_id = None arch_id = request.forms.get('arch_id') suite_id = request.forms.get('suite_id') dist_id = request.forms.get('dist_id') format_id = request.forms.get('format_id') slave = request.forms.get('slave') # This but is confusing if slave: #print "SLAVE NOT NULL:" + str (slave) slave = "true" master = "false" else: #print "SLAVE NULL" slave = "false" # not slave means master master = "true" if package and version and arch_id and suite_id and dist_id and format_id and slave: build_env = None package_obj = app.config['db'].get_package_byvalues(package,version)[0] if (build_env_id): build_env = app.config['db'].get_build_env_id(build_env_id) arch = app.config['db'].get_arch_id(arch_id) suite = app.config['db'].get_suite_id(suite_id) dist = app.config['db'].get_dist_id(dist_id) pkg_format = app.config['db'].get_format_id(format_id) app.config['db'].put_packageinstance(package_obj,build_env,arch,suite,dist,pkg_format,master) else: response.status = "400 - Required fields missing." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_packageinstance(packageinstance_id): try: # Deletes a specific package instance retval = app.config['db'].delete_packageinstance(packageinstance_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(psycopg2.errorcodes.lookup(retval)) else: response.status = "500 " + str(psycopg2.errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/list', method='GET') def get_packageinstances_filtered(): try: response.content_type = "application/json" #TODO - CODEME, filter by parameter (request.query.[x]) return "Returning packageinstances by filter" except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/details/:name', method='GET') def get_packageinstance_versions(name): try: res = app.config['db'].get_packageinstances_byname(name) # lists all instances of a package by name if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No packageinstances found with this name." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app pybit-1.0.0/pybitweb/buildd.py0000755000175000017500000001312512145767734016226 0ustar neilneil00000000000000# pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from bottle import Bottle,template,response,request import jsonpickle from bottle_basic_auth import requires_auth import psycopg2.errorcodes def get_buildd_app(settings, db, controller): app = Bottle() app.config= {'settings' : settings, 'db' : db, 'controller': controller} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_buildd(page = None): try: # Return list of BuildDs if page: buildds = app.config['db'].get_buildclients(page) else: buildds = app.config['db'].get_buildclients() encoded = jsonpickle.encode(buildds) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of buildds count = app.config['db'].count_buildclients() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_buildd(): try: # Register a new BuildD. name = request.forms.get('name') if name: app.config['db'].put_buildclient(name) else: response.status = "400 - Required fields missing." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='GET') def get_buildd_id(buildd_id): try: # Returns all information about a specific buildd res = app.config['db'].get_buildd_id(buildd_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No buildd found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_buildd_id(buildd_id): try: # Deletes a specific buildd retval = app.config['db'].delete_buildclient(buildd_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(psycopg2.errorcodes.lookup(retval)) else: response.status = "500 " + str(psycopg2.errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//status', method='GET') def get_buildd_status(buildd_name): #TODO: FIXME try: res = app.config['controller'].buildd_command_queue_exists(buildd_name) if res == True : return "Active" else : return "Not Active" except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//jobs', method='GET') def get_buildd_jobs(buildclient_id): try: #Returns jobs for specified buildd res = app.config['db'].get_buildd_jobs(buildclient_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No buildd found with this ID." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//:command', method='POST') @requires_auth def post_command(buildd_id,command): try: response.status = "202 - Command sent" #TODO - CODEME return template("POSTed command '{{command}}' to buildd: {{buildd_id}}",buildd_id=buildd_id, command=command) except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app pybit-1.0.0/pybitweb/bottle_basic_auth.py0000644000175000017500000000377412145767727020446 0ustar neilneil00000000000000#!/usr/bin/python # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. import pybit from bottle import request,response # TODO: This is a huge bodge. Query the DB for this! def check_auth(username, password): # Load from local settings file in configs, or if not, from system settings in etc. (auth_settings,path) = pybit.load_settings("web/web.conf") if not auth_settings: # Cant load settings return False # Check credentials if auth_settings['web']['username'] == username and auth_settings['web']['password'] == password: return True else: return False def authenticate(): response.content_type = "text/html" response.status = "401 - Unauthorized" response.headers['WWW-Authenticate'] = 'Basic realm="PyBit"' return "401 - Unauthorized" def requires_auth(f): def decorated(*args, **kwargs): auth = request.auth if not auth: return authenticate() elif not check_auth(auth[0],auth[1]): response.status = "401 - Unauthorized" return authenticate() else: return f(*args, **kwargs) return decorated pybit-1.0.0/pybitweb/db.py0000755000175000017500000023205212145767745015354 0ustar neilneil00000000000000# pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
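# Overview of this module (pybitweb/db.py):
# The Database class wraps all PostgreSQL access for the web front-end using
# psycopg2 with DictCursor result rows.  connect() chooses a local or remote
# connection, with or without a password, based on the settings dictionary,
# and values handed to the put_*() helpers are first HTML-escaped by
# remove_nasties().  The lookup tables (arch, suitearches, buildenv,
# distribution, format, suite, status, ...) share a common access pattern:
# count_*() returns the number of pages at limit_low (5) rows per page,
# get_*() optionally applies LIMIT/OFFSET for a given page, put_*() inserts a
# row and returns the corresponding model object, and delete_*() returns
# True/False or the psycopg2 error code (e.g. 23503 on a foreign-key
# violation, which the web layer turns into a 409 response).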
import psycopg2.extras import cgi import math import re import logging from pybit.models import Arch,Dist,Format,Status,Suite,BuildD,Job,Package,PackageInstance,SuiteArch,JobHistory, ClientMessage, checkValue, Transport,\ BuildEnv, BuildEnvSuiteArch,Blacklist def remove_nasties(nastystring): try: if isinstance(nastystring, basestring): escaped_string = cgi.escape(nastystring,True) # Escapes <, > , &, and " #print "Escaped the string " + nastystring + " to " + escaped_string return escaped_string else: #print "Not escaping: " + str(nastystring) + " as it is not a string." return nastystring; except Exception as e: raise Exception("Error escaping string: " + str(nastystring) + str(e)) return None class Database(object): conn = None # CONSTANTs limit_low = 5.0 limit_high = 10.0; #<<<<<<<< General database functions >>>>>>>> #Constructor, connects on initialisation. def __init__(self, settings): self.settings = settings self.log = logging.getLogger("db" ) if (('debug' in self.settings) and ( self.settings['debug'])) : self.log.setLevel( logging.DEBUG ) self.log.debug("DB constructor called.") self.connect() #Deconstructor, disconnects on disposal. def __del__(self): self.disconnect() #Connects to DB using settings loaded from file. def connect(self): # for catbells if (checkValue('password',self.settings)): if (checkValue('hostname',self.settings) and checkValue('port',self.settings)): # remote with password self.log.debug("REMOTE WITH PASSWORD") self.conn = psycopg2.connect(database=self.settings['databasename'], user=self.settings['user'], host=self.settings['hostname'], port=self.settings['port'], password=self.settings['password']) else: # local with password self.log.debug("LOCAL WITH PASSWORD") self.conn = psycopg2.connect(database=self.settings['databasename'], user=self.settings['user'], password=self.settings['password']) else: if (checkValue('hostname',self.settings) and checkValue('port',self.settings)): # remote without password self.log.debug("REMOTE WITHOUT PASSWORD") self.conn = psycopg2.connect(database=self.settings['databasename'],user=self.settings['user'], host=self.settings['hostname'],port=self.settings['port']) else: # local without password self.log.debug("LOCAL WITHOUT PASSWORD") self.conn = psycopg2.connect(database=self.settings['databasename'], user=self.settings['user']) #Called by deconstructor def disconnect(self): try: if self.conn: self.conn.commit() self.conn.close() return True except psycopg2.Error as e: self.conn.rollback() raise Exception("Error disconnecting from database. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return False #<<<<<<<< NEW CODE >>>>>>>> def log_buildRequest(self,build_request_obj): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into buildrequest(job,method,uri,vcs_id) VALUES (%s,%s,%s,%s) RETURNING id",(remove_nasties(build_request_obj.job.id),remove_nasties(build_request_obj.transport.method),remove_nasties(build_request_obj.transport.uri),remove_nasties(build_request_obj.transport.vcs_id))) res = cur.fetchall() self.conn.commit() new_id = res[0]['id'] cur.close() return new_id except psycopg2.Error as e: self.conn.rollback() raise Exception("Error logging build. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_jobTransportDetails(self, jobid): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT * FROM buildrequest WHERE job = %s",(remove_nasties(jobid),)) res = cur.fetchall() self.conn.commit() # Get transport details so we can check out the same source to retry a job. transport = Transport(None,res[0]['method'],res[0]['uri'],res[0]['vcs_id']) cur.close() return transport except psycopg2.Error as e: self.conn.rollback() raise Exception("Error logging build. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None #<<<<<<<< Lookup table queries >>>>>>>> # Do we care about update or delete? def count_arches(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM arch AS num_arches") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving arches count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_arches(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM arch ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM arch ORDER BY name") res = cur.fetchall() self.conn.commit() arches = [] for i in res: arches.append(Arch(i['id'],i['name'])) cur.close() return arches except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving arches list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_arch_id(self,arch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM arch WHERE id=%s",(arch_id,)) res = cur.fetchall() self.conn.commit() arch = Arch(res[0]['id'],res[0]['name']) cur.close() return arch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving arch with id:" + str(arch_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_arch_byname(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM arch WHERE name=%s",(name,)) res = cur.fetchall() self.conn.commit() arches = [] for i in res: arches.append(Arch(i['id'],i['name'])) cur.close() return arches except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving arch by name:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_arch(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into arch(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() arch = Arch(res[0]['id'],name) cur.close() return arch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding arch:" + name + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_arch(self,arch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM arch WHERE id=%s RETURNING id",(arch_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == arch_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting arch with id: %s. Database error code: %s - Details: %s.",str(arch_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_suitearches(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM suitearches AS num_suitearches") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suitearches count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suitearches(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,suite_id,arch_id,master_weight FROM suitearches ORDER BY master_weight DESC") res = cur.fetchall() self.conn.commit() suite_arches = [] for i in res: suite_arches.append(SuiteArch(i['id'],self.get_suite_id(i['suite_id']),self.get_arch_id(i['arch_id']),i['master_weight'])) cur.close() return suite_arches except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite arches list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suitearch_id(self,suitearch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,suite_id,arch_id,master_weight FROM suitearches WHERE id=%s",(suitearch_id,)) res = cur.fetchall() self.conn.commit() suitearch = SuiteArch(res[0]['id'],self.get_suite_id(res[0]['suite_id']),self.get_arch_id(res[0]['arch_id']),res[0]['master_weight']) cur.close() return suitearch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite arch with id:" + str(suitearch_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suitearch_by_suite_name(self,suite,arch): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,suite_id,arch_id,master_weight FROM suitearches WHERE suite.id=%s, arch.id=%s",(suite.id,arch.id)) res = cur.fetchall() self.conn.commit() suitearch = SuiteArch(res[0]['id'],self.get_suite_id(res[0]['suite_id']),self.get_arch_id(res[0]['arch_id']),res[0]['master_weight']) cur.close() return suitearch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite arch with suite and arch:. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_suitearch(self,suite_id,arch_id,master_weight = 0): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into suitearches(suite_id,arch_id,master_weight) VALUES (%s, %s, %s) RETURNING id",(remove_nasties(suite_id),remove_nasties(arch_id),remove_nasties(master_weight))) res = cur.fetchall() self.conn.commit() suitearch = SuiteArch(res[0]['id'],self.get_suite_id(suite_id),self.get_arch_id(arch_id),master_weight) cur.close() return suitearch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding suite arch:" + suite_id + arch_id + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_suitearch(self,suitearch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM suitearches WHERE id=%s RETURNING id",(suitearch_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == suitearch_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting suitearch with id: %s. Database error code: %s - Details: %s.",str(suitearch_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_buildenv_suitearches(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM buildenvsuitearch AS num_buildenv_suitearches") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv suitearch count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_buildenv_suitearches(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,buildenv_id,suitearch_id FROM buildenvsuitearch") res = cur.fetchall() self.conn.commit() buildenv_suitearches = [] for i in res: buildenv_suitearches.append(BuildEnvSuiteArch(i['id'],self.get_build_env_id(i['buildenv_id']),self.get_suitearch_id(i['suitearch_id']))) cur.close() return buildenv_suitearches except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv suitearch list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_buildenv_suitearch_id(self,buildenv_suitearch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,buildenv_id,suitearch_id FROM buildenvsuitearch WHERE id=%s",(buildenv_suitearch_id,)) res = cur.fetchall() self.conn.commit() buildenv_suitearch = BuildEnvSuiteArch(res[0]['id'],self.get_build_env_id(res[0]['buildenv_id']),self.get_suitearch_id(res[0]['suitearch_id'])) cur.close() return buildenv_suitearch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv suitearch with id:" + str(buildenv_suitearch_id) + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_buildenv_suitearch(self,buildenv_id,suitearch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into buildenvsuitearch(buildenv_id,suitearch_id) VALUES (%s, %s) RETURNING id",(remove_nasties(buildenv_id),remove_nasties(suitearch_id))) res = cur.fetchall() self.conn.commit() buildenv_suitearch = BuildEnvSuiteArch(res[0]['id'],self.get_build_env_id(buildenv_id),self.get_suitearch_id(suitearch_id)) cur.close() return buildenv_suitearch except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding buildenv suitearch:" + buildenv_id + suitearch_id + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_buildenv_suitearch(self,buildenv_suitearch_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM buildenvsuitearch WHERE id=%s RETURNING id",(buildenv_suitearch_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == buildenv_suitearch_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting buildenvsuitearch with id: %s. Database error code: %s - Details: %s.",str(buildenv_suitearch_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_dists(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM distribution AS num_dists") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving distributions count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_dists(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM distribution ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM distribution ORDER BY name") res = cur.fetchall() self.conn.commit() dists = [] for i in res: dists.append(Dist(i['id'],i['name'])) cur.close() return dists except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving dist list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_dist_id(self,dist_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM distribution WHERE id=%s",(dist_id,)) res = cur.fetchall() self.conn.commit() dist = Dist(res[0]['id'],res[0]['name']) cur.close() return dist except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving dist with id:" + str(dist_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_dist_byname(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM distribution WHERE name=%s",(name,)) res = cur.fetchall() self.conn.commit() dists = [] for i in res: dists.append(Dist(i['id'],i['name'])) cur.close() return dists except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving dist by name:" + name + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_dist(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into distribution(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() dist = Dist(res[0]['id'],name) cur.close() return dist except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding dist:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_dist(self,dist_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM distribution WHERE id=%s RETURNING id",(dist_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == dist_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting dist with id: %s. Database error code: %s - Details: %s.",str(dist_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_formats(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM format AS num_formats") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving formats count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_formats(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM format ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM format ORDER BY name") res = cur.fetchall() self.conn.commit() formats = [] for i in res: formats.append(Format(i['id'],i['name'])) cur.close() return formats except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving formats list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_format_id(self,format_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM format WHERE id=%s",(format_id,)) res = cur.fetchall() self.conn.commit() ret_format = Format(res[0]['id'],res[0]['name']) cur.close() return ret_format except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving format with id:" + str(format_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_format_byname(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM format WHERE name=%s",(name,)) res = cur.fetchall() formats = [] for i in res: formats.append(Format(i['id'],i['name'])) cur.close() return formats except psycopg2.Error as e: raise Exception("Error retrieving format by name:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_format(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into format(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() ret_format = Format(res[0]['id'],name) cur.close() return ret_format except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding format:" + name + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_format(self,format_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM format WHERE id=%s RETURNING id",(format_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == format_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting format with id: %s. Database error code: %s - Details: %s.",str(format_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_statuses(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM status AS num_statuses") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); # ALWAYS round up. except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving statuses count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_statuses(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM status ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM status ORDER BY name") res = cur.fetchall() self.conn.commit() statuses = [] for i in res: statuses.append(Status(i['id'],i['name'])) cur.close() return statuses except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving status list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_status_id(self,status_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM status WHERE id=%s",(status_id,)) res = cur.fetchall() self.conn.commit() status = Status(res[0]['id'],res[0]['name']) cur.close() return status except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving status with id:" + str(status_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_status(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into status(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() status = Status(res[0]['id'],name) cur.close() return status except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding status:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_status(self,status_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM status WHERE id=%s RETURNING id",(status_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == status_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting status with id: %s. 
Database error code: %s - Details: %s.",str(status_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_suites(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM suite AS num_suites") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suites count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suites(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM suite ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM suite ORDER BY name") res = cur.fetchall() self.conn.commit() suites = [] for i in res: suites.append(Suite(i['id'],i['name'])) cur.close() return suites except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suite_id(self,suite_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM suite WHERE id=%s",(suite_id,)) res = cur.fetchall() self.conn.commit() suite = Suite(res[0]['id'],res[0]['name']) cur.close() return suite except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite with id:" + str(suite_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_suite_byname(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM suite WHERE name=%s",(name,)) res = cur.fetchall() self.conn.commit() suites = [] for i in res: suites.append(Suite(i['id'],i['name'])) cur.close() return suites except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving suite with name:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_suite(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into suite(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() suite = Suite(res[0]['id'],name) cur.close() return suite except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding suite:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_suite(self,suite_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM suite WHERE id=%s RETURNING id",(suite_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == suite_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting suite with id: %s. Database error code: %s - Details: %s.",str(suite_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def count_build_envs(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM buildenv AS num_build_envs") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv count. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_build_envs(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,name FROM buildenv ORDER BY name LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,name FROM buildenv ORDER BY name") res = cur.fetchall() self.conn.commit() build_envs = [] for i in res: build_envs.append(BuildEnv(i['id'],i['name'])) cur.close() return build_envs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_build_env_id(self,build_env_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM buildenv WHERE id=%s",(build_env_id,)) res = cur.fetchall() self.conn.commit() if res: build_env = BuildEnv(res[0]['id'],res[0]['name']) else: build_env = None cur.close() return build_env except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv with id:" + str(build_env_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_build_env_byname(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM buildenv WHERE name=%s",(name,)) res = cur.fetchall() self.conn.commit() build_envs = [] for i in res: build_envs.append(BuildEnv(i['id'],i['name'])) cur.close() return build_envs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildenv with name:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_build_env(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT INTO buildenv(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() build_env = BuildEnv(res[0]['id'],name) cur.close() return build_env except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding build_env:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_build_env(self,build_env_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM buildenv WHERE id=%s RETURNING id",(build_env_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == build_env_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting buildenv with id: %s. Database error code: %s - Details: %s.",str(build_env_id),str(e.pgcode),str(e.pgerror)) return e.pgcode #<<<<<<<< BuildD related database functions >>>>>>>> def count_buildclients(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM buildclients AS num_buildclients") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_high; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildclients count. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_buildclients(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: # CONSTANT offset = (page -1) * self.limit_high; cur.execute("SELECT id,name FROM buildclients ORDER BY name LIMIT %s OFFSET %s", (self.limit_high,offset,)) else: cur.execute("SELECT id,name FROM buildclients ORDER BY name") res = cur.fetchall() self.conn.commit() build_clients = [] for i in res: build_clients.append(BuildD(i['id'],i['name'])) cur.close() return build_clients except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildd list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_buildd_id(self,buildd_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name FROM buildclients WHERE id=%s",(buildd_id,)) res = cur.fetchall() self.conn.commit() if (res): buildd = BuildD(res[0]['id'],res[0]['name']) cur.close() return buildd else: return None except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving buildd with id:" + str(buildd_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_buildclient(self,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into buildclients(name) VALUES (%s) RETURNING id",(remove_nasties(name),)) res = cur.fetchall() self.conn.commit() buildd = BuildD(res[0]['id'],name) cur.close() return buildd except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding buildd:" + name + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_buildclient(self,buildclient_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM buildclients WHERE id=%s RETURNING id",(buildclient_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == buildclient_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting buildd with id: %s. Database error code: %s - Details: %s.",str(buildclient_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def get_buildd_jobs(self,buildclient_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT job.id AS job_id,packageinstance_id,buildclients.id AS buildclients_id FROM buildclients,job WHERE buildclients.id=%s AND buildclients.id = job.buildclient_id ORDER BY job.id",(buildclient_id,)) res = cur.fetchall() self.conn.commit() jobs = [] for i in res: packageinstance = self.get_packageinstance_id(i['packageinstance_id']) jobs.append(Job(i['job_id'],packageinstance,buildclient_id)) cur.close() return jobs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving jobs on buildd with id:" + str(buildclient_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None #<<<<<<<< Job related database functions >>>>>>>> # UPDATE queries? 
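# Hedged usage sketch (added for illustration, not part of the original source): the job
# helpers below are usually driven together. `db` is assumed to be an instance of this class;
# the status names come from db/populate.sql and the job.id attribute is assumed from how Job
# objects are constructed here. Kept entirely as comments so the module body is unchanged:
#
#   for job in db.get_jobs_by_status('Waiting'):   # jobs queued but not yet picked up
#       db.put_job_status(job.id, 'Cancelled')     # e.g. bulk-cancel the backlog
#   remaining = db.get_unfinished_jobs()           # anything not Uploaded/Done/Cancelled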
def get_job(self,job_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,packageinstance_id,buildclient_id FROM job WHERE id=%s",(job_id,)) res = cur.fetchall() self.conn.commit() packageinstance = self.get_packageinstance_id(res[0]['packageinstance_id']) buildclient = self.get_buildd_id(res[0]['buildclient_id']) if res[0]['buildclient_id'] else None job = Job(res[0]['id'],packageinstance,buildclient) cur.close() return job except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving job with id:" + str(job_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_jobs(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: # CONSTANT offset = (page -1) * self.limit_high; cur.execute("SELECT id,packageinstance_id,buildclient_id FROM job ORDER BY id LIMIT %s OFFSET %s", (self.limit_high,offset,)) else: cur.execute("SELECT id,packageinstance_id,buildclient_id FROM job ORDER BY id") res = cur.fetchall() self.conn.commit() jobs = [] for i in res: packageinstance = self.get_packageinstance_id(i['packageinstance_id']) buildclient = self.get_buildd_id(i['buildclient_id']) if i['buildclient_id'] else None jobs.append(Job(i['id'],packageinstance,buildclient)) cur.close() return jobs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving jobs list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_jobs_by_status(self,status): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("WITH latest_status AS (SELECT DISTINCT ON (job_id) job_id, status.name FROM jobstatus LEFT JOIN status ON status_id=status.id ORDER BY job_id, time DESC) SELECT job_id, name FROM latest_status WHERE name=%s",(status,)); res = cur.fetchall() self.conn.commit() jobs = [] for i in res: jobs.append(self.get_job(i['job_id'])) cur.close() return jobs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving jobs list with status:" + status + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_unfinished_jobs(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("WITH latest_status AS (SELECT DISTINCT ON (job_id) job_id, status.name FROM jobstatus LEFT JOIN status ON status_id=status.id ORDER BY job_id, time DESC) SELECT job_id, name FROM latest_status WHERE name!='Uploaded' AND name!='Done' AND name!='Cancelled' ORDER BY job_id"); res = cur.fetchall() self.conn.commit() jobs = [] for i in res: jobs.append(self.get_job(i['job_id'])) cur.close() return jobs except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving unfinished jobs. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_job_statuses(self,job_id): #gets job status *history* try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT job.id AS job_id, status.name AS status, buildclients.name AS buildclient, jobstatus.time AS time FROM job LEFT JOIN jobstatus ON job.id=jobstatus.job_id LEFT JOIN status ON jobstatus.status_id=status.id LEFT JOIN buildclients ON buildclients.id=job.buildclient_id WHERE job.id = %s ORDER BY time",(job_id,)); res = cur.fetchall() self.conn.commit() jobstatuses = [] for i in res: jobstatuses.append(JobHistory(i['job_id'],i['status'],i['buildclient'],i['time'])) cur.close() return jobstatuses except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving job status with:" + str(job_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_job_status(self, jobid, status, client=None): try: self.log.debug("put_job_status: %s %s %s", jobid, status, client) cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT INTO jobstatus (job_id, status_id) VALUES (%s, (SELECT id FROM status WHERE name=%s))", (remove_nasties(jobid),remove_nasties(status),)) if client is not None and client != "": #insert the client if it doesn't already exist. cur.execute("INSERT INTO buildclients(name) SELECT name FROM buildclients UNION VALUES(%s) EXCEPT SELECT name FROM buildclients", (remove_nasties(client),)) cur.execute("UPDATE job SET buildclient_id=(SELECT id FROM buildclients WHERE name=%s) WHERE id=%s", (remove_nasties(client),remove_nasties(jobid))) self.conn.commit() cur.close() except psycopg2.Error as e: self.conn.rollback() raise Exception("Error setting job status. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_job(self,job_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("WITH latest_status AS (SELECT DISTINCT ON (job_id) job_id, status.name FROM jobstatus LEFT JOIN status ON status_id=status.id WHERE job_id=%s ORDER BY job_id, time DESC) SELECT job_id, name FROM latest_status WHERE name!='Building'",(job_id,)) res = cur.fetchall() self.conn.commit() if len(res) > 0: cur.execute("DELETE FROM jobstatus WHERE job_id=%s RETURNING id",(job_id,)) self.conn.commit() cur.execute("DELETE FROM job WHERE id=%s RETURNING id",(job_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == job_id: cur.close() return True else: cur.close() return False else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting job with id: %s. 
Database error code: %s - Details: %s.",str(job_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def put_job(self,packageinstance,buildclient): try: if buildclient: buildclient_id = remove_nasties(buildclient.id) else: buildclient_id = None cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT INTO job (packageinstance_id,buildclient_id) VALUES (%s, %s) RETURNING id",(remove_nasties(packageinstance.id),(buildclient_id))) res = cur.fetchall() job_id = res[0]['id'] if job_id is not None: cur.execute("INSERT INTO jobstatus (job_id, status_id) VALUES (%s, (SELECT id FROM status WHERE status.name=%s))", (remove_nasties(job_id), remove_nasties(ClientMessage.waiting))) self.conn.commit() else: self.conn.rollback() job = Job(job_id,packageinstance,buildclient) cur.close() return job except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding job. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None #<<<<<<<< Package related database functions >>>>>>>> # UPDATE queries? def count_packages(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM package AS num_packages") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_high; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving packages count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packages(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: # CONSTANT offset = (page -1) * self.limit_high; cur.execute("SELECT id,version,name FROM package ORDER BY name,id LIMIT %s OFFSET %s", (self.limit_high,offset,)) else: cur.execute("SELECT id,version,name FROM package ORDER BY name,id") res = cur.fetchall() self.conn.commit() packages = [] for i in res: packages.append(Package(i['id'],i['version'],i['name'])) cur.close() return packages except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving packages list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packagenames(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT DISTINCT (name), name FROM package GROUP BY name ORDER BY name") # We only care about a unique list of names res = cur.fetchall() self.conn.commit() packages = [] for i in res: packages.append(Package(None,None,i['name'])) # TODO: these may actually be useful to have still. cur.close() return packages except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving packages list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packages_byname(self, name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,version,name FROM package WHERE name=%s",(name,)) res = cur.fetchall() self.conn.commit() packages = [] for i in res: packages.append(Package(i['id'],i['version'],i['name'])) cur.close() return packages except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package with name:" + str(name) + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_package_id(self,package_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,version,name FROM package WHERE id=%s",(package_id,)) res = cur.fetchall() self.conn.commit() package = Package(res[0]['id'],res[0]['version'],res[0]['name']) cur.close() return package except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package with id:" + str(package_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_package_byvalues(self,name,version): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,name,version FROM package WHERE name=%s AND version=%s",(name,version)) res = cur.fetchall() self.conn.commit() packages = [] for i in res: packages.append(Package(i['id'],i['version'],i['name'])) cur.close() return packages except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package by values:" + name + version + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_package(self,version,name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into package(version,name) VALUES (%s, %s) RETURNING id",(remove_nasties(version),remove_nasties(name))) res = cur.fetchall() self.conn.commit() package = Package(res[0]['id'],version,name) cur.close() return package except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding package:" + name + version + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_package(self,package_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM package WHERE id=%s RETURNING id",(package_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == package_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting package with id: %s. Database error code: %s - Details: %s.",str(package_id),str(e.pgcode),str(e.pgerror)) return e.pgcode #<<<<<<<<< Packageinstance related Queries >>>>>>> def get_packageinstance_id(self,packageinstance_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance WHERE id=%s",(packageinstance_id,)) res = cur.fetchall() self.conn.commit() package = self.get_package_id(res[0]['package_id']) build_env = self.get_build_env_id(res[0]['buildenv_id']) arch = self.get_arch_id(res[0]['arch_id']) suite = self.get_suite_id(res[0]['suite_id']) dist = self.get_dist_id(res[0]['dist_id']) pkg_format = self.get_format_id(res[0]['format_id']) p_i = PackageInstance(res[0]['id'],package,arch,build_env,suite,dist,pkg_format,res[0]['master']) cur.close() return p_i except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package instance with:" + str(packageinstance_id) + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def count_packageinstances(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM packageinstance AS num_packageinstances") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_high; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving packageinstances count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packageinstances(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: # CONSTANT offset = (page -1) * self.limit_high; cur.execute("SELECT id,package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance ORDER BY id LIMIT %s OFFSET %s", (self.limit_high,offset,)) else: cur.execute("SELECT id,package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance ORDER BY id") res = cur.fetchall() self.conn.commit() packageinstances = [] for i in res: packageinstances.append(self.get_packageinstance_id(i['id'])) cur.close() return packageinstances except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package instances list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packageinstances_byname(self, name): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT packageinstance.id AS id,package.id AS package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance,package WHERE packageinstance.package_id = package.id AND name = %s ORDER BY package_id, id",(name,)) res = cur.fetchall() self.conn.commit() packageinstances = [] for i in res: packageinstances.append(self.get_packageinstance_id(i['id'])) cur.close() return packageinstances except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package instances by name. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_packageinstance_byvalues(self,package,build_env,arch,suite,dist,pkg_format): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if build_env: cur.execute("SELECT id,package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance WHERE package_id=%s AND buildenv_id=%s AND arch_id=%s AND suite_id=%s AND dist_id=%s AND format_id=%s",(package.id,build_env.id,arch.id,suite.id,dist.id,pkg_format.id)) else: cur.execute("SELECT id,package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master FROM packageinstance WHERE package_id=%s AND buildenv_id IS NULL AND arch_id=%s AND suite_id=%s AND dist_id=%s AND format_id=%s",(package.id,arch.id,suite.id,dist.id,pkg_format.id)) res = cur.fetchall() self.conn.commit() packageinstances = [] for i in res: packageinstances.append(self.get_packageinstance_id(i['id'])) cur.close() return packageinstances except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package instance by value. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_packageinstance(self,package,build_env,arch,suite,dist,pkg_format,master): try: # The buildenv_id field in the DB is allowed to be null. # We may be passed a None build_env object and must handle this. 
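# Illustrative calls (added comment; the object names are assumptions, not from the source):
#   db.put_packageinstance(pkg, wheezy_env, arch, suite, dist, deb_format, master=True)
#   db.put_packageinstance(pkg, None, arch, suite, dist, deb_format, master=False)  # stored with a NULL buildenv_id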
if build_env: build_env_id = build_env.id else: build_env_id = None; cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into packageinstance(package_id,buildenv_id,arch_id,suite_id,dist_id,format_id,master) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id",(remove_nasties(package.id),remove_nasties(build_env_id),remove_nasties(arch.id),remove_nasties(suite.id),remove_nasties(dist.id),remove_nasties(pkg_format.id),remove_nasties(master))) self.conn.commit() res = cur.fetchall() self.conn.commit() p_i = PackageInstance(res[0]['id'],package,arch,build_env,suite,dist,pkg_format,master) cur.close() return p_i except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding package instance:" + str(package.id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def update_packageinstance_masterflag(self,packageinstance_id,master): try: if master == 1: master = True elif master == 0: master = False else: return None; cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("UPDATE packageinstance SET master=%s WHERE id=%s",(remove_nasties(master),remove_nasties(packageinstance_id))) self.conn.commit() self.conn.commit() cur.close() return except psycopg2.Error as e: self.conn.rollback() raise Exception("Error updating package instance master flag:" + str(packageinstance_id) + " to " + str(master) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_packageinstance(self,packageinstance_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM packageinstance WHERE id=%s RETURNING id",(packageinstance_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == packageinstance_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting package instance with id: %s. Database error code: %s - Details: %s.",str(packageinstance_id),str(e.pgcode),str(e.pgerror)) return e.pgcode def check_specific_packageinstance_exists(self,build_env,arch,package,distribution,pkg_format,suite): try: if arch and distribution and pkg_format and package and suite: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if build_env: cur.execute("SELECT id FROM packageinstance WHERE buildenv_id=%s AND arch_id=%s AND dist_id=%s AND format_id=%s AND package_id=%s AND suite_id=%s",(build_env.id,arch.id,distribution.id,pkg_format.id,package.id,suite.id)) else: cur.execute("SELECT id FROM packageinstance WHERE buildenv_id IS NULL AND arch_id=%s AND dist_id=%s AND format_id=%s AND package_id=%s AND suite_id=%s",(arch.id,distribution.id,pkg_format.id,package.id,suite.id)) res = cur.fetchall() self.conn.commit() if len(res) > 0: #Found specific package instance cur.close() return True else: # doesnt exist #Cannot find specific package instance cur.close() return False else: #Error finding specific package instance cur.close() return False except psycopg2.Error as e: self.conn.rollback() raise Exception("Error checking package instance exists. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None #<<<<<<<<< Report Queries >>>>>>> def check_package_has_unfinished_jobs(self, package_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("WITH latest_status AS (SELECT DISTINCT ON (job_id) job_id, status.name FROM jobstatus LEFT JOIN status ON status_id=status.id ORDER BY job_id, time DESC) SELECT job_id, name, package_id FROM latest_status LEFT JOIN job ON latest_status.job_id=job.id LEFT JOIN packageinstance ON packageinstance_id=packageinstance.id WHERE package_id=%s AND name NOT IN ('Done', 'Uploaded', 'Cancelled')",(package_id,)); res = cur.fetchall() self.conn.commit() if res and len(res) > 0: return True else: return False except psycopg2.Error as e: self.conn.rollback() raise Exception("Error check package has unfinished jobs. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_report_package_instance(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT packageinstance.id, suite.name AS suite, package.name AS package, package.version AS version, arch.name AS arch, packageinstance.buildenv_id AS buildenv_id, format.name AS format, distribution.name AS dist, packageinstance.master AS master FROM packageinstance LEFT JOIN arch ON arch.id=arch_id LEFT JOIN suite ON suite.id=suite_id LEFT JOIN distribution ON distribution.id=dist_id LEFT JOIN package ON package_id=package.id LEFT JOIN format ON format_id=format.id") res = cur.fetchall() self.conn.commit() package_instances = [] for i in res : package_instances.append(PackageInstance(i['id'], i['package'], i['arch'], i['buildenv_id'], i['suite'], i['dist'], i['format'], i['master'])) cur.close() return package_instances except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving package instance list. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_supported_architectures(self,suite) : try: if suite : cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT arch.id, arch.name, suitearches.master_weight FROM suite LEFT JOIN suitearches ON suite.id=suite_id LEFT JOIN arch ON arch_id = arch.id WHERE suite.name=%s ORDER BY master_weight DESC, random()",[suite]) res = cur.fetchall() self.conn.commit() arch_list = [] for i in res : arch_list.append(i['name']) cur.close() return arch_list else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving supported architectures for:" + suite + ". 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_supported_build_environments(self,suite) : try: if suite : cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT DISTINCT ON (buildenv.id) buildenv.id AS buildenv_id FROM suitearches LEFT JOIN buildenvsuitearch ON suitearches.id=suitearch_id LEFT JOIN buildenv ON buildenvsuitearch.buildenv_id=buildenv.id WHERE suitearches.suite_id=(SELECT id FROM suite WHERE name=%s)",(remove_nasties(suite),)) res = cur.fetchall() self.conn.commit() env_list = [] for i in res : build_env = self.get_build_env_id(i['buildenv_id']) #self.log.debug("SUITE (%s) HAS SUPPORTED BUILD ENV:%s",suite,build_env.name) env_list.append(build_env) cur.close() return env_list else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving supported build environments for:" + suite + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_supported_build_env_suite_arches(self,suite) : try: if suite : cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT buildenv.id AS buildenv_id, suitearches.id AS suitearch_id, suitearches.master_weight AS suitearch_master_weight, buildenvsuitearch.id AS buildenvsuitearch_id FROM suitearches LEFT JOIN buildenvsuitearch ON suitearches.id=suitearch_id LEFT JOIN buildenv ON buildenvsuitearch.buildenv_id=buildenv.id WHERE suitearches.suite_id=(SELECT id FROM suite WHERE name=%s) ORDER BY buildenv_id, suitearch_master_weight DESC",(remove_nasties(suite),)) res = cur.fetchall() self.conn.commit() build_env_suite_arch_list = [] for i in res : suitearch = self.get_suitearch_id(i['suitearch_id']) build_env = self.get_build_env_id(i['buildenv_id']) if not build_env : buildenvsuitearch = BuildEnvSuiteArch(i['buildenvsuitearch_id'],None,suitearch) else : buildenvsuitearch = BuildEnvSuiteArch(i['buildenvsuitearch_id'],build_env,suitearch) build_env_suite_arch_list.append(buildenvsuitearch) cur.close() return build_env_suite_arch_list else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving supported build env suite arches for:" + suite + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None # Note: True = failed, false = Ok. Should probably be renamed isInBlacklist() or similar. def check_blacklist(self,field,value): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,field,regex FROM blacklist WHERE field=%s",[field]) res = cur.fetchall() self.conn.commit() for i in res: # Check passed in value using i.regex - Search() or Match() ? match = re.search(i['regex'], value) # An invalid regexp will throw an exception here. Valid regexp is i.e: name field and (.*-dev) or vcs_uri field and (.*/users/*) if match is not None: self.log.debug("BLACKLISTED! %s matches %s : %s", str(i['regex']), str(field), str(value)) cur.close() return True cur.close() return False # No rule matched (or no rules for this field), that is fine too. except psycopg2.Error as e: self.conn.rollback() raise Exception("Error checking blacklist. 
Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def count_blacklist(self): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT COUNT(*) FROM blacklist AS num_blacklist") res = cur.fetchall() self.conn.commit() cur.close() if res[0][0]: pages = res[0][0] / self.limit_low; else: pages = 1 return math.ceil(pages); except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving blacklist count. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_blacklist(self,page=None): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) if page: offset = (page -1) * self.limit_low; cur.execute("SELECT id,field,regex FROM blacklist ORDER BY field LIMIT %s OFFSET %s", (self.limit_low,offset,)) else: cur.execute("SELECT id,field,regex FROM blacklist ORDER BY field") res = cur.fetchall() self.conn.commit() blacklist = [] for i in res: blacklist.append(Blacklist(i['id'],i['field'],i['regex'])) cur.close() return blacklist except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving blacklist. Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def get_blacklist_id(self,blacklist_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("SELECT id,field,regex FROM blacklist WHERE id=%s",(blacklist_id,)) res = cur.fetchall() self.conn.commit() blacklist = Blacklist(res[0]['id'],res[0]['field'],res[0]['regex']) cur.close() return blacklist except psycopg2.Error as e: self.conn.rollback() raise Exception("Error retrieving blacklist rule with id:" + str(blacklist_id) + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def put_blacklist(self,field,regex): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("INSERT into blacklist(field,regex) VALUES (%s,%s) RETURNING id",(remove_nasties(field),remove_nasties(regex))) res = cur.fetchall() self.conn.commit() blacklist = Blacklist(res[0]['id'],field,regex) cur.close() return blacklist except psycopg2.Error as e: self.conn.rollback() raise Exception("Error adding blacklist rule:" + field + " " + regex + ". Database error code: " + str(e.pgcode) + " - Details: " + str(e.pgerror)) return None def delete_blacklist(self,blacklist_id): try: cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("DELETE FROM blacklist WHERE id=%s RETURNING id",(blacklist_id,)) res = cur.fetchall() self.conn.commit() if res[0]['id'] == blacklist_id: cur.close() return True else: cur.close() return False except psycopg2.Error as e: self.conn.rollback() self.log.debug("Error deleting blacklist with id: %s. Database error code: %s - Details: %s.",str(blacklist_id),str(e.pgcode),str(e.pgerror)) return e.pgcode pybit-1.0.0/pybitweb/package.py0000755000175000017500000001603312145767763016351 0ustar neilneil00000000000000# pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from bottle import Bottle,response,request import jsonpickle from bottle_basic_auth import requires_auth import psycopg2.errorcodes def get_packages_app(settings, db, controller): app = Bottle() app.config={'settings':settings,'db':db, 'controller' : controller} @app.route('/', method='GET') @app.route('/page/', method='GET') def get_all_packages(page = None): try: # Returning list of all packages if page: packages = app.config['db'].get_packages(page) else: packages = app.config['db'].get_packages() encoded = jsonpickle.encode(packages) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/count', method='GET') def get_count(): #return count of packages count = app.config['db'].count_packages() encoded = jsonpickle.encode(count) response.content_type = "application/json" return encoded @app.route('/names', method='GET') def list_packagenames(): try: # Returning list of all package names packages = app.config['db'].get_packagenames() encoded = jsonpickle.encode(packages) response.content_type = "application/json" return encoded except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='GET') def get_package_id(package_id): try: # Returns all information about a specific package res = app.config['db'].get_package_id(package_id) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No package found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_package(): try: # Add a new package. version = request.forms.get('version') name = request.forms.get('name') if version and name: app.config['db'].put_package(version,name) else: response.status = "400 - Required fields missing." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def delete_package(package_id): try: # Deletes a specific buildd retval = app.config['db'].delete_package(package_id) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(psycopg2.errorcodes.lookup(retval)) else: response.status = "500 " + str(psycopg2.errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None #NEW: Have controller cancel all jobs for this package. 
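# Hedged example (added for illustration; host, port, mount point and credentials are
# assumptions, and the package id in the URL is inferred from cancel_package()'s
# package_id argument): assuming the packages app is mounted under /package, a client
# could request cancellation with something like:
#   curl -u user:pass http://localhost:8080/package/42/cancel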
@app.route('//cancel', method='GET') @requires_auth def cancel_package(package_id): try: response.status = "202 - CANCEL PACKAGE request received" app.config['controller'].cancel_package(package_id) return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/list', method='GET') # TODO, filter by parameter (request.query.[x]) def get_packages_filtered(): try: response.content_type = "application/json" #TODO - CODEME return "Returning packages by filter" except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None # Gets package versions (not instances!) by name. @app.route('/details/:name', method='GET') def get_package_versions(name): try: res = app.config['db'].get_packages_byname(name) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 No packages found with this name." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None # Gets package versions (not instances!) by name. @app.route('//active', method='GET') def get_package_active_jobs(package_id): try: response.content_type = "text/plain" return str(app.config['db'].check_package_has_unfinished_jobs(package_id)) except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/details/:name/:version', method='GET') def get_package_details(name,version): try: res = app.config['db'].get_package_byvalues(name,version) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 No package found with this name and version." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app pybit-1.0.0/pybitweb/job.py0000755000175000017500000002561412145767752015543 0ustar neilneil00000000000000#!/usr/bin/python # pybit-web # Copyright 2012: # # Nick Davidson , # Simon Haswell , # Neil Williams , # James Bennet # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
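# Hedged example (added for illustration, not part of the original source): the /vcshook
# route below reads a form-encoded POST. A VCS post-commit hook could poke it roughly like
# this; host, port, mount point and credentials are assumptions, the field names match
# vcs_hook() below and the sample values follow db/populate.sql:
#
#   curl -u user:pass \
#        -d method=git -d uri=git://example.org/foo.git -d vcs_id=HEAD \
#        -d package=foo -d package_version=1.0-1 -d distribution=Debian \
#        -d suite=development -d architecture_list=armel -d format=deb \
#        http://localhost:8080/job/vcshook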
from bottle import Bottle,response,request import jsonpickle import logging from bottle_basic_auth import requires_auth from pybit.models import Transport import psycopg2.errorcodes #NEW: proxy to class method controller.add def get_job_app(settings, db, controller) : app = Bottle() app.config={'settings':settings, 'db':db, 'controller': controller} app.log = logging.getLogger( "web" ) if (('debug' in settings['web']) and ( settings['web']['debug'])) : app.log.setLevel( logging.DEBUG ) @app.route('/vcshook', method='POST') @app.route('/vcshook', method='PUT') @requires_auth def vcs_hook(): try: response.status = "200 - Version control poke received" uri = request.forms.get('uri') method = request.forms.get('method') dist = request.forms.get('distribution') vcs_id = request.forms.get('vcs_id') architectures = request.forms.get('architecture_list') version = request.forms.get('package_version') package_name = request.forms.get('package') suite = request.forms.get('suite') pkg_format = request.forms.get('format') if not uri and not method and not dist and not architectures and not version and not package_name and not suite and not pkg_format : response.status = "400 - Required fields missing." return None else : app.log.debug("RECEIVED BUILD REQUEST FOR %s, %s, %s, %s", package_name, version, suite, architectures) # NOTE:VCS Hook does not send build_environment. if app.config['controller'].process_job(dist, architectures, version, package_name, suite, pkg_format, Transport(None, method, uri, vcs_id),None): return else: return False except Exception as e: raise Exception('Exception encountered in vcs_hook: ' + str(e)) response.status = "500 - Exception encountered in vcs_hook" return None @app.route('/', method='GET') @app.route('/page/', method='GET') def get_jobs(page = None): try: response.content_type = "application/json" #return list of ALL jobs if page: return jsonpickle.encode(app.config['db'].get_jobs(page)) else: return jsonpickle.encode(app.config['db'].get_jobs()) except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None #NEW: Have controller cancel all jobs. @app.route('/cancelall', method='GET') @app.route('/', method='DELETE') @requires_auth def cancel_jobs(): try: response.status = "202 - CANCEL ALL request received" app.config['controller'].cancel_all_builds() return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None #NEW: Have controller cancel a specific job. @app.route('//cancel', method='GET') @requires_auth def cancel_job(jobid): try: response.status = "202 - CANCEL JOB request received" app.config['controller'].cancel_package_instance(jobid) return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/status', method='GET') def get_jobstatuses(): try: response.content_type = "application/json" #return list of UNFINISHED jobs" res = app.config['db'].get_unfinished_jobs() return jsonpickle.encode(res) except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='PUT') @app.route('/', method='POST') @requires_auth def update_job_status(jobid): job_status = request.forms.status job_client = None if hasattr(request.forms, 'client') : job_client = request.forms.client if job_status: job = app.config['db'].get_job(jobid) if job is not None: app.log.debug("Setting job:%i to %s", job.id, job_status) app.config['db'].put_job_status(job.id, job_status, job_client) else: response.status = "404 - No job found with this ID." 
return else: response.status = "400 - Required fields missing." return @app.route('/status/', method='GET') def get_jobs_bystatus(status): try: response.content_type = "application/json" #return list of UNFINISHED jobs" res = app.config['db'].get_jobs_by_status(status) return jsonpickle.encode(res) except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//retry', method='GET') @requires_auth def retry_job(jobid): # TODO: Improve this. # This will retry a job, using the same stashed Transport data, from the buildrequest table. app.log.debug("Retry job request recieved for job id:%i", jobid) job = app.config['db'].get_job(jobid) transport = app.config['db'].get_jobTransportDetails(jobid) package_version = job.packageinstance.get_package_version() package_name = job.packageinstance.get_package_name() arch = job.packageinstance.get_arch_name() # TODO: parse list dist = job.packageinstance.get_distribution_name() suite = job.packageinstance.get_suite_name() pkg_format = job.packageinstance.get_format_name() build_environment = job.packageinstance.get_buildenv_name() # Pass to controller to queue up - Pass build_environment if any. if app.config['controller'].process_job(dist,arch,package_version,package_name,suite,pkg_format,transport,build_environment): app.log.debug("Retry job processed OK!") return else: app.log.debug("Error retrying job!") return False @app.route('/', method='POST') @app.route('/', method='PUT') @requires_auth def put_job(): try: # Add a new job. Pokes simons controller code with the correct values for uri, method, vcs_id etc... packageinstance_id = request.forms.get('packageinstance_id') method = request.forms.get('method') vcs_id = request.forms.get('vcs_id') uri = request.forms.get('uri') if packageinstance_id and method and uri: packageinstance = app.config['db'].get_packageinstance_id(packageinstance_id) package_version = packageinstance.get_package_version() package_name = packageinstance.get_package_name() arch = packageinstance.get_arch_name() # TODO: parse list dist = packageinstance.get_distribution_name() suite = packageinstance.get_suite_name() pkg_format = packageinstance.get_format_name() build_environment = packageinstance.get_buildenv_name() # Pass to controller to queue up - Pass build_environment if any. transport = Transport(None, method, uri, vcs_id) if app.config['controller'].process_job(dist,arch,package_version,package_name,suite,pkg_format,transport,build_environment): return else: return False else: response.status = "400 - Required fields missing." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('/', method='GET') def get_jobid(jobid): try: # Return details for specified job ID res = app.config['db'].get_job(jobid) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No job found with this ID." 
return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//delete', method='GET') @app.route('/', method='DELETE') @requires_auth def del_jobid(jobid): try: # Deletes a specific job retval = app.config['db'].delete_job(jobid) if(retval == True): response.status = "200 DELETE OK" elif(retval == False): response.status = "404 Cannot DELETE" elif(retval == "23503"): response.status = "409 " + str(psycopg2.errorcodes.lookup(retval)) else: response.status = "500 " + str(psycopg2.errorcodes.lookup(retval)) return response.status except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None @app.route('//status', method='GET') def get_jobstatus(jobid): try: # Return status history for specified job ID res = app.config['db'].get_job_statuses(jobid) # check results returned if res: encoded = jsonpickle.encode(res) response.content_type = "application/json" return encoded else: response.status = "404 - No job found with this ID." return except Exception as e: raise Exception('Exception encountered: ' + str(e)) return None return app pybit-1.0.0/pybitweb/__init__.py0000644000175000017500000002123412107360611016474 0ustar neilneil00000000000000 from bottle import Bottle,route,run,template,debug,HTTPError,response,error,redirect,request, hook, static_file import job import lookups import buildd import package import packageinstance import os def get_app(settings, db, controller): app = Bottle() app.config={'settings' : settings, 'db' : db, 'controller' : controller} local_path = "pybitweb/static" installed_path = settings['web']['installed_path'] def getPath(): if os.path.exists(local_path): return local_path elif os.path.exists(installed_path): return installed_path else: return None # Helper which abstracts away whether its looking in /usr/share for static assets, as packages, or in a relative direcory i.e. git checkout. def getStaticResource(file_path): localpath = local_path + file_path installedpath = installed_path + file_path if os.path.exists(localpath) and os.path.isfile(localpath): return localpath elif os.path.exists(installedpath) and os.path.isfile(installedpath): return installedpath @app.error(404) def error404(error): return 'HTTP Error 404 - Not Found.' # Remove this to get more debug. #@app.error(500) #def error500(error): # return 'HTTP Error 500 - Internal Server Error.' # Things in here are applied to all requests. 
We need to set this header so strict browsers can query it using jquery #http://en.wikipedia.org/wiki/Cross-origin_resource_sharing @app.hook('after_request') def enable_cors(): response.headers['Access-Control-Allow-Origin'] = '*' response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, OPTIONS' @app.route('/', method='GET') def index(): return template(getStaticResource("/index.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # favicons @app.route('/favicon.ico', method='GET') def serve_favicon_ico(): response.content_type = "image/x-icon" return static_file('favicon.ico',root=getPath()) @app.route('/favicon.png', method='GET') def serve_favicon_png(): response.content_type = "image/png" return static_file('favicon.png',root=getPath()) # static resources like CSS @app.route('/bootstrap/', method='GET') def serve_static_res(filepath): return static_file(filepath, root=getPath() + "/bootstrap/") # Serve javascript resources from local system @app.route('/resources/jquery.min.js', method='GET') def serve_static_jquery(): response.content_type = "application/javascript" return static_file('jquery.min.js',root='/usr/share/javascript/jquery/') @app.route('/resources/jquery.form.min.js', method='GET') def serve_static_jquery_forms(): response.content_type = "application/javascript" return static_file('jquery.form.min.js',root='/usr/share/javascript/jquery-form/') # static HTML index page @app.route('/index.htm', method='GET') def serve_static_index(): return template(getStaticResource("/index.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML index page @app.route('/dashboard.htm', method='GET') def serve_static_dash(): return template(getStaticResource("/dashboard.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing arches @app.route('/arches.htm', method='GET') def serve_static_arches(): return template(getStaticResource("/arches.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl']) # static HTML page listing dists @app.route('/dists.htm', method='GET') def serve_static_dists(): return template(getStaticResource("/dists.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl']) # static HTML page listing formats @app.route('/formats.htm', method='GET') def serve_static_formats(): return template(getStaticResource("/formats.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl']) # static HTML page listing statuses @app.route('/statuses.htm', method='GET') def serve_static_statuses(): return template(getStaticResource("/statuses.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl']) # static HTML page listing suites @app.route('/suites.htm', method='GET') def serve_static_suites(): return template(getStaticResource("/suites.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl']) # static HTML page listing buildboxes @app.route('/buildd.htm', method='GET') def serve_static_buildboxes(): return 
template(getStaticResource("/buildd.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing jobs @app.route('/job.htm', method='GET') def serve_static_jobs(): return template(getStaticResource("/job.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing things @app.route('/lookups.htm', method='GET') def serve_static_lookups(): return template(getStaticResource("/lookups.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing packages @app.route('/package.htm', method='GET') def serve_static_packages(): return template(getStaticResource("/package.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing package instances @app.route('/packageinstance.htm', method='GET') def serve_static_package_instances(): return template(getStaticResource("/packageinstance.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing package instances @app.route('/envs.htm', method='GET') def serve_static_envs(): return template(getStaticResource("/envs.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) # static HTML page listing package instances @app.route('/blacklist.htm', method='GET') def serve_static_blacklist(): return template(getStaticResource("/blacklist.htm"), protocol=settings['web']['protocol'], jqueryurl=settings['web']['jqueryurl'], jqueryformurl=settings['web']['jqueryformurl'] ) app.mount('/job', job.get_job_app(settings, db, controller)) app.mount('/suite', lookups.get_suite_app(settings, db)) app.mount('/suitearch', lookups.get_suitearch_app(settings, db)) app.mount('/dist', lookups.get_dist_app(settings, db)) app.mount('/status',lookups.get_status_app(settings, db)) app.mount('/arch',lookups.get_arch_app(settings, db)) app.mount('/format', lookups.get_format_app(settings, db)) app.mount('/env', lookups.get_env_app(settings, db)) app.mount('/buildenv_suitearch', lookups.get_buildenv_suitearch_app(settings, db)) app.mount('/buildd', buildd.get_buildd_app(settings, db, controller)) app.mount('/package', package.get_packages_app(settings, db, controller)) app.mount('/packageinstance', packageinstance.get_packageinstance_app(settings, db)) app.mount('/blacklist',lookups.get_blacklist_app(settings, db)) return app pybit-1.0.0/pybitweb/static/0000755000175000017500000000000012146006064015652 5ustar neilneil00000000000000pybit-1.0.0/pybitweb/static/statuses.htm0000644000175000017500000002734312107360611020246 0ustar neilneil00000000000000 pyBit - Lookups
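As a usage note for the pybitweb package above: the following is a minimal, illustrative sketch of serving the composed application with Bottle's built-in server. The settings values and the None placeholders for the db and controller arguments are assumptions made only for this sketch; pybit ships its own entry point and configuration, and the other mounted sub-apps may expect further settings keys.

from bottle import run
from pybitweb import get_app

# Assumed, minimal settings for illustration; a real install reads these from
# pybit's own configuration files.
settings = {'web': {
    'installed_path': '/usr/share/pybit-web',        # assumed install location
    'protocol': 'http',
    'debug': True,
    'jqueryurl': '/resources/jquery.min.js',         # assumed values
    'jqueryformurl': '/resources/jquery.form.min.js',
}}

# db and controller are normally real pybit objects built elsewhere; None is
# used here purely to keep the sketch self-contained (handlers that need them
# will fail, but the app itself starts and serves the static pages).
app = get_app(settings, db=None, controller=None)
run(app, host='0.0.0.0', port=8080)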

PyBit - python Buildd Integration Toolkit.

Add Job Statuses

Enter the name of a status, to add it to the database. Examples of such statuses include 'Building' or 'Cancelled'.
Note that these are NOT to be edited under normal use.
Statuses

© TCL 2012

pybit-1.0.0/pybitweb/static/suites.htm0000644000175000017500000002734412107360611017710 0ustar neilneil00000000000000 pyBit - Lookups

PyBit - python Buildd Integration Toolkit.

Add Software Suites

Enter the name of a software suite, to add it to the database. Examples of suites include 'development' or 'chickpea'.
One example of when you would need to add something to this list is if you make a new release branch.
Suites

© TCL 2012

pybit-1.0.0/pybitweb/static/packageinstance.htm0000644000175000017500000004062312106502745021514 0ustar neilneil00000000000000 pyBit - Package Instances

PyBit - python Buildd Integration Toolkit.

Package Instances

Create a package instance Choose an existing package by name and version, then define the arch, suite, distribution, and format for this particular instance, then click 'Submit' to add it to the database. This generally happens automatically thanks to the SVN or other VCS hook.
Slave
Existing package instances are listed below. To add a new package instance to the database, add its details using the form above.
Package Instances
ID | Package | Architecture | Distribution | Format | Suite | Environment | Master?

© TCL 2012

pybit-1.0.0/pybitweb/static/job.htm0000644000175000017500000006002312106502745017142 0ustar neilneil00000000000000 pyBit - Jobs

PyBit - python Buildd Integration Toolkit.

Jobs

Use this page to add or cancel jobs, as well as to view their status history and assigned Package and BuildBox.
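The status history mentioned here is also exposed as JSON by the job application (mounted under /job). A hedged sketch using the third-party requests library follows; the host, port and job id are assumptions, and the route is written in its usual /job/<id>/status form.

import requests

# GET /job/<id>/status returns the status history for a job, or 404 if no
# such job exists. Host, port and the id 7 are assumptions for this sketch.
r = requests.get("http://localhost:8080/job/7/status")
if r.status_code == 200:
    for entry in r.json():
        print(entry)
else:
    print("no job with id 7")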
Submit a Job To submit a job, first choose an existing Package and PackageInstance.
Next, enter the path to the source, the method used to retrieve it (e.g. 'SVN'), and the VCS revision.
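The same submission can also be made directly against the HTTP API: the job application accepts a POST (or PUT) carrying the package instance, retrieval method, source URI and VCS revision. A hedged sketch follows; only the field names come from the handler, while the host, port, credentials and example values are assumptions.

import requests

resp = requests.post(
    "http://localhost:8080/job/",      # the job app is mounted under /job
    auth=("admin", "secret"),          # the handler sits behind basic auth
    data={
        "packageinstance_id": 42,      # id of an existing package instance
        "method": "svn",               # how to fetch the source
        "uri": "http://example.org/svn/trunk/mypackage",
        "vcs_id": "1234",              # VCS revision to build
    },
)
print(resp.status_code)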

Use this dropdown to filter the list of jobs (shown below) by their status.
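The same filter is available over the API: jobs can be fetched by status name. A hedged sketch; the host, port and the 'Building' status are assumptions, and the route is written in its usual /job/status/<name> form.

import requests

# Returns the jobs currently in the given status as JSON.
building = requests.get("http://localhost:8080/job/status/Building").json()
print("%d jobs building" % len(building))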
Current Jobs
ID | Package Instance | Buildclient | Master | Status | Operations

© TCL 2012

pybit-1.0.0/pybitweb/static/package.htm0000644000175000017500000003161312106502745017766 0ustar neilneil00000000000000 pyBit - Packages

PyBit - python Buildd Integration Toolkit.

Packages

Add a Package Enter the name and version of a package to add it to the database.
This generally happens automatically thanks to the SVN or other VCS hook.
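For reference, a hedged sketch of what such a VCS hook might POST to the /job/vcshook endpoint. The field names are the ones the handler reads; the host, credentials and example values are assumptions.

import requests

requests.post(
    "http://localhost:8080/job/vcshook",
    auth=("admin", "secret"),          # the hook endpoint requires basic auth
    data={
        "package": "mypackage",
        "package_version": "1.2-1",
        "uri": "http://example.org/svn/trunk/mypackage",
        "method": "svn",
        "distribution": "Debian",
        "architecture_list": "i386",
        "suite": "development",
        "format": "deb",
        "vcs_id": "1234",              # the revision that triggered the hook
    },
)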

Existing packages are listed below, and those with active jobs may be cancelled. To add a new package to the database, add its name and version using the form above.
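Packages can also be looked up over the HTTP API: the package application answers GET /package/details/<name> with a JSON list of known versions, or a 404 when nothing matches. A hedged sketch; the host, port and package name are assumptions.

import requests

r = requests.get("http://localhost:8080/package/details/mypackage")
if r.status_code == 200:
    print(r.json())        # list of matching package versions
else:
    print("no package named 'mypackage'")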
Packages
ID | Name | Version | Operations

© TCL 2012

pybit-1.0.0/pybitweb/static/arches.htm0000644000175000017500000004035012107360611017631 0ustar neilneil00000000000000 pyBit - Lookups

PyBit - python Buildd Integration Toolkit.

Add Package Architectures

Enter the name of a package architecture, to add it to the database. Examples of such architectures include 'i386'.
One example of when you would need to add something to this list is if a new platform becomes supported.
Architectures
Choose an existing arch to add to the list of supported architectures for a particular software suite.

Suite Arches
Name | Master Weight | Operations

© TCL 2012

pybit-1.0.0/pybitweb/static/buildd.htm0000644000175000017500000003236312107360611017634 0ustar neilneil00000000000000 pyBit - BuildBoxes

PyBit - python Buildd Integration Toolkit.

BuildBoxes

Add a Buildbox To add a BuildBox to the database, add its hostname using the form below.
When creating a new BuildBox, prefix its name with "build_client_".
Known BuildBoxes are listed below. To add a new machine to the database, add its hostname using the form above.
The status of each machine is shown next to its name, as are any available operations.
Buildboxes
Name | Status | Operations

© TCL 2012

pybit-1.0.0/pybitweb/static/index.htm0000644000175000017500000002435712107360611017504 0ustar neilneil00000000000000 pyBit - Lookups

PyBit

Python Buildd Integration Toolkit.

A new build system using Python and RabbitMQ, providing a RESTful HTTP-based API.

Learn more »

About Pybit

pyBit (πβ), the Python Build Integration Toolkit, aims to create a distributed, cross-platform build system using Python and RabbitMQ. Python-bottle is used to provide a lightweight web-based API, as well as this web interface.

We aim to be flexible enough to build any combination of package types (e.g. DEB, RPM) for any arch, for any system. Currently, however, we are mostly concerned with building ARM and i386 packages from SVN to target Debian GNU/Linux.

pyBit is Free Software. For support, contact us on IRC - #pybit on irc.oftc.net irc://irc.oftc.org/pybit (or use GitHub issues).

Packages

Click here to register/delete packages and package instances, as well as to cancel all build jobs for a chosen package.

View details »

Jobs

Click here to add or cancel jobs, as well as to view their status history and assigned Package and BuildBox.

View details »

Build Boxes

Click here to register/delete BuildBoxes, as well as to view their status and currently assigned jobs.

View details »


© TCL 2012

pybit-1.0.0/pybitweb/static/blacklist.htm0000644000175000017500000003014212107360611020332 0ustar neilneil00000000000000 pyBit - Lookups

PyBit - python Buildd Integration Toolkit.

Add Blacklist Rule

Blacklist rules consist of a "field" and a "regex". If there is a pattern match on the field in question, we will not build.
For example, "name" and "(.*-dev)" will mean we do not autobuild any development packages, while "vcs_uri" and "(.*/users/*)" will block sources from locations such as /repo/users/jamesb/somebadcode.
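To make the rule semantics concrete, here is an illustrative Python sketch (not pybit's own implementation) of evaluating such field/regex rules against the fields of an incoming build request:

import re

# Example rules in the (field, regex) shape described above.
BLACKLIST = [
    ("name", r"(.*-dev)"),          # do not autobuild development packages
    ("vcs_uri", r"(.*/users/*)"),   # block personal/user branches
]

def is_blacklisted(request_fields, rules=BLACKLIST):
    """Return True if any rule's regex matches the named field of the request."""
    for field, regex in rules:
        if re.search(regex, request_fields.get(field, "")):
            return True
    return False

print(is_blacklisted({"name": "foo-dev", "vcs_uri": "svn://example.org/repo/trunk/foo"}))  # True
print(is_blacklisted({"name": "foo", "vcs_uri": "svn://example.org/repo/trunk/foo"}))      # False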
Blacklist Rules

© TCL 2012

pybit-1.0.0/pybitweb/static/lookups.htm0000644000175000017500000005777312107360611020101 0ustar neilneil00000000000000 pyBit - Lookups

PyBit - python Buildd Integration Toolkit.

Lookups

This page is where you can add values such as valid package architectures and software suites to the database.
You can also view the tables of existing values. Simply click the titles shown below to expand them for use.
Add Package Architectures Enter the name of a package architecture, to add it to the database. Examples of such architectures include 'i386'.
One example of when you would need to add something to this list is if a new platform becomes supported.
Architectures
Add Job Statuses Enter the name of a status, to add it to the database. Examples of such statuses include 'Building' or 'Cancelled'.
Note that these are NOT to be edited under normal use.
Statuses
Add Software Distributions Enter the name of a software distribution, to add it to the database. Examples of distributions include 'Debian' or 'Red Hat'.
One example of when you would need to add something to this list is if a new OS becomes supported.
Distributions
Add Package Formats Enter the name (file extension) of a packaging format, to add it to the database. Examples of formats include 'deb' or 'rpm'.
One example of when you would need to add something to this list is if you write a new build output handler.
Formats
Add Software Suites Enter the name of a software suite, to add it to the database. Examples of suites include 'development' or 'chickpea'.
One example of when you would need to add something to this list is if you make a new release branch.
Suites

© TCL 2012

pybit-1.0.0/pybitweb/static/dists.htm0000644000175000017500000002731312107360611017516 0ustar neilneil00000000000000 pyBit - Lookups

PyBit - python Buildd Integration Toolkit.

Add Software Distributions

Enter the name of a software distribution, to add it to the database. Examples of distributions include 'Debian' or 'Red Hat'.
One example of when you would need to add something to this list is if a new OS becomes supported.
Distributions

© TCL 2012

pybit-1.0.0/pybitweb/static/dashboard.htm0000644000175000017500000002055412106502745020324 0ustar neilneil00000000000000 pyBit - Dashboard
Unfinished Jobs
ID | Package | Version | Arch | Suite | Environment | Buildclient | Master | Status
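The data behind this dashboard is also available as JSON: the job application's GET /job/status answers with the list of unfinished jobs. A hedged sketch; the host and port are assumptions.

import requests

unfinished = requests.get("http://localhost:8080/job/status").json()
print("%d unfinished jobs" % len(unfinished))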
pybit-1.0.0/pybitweb/static/bootstrap/ (directory)
pybit-1.0.0/pybitweb/static/bootstrap/img/ (directory)
pybit-1.0.0/pybitweb/static/bootstrap/img/glyphicons-halflings.png [binary PNG image data omitted]
pybit-1.0.0/pybitweb/static/bootstrap/img/glyphicons-halflings-white.png [binary PNG image data omitted]
pybit-1.0.0/pybitweb/static/bootstrap/js/ (directory)
pybit-1.0.0/pybitweb/static/bootstrap/js/bootstrap.min.js /*! * Bootstrap.js by @fat & @mdo * Copyright 2012 Twitter, Inc.
* http://www.apache.org/licenses/LICENSE-2.0.txt */ !function(e){e(function(){"use strict";e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()},e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e(function(){e("body").on("click.alert.data-api",t,n.prototype.close)})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")},e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e(function(){e("body").on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=n,this.options.slide&&this.slide(this.options.slide),this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},to:function(t){var n=this.$element.find(".item.active"),r=n.parent().children(),i=r.index(n),s=this;if(t>r.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){s.to(t)}):i==t?this.pause().cycle():this.slide(t>i?"next":"prev",e(r[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle()),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f=e.Event("slide",{relatedTarget:i[0]});this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u]();if(i.hasClass("active"))return;if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}},e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e(function(){e("body").on("click.carousel.data-api","[data-slide]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=!i.data("modal")&&e.extend({},i.data(),n.data());i.carousel(s),t.preventDefault()})})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning)return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning)return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}},e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=typeof n=="object"&&n;i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e(function(){e("body").on("click.collapse.data-api","[data-toggle=collapse]",function(t){var 
n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})})}(window.jQuery),!function(e){"use strict";function r(){i(e(t)).removeClass("open")}function i(t){var n=t.attr("data-target"),r;return n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=e(n),r.length||(r=t.parent()),r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||(s.toggleClass("open"),n.focus()),!1},keydown:function(t){var n,r,s,o,u,a;if(!/(38|40|27)/.test(t.keyCode))return;n=e(this),t.preventDefault(),t.stopPropagation();if(n.is(".disabled, :disabled"))return;o=i(n),u=o.hasClass("open");if(!u||u&&t.keyCode==27)return n.click();r=e("[role=menu] li:not(.divider) a",o);if(!r.length)return;a=r.index(r.filter(":focus")),t.keyCode==38&&a>0&&a--,t.keyCode==40&&a').appendTo(document.body),this.options.backdrop!="static"&&this.$backdrop.click(e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,e.proxy(this.removeBackdrop,this)):this.removeBackdrop()):t&&t()}},e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e(function(){e("body").on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,this.options.trigger=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):this.options.trigger!="manual"&&(i=this.options.trigger=="hover"?"mouseenter":"focus",s=this.options.trigger=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this))),this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,t,this.$element.data()),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);if(!n.options.delay||!n.options.delay.show)return n.show();clearTimeout(this.timeout),n.hoverState="in",this.timeout=setTimeout(function(){n.hoverState=="in"&&n.show()},n.options.delay.show)},leave:function(t){var 
n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var e,t,n,r,i,s,o;if(this.hasContent()&&this.enabled){e=this.tip(),this.setContent(),this.options.animation&&e.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,e[0],this.$element[0]):this.options.placement,t=/in/.test(s),e.remove().css({top:0,left:0,display:"block"}).appendTo(t?this.$element:document.body),n=this.getPosition(t),r=e[0].offsetWidth,i=e[0].offsetHeight;switch(t?s.split(" ")[1]:s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}e.css(o).addClass(s).addClass("in")}},setContent:function(){var e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function r(){var t=setTimeout(function(){n.off(e.support.transition.end).remove()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.remove()})}var t=this,n=this.tip();return n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?r():n.remove(),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").removeAttr("title")},hasContent:function(){return this.getTitle()},getPosition:function(t){return e.extend({},t?{top:0,left:0}:this.$element.offset(),{width:this.$element[0].offsetWidth,height:this.$element[0].offsetHeight})},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(){this[this.tip().hasClass("in")?"hide":"show"]()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}},e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'
',trigger:"hover",title:"",delay:0,html:!0}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content > *")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-content")||(typeof n.content=="function"?n.content.call(t[0]):n.content),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}}),e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'

'})}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var t=e(this),n=t.data("target")||t.attr("href"),r=/^#\w/.test(n)&&e(n);return r&&r.length&&[[r.position().top,n]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}},e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active a").last()[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}},e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e(function(){e("body").on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})})}(window.jQuery),!function(e){"use strict";var 
t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.$menu=e(this.options.menu).appendTo("body"),this.source=this.options.source,this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.offset(),{height:this.$element[0].offsetHeight});return this.$menu.css({top:t.top+t.height,left:t.left}),this.$menu.show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length"+t+""})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),(e.browser.chrome||e.browser.webkit||e.browser.msie)&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this))},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=!~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:break;case 9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},blur:function(e){var t=this;setTimeout(function(){t.hide()},150)},click:function(e){e.stopPropagation(),e.preventDefault(),this.select()},mouseenter:function(t){this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")}},e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'',item:'
  • ',minLength:1},e.fn.typeahead.Constructor=t,e(function(){e("body").on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;t.preventDefault(),n.typeahead(n.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))},e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery);pybit-1.0.0/pybitweb/static/bootstrap/js/bootstrap.js0000644000175000017500000015571012045012715022665 0ustar neilneil00000000000000/* =================================================== * bootstrap-transition.js v2.1.1 * http://twitter.github.com/bootstrap/javascript.html#transitions * =================================================== * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ========================================================== */ !function ($) { $(function () { "use strict"; // jshint ;_; /* CSS TRANSITION SUPPORT (http://www.modernizr.com/) * ======================================================= */ $.support.transition = (function () { var transitionEnd = (function () { var el = document.createElement('bootstrap') , transEndEventNames = { 'WebkitTransition' : 'webkitTransitionEnd' , 'MozTransition' : 'transitionend' , 'OTransition' : 'oTransitionEnd otransitionend' , 'transition' : 'transitionend' } , name for (name in transEndEventNames){ if (el.style[name] !== undefined) { return transEndEventNames[name] } } }()) return transitionEnd && { end: transitionEnd } })() }) }(window.jQuery);/* ========================================================== * bootstrap-alert.js v2.1.1 * http://twitter.github.com/bootstrap/javascript.html#alerts * ========================================================== * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ========================================================== */ !function ($) { "use strict"; // jshint ;_; /* ALERT CLASS DEFINITION * ====================== */ var dismiss = '[data-dismiss="alert"]' , Alert = function (el) { $(el).on('click', dismiss, this.close) } Alert.prototype.close = function (e) { var $this = $(this) , selector = $this.attr('data-target') , $parent if (!selector) { selector = $this.attr('href') selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7 } $parent = $(selector) e && e.preventDefault() $parent.length || ($parent = $this.hasClass('alert') ? $this : $this.parent()) $parent.trigger(e = $.Event('close')) if (e.isDefaultPrevented()) return $parent.removeClass('in') function removeElement() { $parent .trigger('closed') .remove() } $.support.transition && $parent.hasClass('fade') ? $parent.on($.support.transition.end, removeElement) : removeElement() } /* ALERT PLUGIN DEFINITION * ======================= */ $.fn.alert = function (option) { return this.each(function () { var $this = $(this) , data = $this.data('alert') if (!data) $this.data('alert', (data = new Alert(this))) if (typeof option == 'string') data[option].call($this) }) } $.fn.alert.Constructor = Alert /* ALERT DATA-API * ============== */ $(function () { $('body').on('click.alert.data-api', dismiss, Alert.prototype.close) }) }(window.jQuery);/* ============================================================ * bootstrap-button.js v2.1.1 * http://twitter.github.com/bootstrap/javascript.html#buttons * ============================================================ * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ============================================================ */ !function ($) { "use strict"; // jshint ;_; /* BUTTON PUBLIC CLASS DEFINITION * ============================== */ var Button = function (element, options) { this.$element = $(element) this.options = $.extend({}, $.fn.button.defaults, options) } Button.prototype.setState = function (state) { var d = 'disabled' , $el = this.$element , data = $el.data() , val = $el.is('input') ? 'val' : 'html' state = state + 'Text' data.resetText || $el.data('resetText', $el[val]()) $el[val](data[state] || this.options[state]) // push to event loop to allow forms to submit setTimeout(function () { state == 'loadingText' ? 
$el.addClass(d).attr(d, d) : $el.removeClass(d).removeAttr(d) }, 0) } Button.prototype.toggle = function () { var $parent = this.$element.closest('[data-toggle="buttons-radio"]') $parent && $parent .find('.active') .removeClass('active') this.$element.toggleClass('active') } /* BUTTON PLUGIN DEFINITION * ======================== */ $.fn.button = function (option) { return this.each(function () { var $this = $(this) , data = $this.data('button') , options = typeof option == 'object' && option if (!data) $this.data('button', (data = new Button(this, options))) if (option == 'toggle') data.toggle() else if (option) data.setState(option) }) } $.fn.button.defaults = { loadingText: 'loading...' } $.fn.button.Constructor = Button /* BUTTON DATA-API * =============== */ $(function () { $('body').on('click.button.data-api', '[data-toggle^=button]', function ( e ) { var $btn = $(e.target) if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn') $btn.button('toggle') }) }) }(window.jQuery);/* ========================================================== * bootstrap-carousel.js v2.1.1 * http://twitter.github.com/bootstrap/javascript.html#carousel * ========================================================== * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ========================================================== */ !function ($) { "use strict"; // jshint ;_; /* CAROUSEL CLASS DEFINITION * ========================= */ var Carousel = function (element, options) { this.$element = $(element) this.options = options this.options.slide && this.slide(this.options.slide) this.options.pause == 'hover' && this.$element .on('mouseenter', $.proxy(this.pause, this)) .on('mouseleave', $.proxy(this.cycle, this)) } Carousel.prototype = { cycle: function (e) { if (!e) this.paused = false this.options.interval && !this.paused && (this.interval = setInterval($.proxy(this.next, this), this.options.interval)) return this } , to: function (pos) { var $active = this.$element.find('.item.active') , children = $active.parent().children() , activePos = children.index($active) , that = this if (pos > (children.length - 1) || pos < 0) return if (this.sliding) { return this.$element.one('slid', function () { that.to(pos) }) } if (activePos == pos) { return this.pause().cycle() } return this.slide(pos > activePos ? 'next' : 'prev', $(children[pos])) } , pause: function (e) { if (!e) this.paused = true if (this.$element.find('.next, .prev').length && $.support.transition.end) { this.$element.trigger($.support.transition.end) this.cycle() } clearInterval(this.interval) this.interval = null return this } , next: function () { if (this.sliding) return return this.slide('next') } , prev: function () { if (this.sliding) return return this.slide('prev') } , slide: function (type, next) { var $active = this.$element.find('.item.active') , $next = next || $active[type]() , isCycling = this.interval , direction = type == 'next' ? 'left' : 'right' , fallback = type == 'next' ? 
            'first' : 'last'
        , that = this
        , e = $.Event('slide', { relatedTarget: $next[0] })

      this.sliding = true

      isCycling && this.pause()

      $next = $next.length ? $next : this.$element.find('.item')[fallback]()

      if ($next.hasClass('active')) return

      if ($.support.transition && this.$element.hasClass('slide')) {
        this.$element.trigger(e)
        if (e.isDefaultPrevented()) return
        $next.addClass(type)
        $next[0].offsetWidth // force reflow
        $active.addClass(direction)
        $next.addClass(direction)
        this.$element.one($.support.transition.end, function () {
          $next.removeClass([type, direction].join(' ')).addClass('active')
          $active.removeClass(['active', direction].join(' '))
          that.sliding = false
          setTimeout(function () { that.$element.trigger('slid') }, 0)
        })
      } else {
        this.$element.trigger(e)
        if (e.isDefaultPrevented()) return
        $active.removeClass('active')
        $next.addClass('active')
        this.sliding = false
        this.$element.trigger('slid')
      }

      isCycling && this.cycle()

      return this
    }

  }

 /* CAROUSEL PLUGIN DEFINITION
  * ========================== */

  $.fn.carousel = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('carousel')
        , options = $.extend({}, $.fn.carousel.defaults, typeof option == 'object' && option)
        , action = typeof option == 'string' ? option : options.slide
      if (!data) $this.data('carousel', (data = new Carousel(this, options)))
      if (typeof option == 'number') data.to(option)
      else if (action) data[action]()
      else if (options.interval) data.cycle()
    })
  }

  $.fn.carousel.defaults = {
    interval: 5000
  , pause: 'hover'
  }

  $.fn.carousel.Constructor = Carousel

 /* CAROUSEL DATA-API
  * ================= */

  $(function () {
    $('body').on('click.carousel.data-api', '[data-slide]', function ( e ) {
      var $this = $(this), href
        , $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) //strip for ie7
        , options = !$target.data('modal') && $.extend({}, $target.data(), $this.data())
      $target.carousel(options)
      e.preventDefault()
    })
  })

}(window.jQuery);
/* =============================================================
 * bootstrap-collapse.js v2.1.1
 * http://twitter.github.com/bootstrap/javascript.html#collapse
 * =============================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============================================================ */

!function ($) {

  "use strict"; // jshint ;_;

 /* COLLAPSE PUBLIC CLASS DEFINITION
  * ================================ */

  var Collapse = function (element, options) {
    this.$element = $(element)
    this.options = $.extend({}, $.fn.collapse.defaults, options)

    if (this.options.parent) {
      this.$parent = $(this.options.parent)
    }

    this.options.toggle && this.toggle()
  }

  Collapse.prototype = {

    constructor: Collapse

  , dimension: function () {
      var hasWidth = this.$element.hasClass('width')
      return hasWidth ?
        'width' : 'height'
    }

  , show: function () {
      var dimension, scroll, actives, hasData

      if (this.transitioning) return

      dimension = this.dimension()
      scroll = $.camelCase(['scroll', dimension].join('-'))
      actives = this.$parent && this.$parent.find('> .accordion-group > .in')

      if (actives && actives.length) {
        hasData = actives.data('collapse')
        if (hasData && hasData.transitioning) return
        actives.collapse('hide')
        hasData || actives.data('collapse', null)
      }

      this.$element[dimension](0)
      this.transition('addClass', $.Event('show'), 'shown')
      $.support.transition && this.$element[dimension](this.$element[0][scroll])
    }

  , hide: function () {
      var dimension
      if (this.transitioning) return
      dimension = this.dimension()
      this.reset(this.$element[dimension]())
      this.transition('removeClass', $.Event('hide'), 'hidden')
      this.$element[dimension](0)
    }

  , reset: function (size) {
      var dimension = this.dimension()

      this.$element
        .removeClass('collapse')
        [dimension](size || 'auto')
        [0].offsetWidth

      this.$element[size !== null ? 'addClass' : 'removeClass']('collapse')

      return this
    }

  , transition: function (method, startEvent, completeEvent) {
      var that = this
        , complete = function () {
            if (startEvent.type == 'show') that.reset()
            that.transitioning = 0
            that.$element.trigger(completeEvent)
          }

      this.$element.trigger(startEvent)

      if (startEvent.isDefaultPrevented()) return

      this.transitioning = 1

      this.$element[method]('in')

      $.support.transition && this.$element.hasClass('collapse') ?
        this.$element.one($.support.transition.end, complete) :
        complete()
    }

  , toggle: function () {
      this[this.$element.hasClass('in') ? 'hide' : 'show']()
    }

  }

 /* COLLAPSIBLE PLUGIN DEFINITION
  * ============================== */

  $.fn.collapse = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('collapse')
        , options = typeof option == 'object' && option
      if (!data) $this.data('collapse', (data = new Collapse(this, options)))
      if (typeof option == 'string') data[option]()
    })
  }

  $.fn.collapse.defaults = {
    toggle: true
  }

  $.fn.collapse.Constructor = Collapse

 /* COLLAPSIBLE DATA-API
  * ==================== */

  $(function () {
    $('body').on('click.collapse.data-api', '[data-toggle=collapse]', function (e) {
      var $this = $(this), href
        , target = $this.attr('data-target')
          || e.preventDefault()
          || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') //strip for ie7
        , option = $(target).data('collapse') ? 'toggle' : $this.data()
      $this[$(target).hasClass('in') ? 'addClass' : 'removeClass']('collapsed')
      $(target).collapse(option)
    })
  })

}(window.jQuery);
/* ============================================================
 * bootstrap-dropdown.js v2.1.1
 * http://twitter.github.com/bootstrap/javascript.html#dropdowns
 * ============================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============================================================ */

!function ($) {

  "use strict"; // jshint ;_;

 /* DROPDOWN CLASS DEFINITION
  * ========================= */

  var toggle = '[data-toggle=dropdown]'
    , Dropdown = function (element) {
        var $el = $(element).on('click.dropdown.data-api', this.toggle)
        $('html').on('click.dropdown.data-api', function () {
          $el.parent().removeClass('open')
        })
      }

  Dropdown.prototype = {

    constructor: Dropdown

  , toggle: function (e) {
      var $this = $(this)
        , $parent
        , isActive

      if ($this.is('.disabled, :disabled')) return

      $parent = getParent($this)
      isActive = $parent.hasClass('open')

      clearMenus()

      if (!isActive) {
        $parent.toggleClass('open')
        $this.focus()
      }

      return false
    }

  , keydown: function (e) {
      var $this, $items, $active, $parent, isActive, index

      if (!/(38|40|27)/.test(e.keyCode)) return

      $this = $(this)

      e.preventDefault()
      e.stopPropagation()

      if ($this.is('.disabled, :disabled')) return

      $parent = getParent($this)
      isActive = $parent.hasClass('open')

      if (!isActive || (isActive && e.keyCode == 27)) return $this.click()

      $items = $('[role=menu] li:not(.divider) a', $parent)

      if (!$items.length) return

      index = $items.index($items.filter(':focus'))

      if (e.keyCode == 38 && index > 0) index--                 // up
      if (e.keyCode == 40 && index < $items.length - 1) index++ // down
      if (!~index) index = 0

      $items
        .eq(index)
        .focus()
    }

  }

  function clearMenus() {
    getParent($(toggle))
      .removeClass('open')
  }

  function getParent($this) {
    var selector = $this.attr('data-target')
      , $parent

    if (!selector) {
      selector = $this.attr('href')
      selector = selector && /#/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') //strip for ie7
    }

    $parent = $(selector)
    $parent.length || ($parent = $this.parent())

    return $parent
  }

 /* DROPDOWN PLUGIN DEFINITION
  * ========================== */

  $.fn.dropdown = function (option) {
    return this.each(function () {
      var $this = $(this)
        , data = $this.data('dropdown')
      if (!data) $this.data('dropdown', (data = new Dropdown(this)))
      if (typeof option == 'string') data[option].call($this)
    })
  }

  $.fn.dropdown.Constructor = Dropdown

 /* APPLY TO STANDARD DROPDOWN ELEMENTS
  * =================================== */

  $(function () {
    $('html')
      .on('click.dropdown.data-api touchstart.dropdown.data-api', clearMenus)
    $('body')
      .on('click.dropdown touchstart.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
      .on('click.dropdown.data-api touchstart.dropdown.data-api', toggle, Dropdown.prototype.toggle)
      .on('keydown.dropdown.data-api touchstart.dropdown.data-api', toggle + ', [role=menu]', Dropdown.prototype.keydown)
  })

}(window.jQuery);
/* =========================================================
 * bootstrap-modal.js v2.1.1
 * http://twitter.github.com/bootstrap/javascript.html#modals
 * =========================================================
 * Copyright 2012 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================================================= */

!function ($) {

  "use strict"; // jshint ;_;

 /* MODAL CLASS DEFINITION
  * ====================== */

  var Modal = function (element, options) {
    this.options = options
    this.$element = $(element)
      .delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this))
    this.options.remote && this.$element.find('.modal-body').load(this.options.remote)
  }

  Modal.prototype = {

    constructor: Modal

  , toggle: function () {
      return this[!this.isShown ? 'show' : 'hide']()
    }

  , show: function () {
      var that = this
        , e = $.Event('show')

      this.$element.trigger(e)

      if (this.isShown || e.isDefaultPrevented()) return

      $('body').addClass('modal-open')

      this.isShown = true

      this.escape()

      this.backdrop(function () {
        var transition = $.support.transition && that.$element.hasClass('fade')

        if (!that.$element.parent().length) {
          that.$element.appendTo(document.body) //don't move modals dom position
        }

        that.$element
          .show()

        if (transition) {
          that.$element[0].offsetWidth // force reflow
        }

        that.$element
          .addClass('in')
          .attr('aria-hidden', false)
          .focus()

        that.enforceFocus()

        transition ?
          that.$element.one($.support.transition.end, function () { that.$element.trigger('shown') }) :
          that.$element.trigger('shown')
      })
    }

  , hide: function (e) {
      e && e.preventDefault()

      var that = this

      e = $.Event('hide')

      this.$element.trigger(e)

      if (!this.isShown || e.isDefaultPrevented()) return

      this.isShown = false

      $('body').removeClass('modal-open')

      this.escape()

      $(document).off('focusin.modal')

      this.$element
        .removeClass('in')
        .attr('aria-hidden', true)

      $.support.transition && this.$element.hasClass('fade') ?
        this.hideWithTransition() :
        this.hideModal()
    }

  , enforceFocus: function () {
      var that = this
      $(document).on('focusin.modal', function (e) {
        if (that.$element[0] !== e.target && !that.$element.has(e.target).length) {
          that.$element.focus()
        }
      })
    }

  , escape: function () {
      var that = this
      if (this.isShown && this.options.keyboard) {
        this.$element.on('keyup.dismiss.modal', function ( e ) {
          e.which == 27 && that.hide()
        })
      } else if (!this.isShown) {
        this.$element.off('keyup.dismiss.modal')
      }
    }

  , hideWithTransition: function () {
      var that = this
        , timeout = setTimeout(function () {
            that.$element.off($.support.transition.end)
            that.hideModal()
          }, 500)

      this.$element.one($.support.transition.end, function () {
        clearTimeout(timeout)
        that.hideModal()
      })
    }

  , hideModal: function (that) {
      this.$element
        .hide()
        .trigger('hidden')

      this.backdrop()
    }

  , removeBackdrop: function () {
      this.$backdrop.remove()
      this.$backdrop = null
    }

  , backdrop: function (callback) {
      var that = this
        , animate = this.$element.hasClass('fade') ? 'fade' : ''

      if (this.isShown && this.options.backdrop) {
        var doAnimate = $.support.transition && animate

        this.$backdrop = $('