moksha.hub-1.4.1/0000755000175000017500000000000012371232660015472 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/PKG-INFO0000644000175000017500000000043012371232660016564 0ustar threebeanthreebean00000000000000Metadata-Version: 1.0 Name: moksha.hub Version: 1.4.1 Summary: Hub components for Moksha. Home-page: http://moksha.fedorahosted.org Author: Luke Macken, John (J5) Palmieri, Mairin Duffy, and Ralph Bean Author-email: UNKNOWN License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN moksha.hub-1.4.1/development.ini0000644000175000017500000001556012370150161020516 0ustar threebeanthreebean00000000000000# # moksha - Pylons development environment configuration # # The %(here)s variable will be replaced with the parent directory of this file # # This file is for deployment specific config options -- other configuration # that is always required for the app is done in the config directory, # and generally should not be modified by end users. [DEFAULT] debug = true # Uncomment and replace with the address which should receive any error reports #email_to = you@yourdomain.com smtp_server = localhost error_email_from = paste@localhost [server:main] use = egg:Paste#http host = 0.0.0.0 port = 8080 [app:main] use = egg:moksha full_stack = true #lang = ru cache_dir = %(here)s/data beaker.session.key = moksha beaker.session.secret = somesecret beaker.cache.type = memory #beaker.cache.type = ext:memcached #beaker.cache.url = localhost:11211 # If you'd like to fine-tune the individual locations of the cache data dirs # for the Cache data, or the Session saves, un-comment the desired settings # here: #beaker.cache.data_dir = %(here)s/data/cache #beaker.session.data_dir = %(here)s/data/sessions ## ## Moksha-specific configuration options ## # moksha.domain - Domain used for XSS communication. # # If your webapp and orbited live on app.live.example.com, # just set this to app.live.example.com and you're done. # # If your webapp lives on app.live.example.com and orbited lives on # orbited.live.example.com, you need to set this to a suffix of the two, like: # moksha.domain = live.example.com # moksha.domain = localhost # Enable the Moksha Extension Point Middleware moksha.extensionpoints = True # Number of seconds between polling feeds feed.poll_frequency = 35 # Where to store the feed caches. #feed_cache = sqlite:///%(here)s/feeds.db # Max age (in seconds) of each feed in the cache feed.max_age = 300 # Timeout in seconds for the web request feed.timeout = 60 # The number of simultaneous connections feed.deferred_groups = 3 # Where to initialize and store our application databases. %s is the app name. app_db = sqlite:///%(here)s/moksha_app.db # Note: for postgres/mysql, these app-specific dbs needs to be created by hand #app_db = postgres://moksha:m0ksh4@localhost/%s ## ## Moksha Live Socket configuration ## # A machine-local zeromq interprocess socket, typically moksha.monitoring.socket = ipc:///var/tmp/moksha-monitoring moksha.workers_per_consumer = 1 moksha.monitoring.socket.mode = 770 # Automatically inject the Moksha live socket with # the Global Resource Injection Widget moksha.livesocket = True # The backing protocol for live widgets. # Available backends: stomp, amqp, websocket moksha.livesocket.backend = websocket # TODO -- change the following two to 'livesocket' from socket # jGrowl notifications for socket state changes moksha.socket.notify = True # How often should the client websocket attempt to reconnect if # the moksha-hub goes away? 
Reconnection attempts are disabled # if this is unspecified. #moksha.socket.reconnect_interval = 5000 # Only required for websocket moksha.livesocket.websocket.port = 9998 # The location of our Orbited server orbited_host = localhost orbited_port = 9000 orbited_scheme = http ## Stomp broker configuration. ## By default we use the MorbidQ broker, run by Orbited, for development. #stomp_broker = localhost #stomp_port = 61613 ## If stomp_uri is present, stomp_broker and stomp_port are ignored #stomp_uri = localhost:61613 ## If there are multiple uris, then a failover() method is employed at runtime #stomp_uri = localhost:61613,localhost:61614 ## Authentication options #stomp_user = guest #stomp_pass = guest #stomp_ssl_crt = /path/to/an/optional.crt #stomp_ssl_key = /path/to/an/optional.key # Optional AMQP Broker #amqp_broker = guest/guest@localhost #amqp_broker_host = localhost #amqp_broker_port = 5672 #amqp_broker_user = guest #amqp_broker_pass = guest #amqp_broker_ssl = False # Optional zeroMQ pub/sub pattern zmq_enabled = True zmq_publish_endpoints = tcp://*:6543 zmq_subscribe_endpoints = tcp://127.0.0.1:6543 # Force zeromq topic subscription to only forward messages whose # topics strictly match. zmq_strict = True ## The hub can subscribe by either zmq bind or zmq connect. #zmq_subscribe_method = connect # Metrics app enabled? mdemos.metrics.stream = False # What backend to use for the feeds app? mdemos.feeds.feedtree.engine = live # Documentation directory docs_dir = docs # docs_dir = /srv/moksha/docs chat.backend = irc://irc.freenode.net:6667 chat.rooms = default chat.default.staticRoomName = moksha chat.default.roomAssignmentMode = static chat.default.display.greeting = Moksha Chat # Use a built-in IRC server #chat.backend = irc://localhost:9999 #chat.builtin = true #chat.default.display.floating = true #chat.default.display.floatingToggle = false #chat.default.display.width = 400 #chat.default.display.height = 300 #chat.default.display.theme = simple #chat.default.display.resizable = true # pick the form for your database # %(here) may include a ':' character on Windows environments; this can # invalidate the URI when specifying a SQLite db via path name # sqlalchemy.url=postgres://username:password:port@hostname/databasename # sqlalchemy.url=mysql://username:password@hostname:port/databasename # If you have sqlite, here's a simple default to get you started # in development sqlalchemy.url = sqlite:///%(here)s/devdata.db sqlalchemy.echo = true sqlalchemy.echo_pool = false sqlalchemy.pool_recycle = 3600 # WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* # Debug mode will enable the interactive debugging tool, allowing ANYONE to # execute malicious code after an exception is raised. 
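# (Note on the "set" prefix: with PasteDeploy-style ini files, a plain
# "debug = false" in this section would not override the "debug = true"
# inherited from [DEFAULT] above; the "set" form below is what forces the
# override, which is why production deployments must uncomment it.)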
#set debug = false # Logging configuration # Add additional loggers, handlers, formatters here # Uses python's logging config file format # http://docs.python.org/lib/logging-config-fileformat.html templating.mako.compiled_templates_dir = %(here)s/data/templates [loggers] keys = root, moksha, sqlalchemy, tg, auth, pylons [handlers] keys = console [formatters] keys = generic # If you create additional loggers, add them as a key to [loggers] [logger_root] level = INFO handlers = console [logger_moksha] level = DEBUG handlers = qualname = moksha [logger_tg] level = INFO handlers = qualname = tg # repoze.who is noisy by default [logger_auth] level = WARNING handlers = qualname = auth [logger_pylons] level = INFO handlers = qualname = pylons [logger_sqlalchemy] level = INFO handlers = qualname = sqlalchemy.engine # "level = INFO" logs SQL queries. # "level = DEBUG" logs SQL queries and results. # "level = WARN" logs neither. (Recommended for production systems.) # If you create additional handlers, add them as a key to [handlers] [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic # If you create additional formatters, add them as a key to [formatters] [formatter_generic] format = %(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S moksha.hub-1.4.1/setup.cfg0000644000175000017500000000007312371232660017313 0ustar threebeanthreebean00000000000000[egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 moksha.hub-1.4.1/README0000644000175000017500000000016012320571512016343 0ustar threebeanthreebean00000000000000====== Moksha ====== http://mokshaproject.net Documentation ------------- http://mokshaproject.net/apps/docs moksha.hub-1.4.1/COPYING0000644000175000017500000002613612320571512016531 0ustar threebeanthreebean00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. moksha.hub-1.4.1/moksha.hub.egg-info/0000755000175000017500000000000012371232660021223 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha.hub.egg-info/PKG-INFO0000644000175000017500000000043012371232660022315 0ustar threebeanthreebean00000000000000Metadata-Version: 1.0 Name: moksha.hub Version: 1.4.1 Summary: Hub components for Moksha. 
Home-page: http://moksha.fedorahosted.org Author: Luke Macken, John (J5) Palmieri, Mairin Duffy, and Ralph Bean Author-email: UNKNOWN License: UNKNOWN Description: UNKNOWN Platform: UNKNOWN moksha.hub-1.4.1/moksha.hub.egg-info/SOURCES.txt0000644000175000017500000000162412371232660023112 0ustar threebeanthreebean00000000000000AUTHORS COPYING MANIFEST.in README development.ini setup.py moksha/__init__.py moksha.hub.egg-info/PKG-INFO moksha.hub.egg-info/SOURCES.txt moksha.hub.egg-info/dependency_links.txt moksha.hub.egg-info/entry_points.txt moksha.hub.egg-info/namespace_packages.txt moksha.hub.egg-info/requires.txt moksha.hub.egg-info/top_level.txt moksha/hub/__init__.py moksha/hub/hub.py moksha/hub/messaging.py moksha/hub/monitoring.py moksha/hub/reactor.py moksha/hub/amqp/__init__.py moksha/hub/amqp/base.py moksha/hub/amqp/pyamqplib.py moksha/hub/amqp/qpid010.py moksha/hub/amqp/qpid08.py moksha/hub/api/__init__.py moksha/hub/api/consumer.py moksha/hub/api/producer.py moksha/hub/stomp/__init__.py moksha/hub/stomp/protocol.py moksha/hub/stomp/stomp.py moksha/hub/tests/__init__.py moksha/hub/tests/test_hub.py moksha/hub/tests/test_websockets.py moksha/hub/zeromq/__init__.py moksha/hub/zeromq/base.py moksha/hub/zeromq/zeromq.pymoksha.hub-1.4.1/moksha.hub.egg-info/dependency_links.txt0000644000175000017500000000000112371232660025271 0ustar threebeanthreebean00000000000000 moksha.hub-1.4.1/moksha.hub.egg-info/requires.txt0000644000175000017500000000005512371232660023623 0ustar threebeanthreebean00000000000000moksha.common>=1.0.6 Twisted pyzmq txZMQ txWSmoksha.hub-1.4.1/moksha.hub.egg-info/top_level.txt0000644000175000017500000000000712371232660023752 0ustar threebeanthreebean00000000000000moksha moksha.hub-1.4.1/moksha.hub.egg-info/namespace_packages.txt0000644000175000017500000000000712371232660025553 0ustar threebeanthreebean00000000000000moksha moksha.hub-1.4.1/moksha.hub.egg-info/entry_points.txt0000644000175000017500000000021212371232660024514 0ustar threebeanthreebean00000000000000 [console_scripts] moksha-hub = moksha.hub:main [moksha.stream] monitoring = moksha.hub.monitoring:MonitoringProducer moksha.hub-1.4.1/setup.py0000644000175000017500000000336312371232650017210 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup, find_packages import sys # This is required (oddly) to fix a python 2.7 bug with nose tests. 
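# (Importing multiprocessing and logging at module scope sidesteps spurious
# errors from multiprocessing's atexit handler when nose finishes a test run
# under Python 2.7 -- see http://bugs.python.org/issue15881 -- which is also
# why any import failure here is silently ignored.)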
try: import multiprocessing import logging except Exception: pass tests_require = [ 'nose', 'mock', 'pyzmq', 'websocket-client', ] if sys.version_info[0] == 2 and sys.version_info[1] <= 6: tests_require.extend([ 'unittest2', ]) setup( name='moksha.hub', version='1.4.1', description='Hub components for Moksha.', author='Luke Macken, John (J5) Palmieri, Mairin Duffy, and Ralph Bean', author_email='', url='http://moksha.fedorahosted.org', install_requires=[ "moksha.common>=1.0.6", "Twisted", #"stomper", "pyzmq", "txZMQ", "txWS", #"python-daemon", ], packages=find_packages(exclude=['ez_setup']), include_package_data=True, test_suite='nose.collector', namespace_packages=['moksha'], tests_require=tests_require, entry_points=""" [console_scripts] moksha-hub = moksha.hub:main [moksha.stream] monitoring = moksha.hub.monitoring:MonitoringProducer """, ) moksha.hub-1.4.1/moksha/0000755000175000017500000000000012371232660016754 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/0000755000175000017500000000000012371232660017532 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/amqp/0000755000175000017500000000000012371232660020470 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/amqp/pyamqplib.py0000644000175000017500000001075712326012501023040 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # Authors: Luke Macken import logging from moksha.common.lib.converters import asbool from moksha.hub.amqp.base import BaseAMQPHubExtension log = logging.getLogger(__name__) NONPERSISTENT_DELIVERY = PERSISTENT_DELIVERY = range(1, 3) class AMQPLibHubExtension(BaseAMQPHubExtension): """ An AMQPHub implemention using the amqplib module """ def __init__(self, hub, config): import amqplib.client_0_8 as amqp self.config = config broker = self.config.get('amqp_broker') ssl = asbool(self.config.get('amqp_broker_ssl', False)) use_threading = asbool(self.config.get('amqp_broker_threaded', False)) username = self.config.get('amqp_broker_username', 'guest') password = self.config.get('amqp_broker_password', 'guest') self.conn = amqp.Connection( host=broker, ssl=ssl, use_threading=use_threading, userid=username, password=password ) self.channel = self.conn.channel() self.channel.access_request( '/data', active=True, write=True, read=True) super(AMQPLibHubExtension, self).__init__() def create_queue(self, queue, exchange='amq.fanout', durable=True, exclusive=False, auto_delete=False): """ Declare a `queue` and bind it to an `exchange` """ if not queue in self.queues: log.info("Creating %s queue" % queue) self.channel.queue_declare(queue, durable=durable, exclusive=exclusive, auto_delete=auto_delete) def exchange_declare(self, exchange, type='fanout', durable=True, auto_delete=False): self.channel.exchange_declare(exchange=exchange, type=type, durable=durable, auto_delete=auto_delete) def queue_bind(self, queue, exchange, routing_key=''): self.channel.queue_bind(queue, exchange, routing_key=routing_key) def send_message(self, topic, message, **headers): """ Send an AMQP message to a given exchange with the specified routing key """ import amqplib.client_0_8 as amqp msg = amqp.Message(message, **headers) msg.properties["delivery_mode"] = headers.get( "delivery_mode", PERSISTENT_DELIVERY) self.channel.basic_publish( msg, headers.get('exchange', 'amq.topic'), routing_key=topic ) super(AMQPLibHubExtension, self).send_message( topic, message, **headers) def subscribe(self, topic, callback): queue_name = str(uuid.uuid4()) self.queue_declare(queue=queue_name, exclusive=True, auto_delete=True) self.exchange_bind(queue_name, binding_key=topic) self.queue_subscribe(queue_name, callback) super(AMQPLibHubExtension, self).subscribe(topic, callback) def get_message(self, queue): """ Immediately grab a message from the queue. This call will not block, and will return None if there are no new messages in the queue. """ msg = self.channel.basic_get(queue, no_ack=True) return msg def queue_subscribe(self, queue, callback, no_ack=True): """ Consume messages from a given `queue`, passing each to `callback` """ self.channel.basic_consume(queue, callback=callback, no_ack=no_ack) def wait(self): self.channel.wait() def close(self): try: if hasattr(self, 'channel') and self.channel: self.channel.close() except Exception as e: log.exception(e) try: if hasattr(self, 'conn') and self.conn: self.conn.close() except Exception as e: log.exception(e) moksha.hub-1.4.1/moksha/hub/amqp/__init__.py0000644000175000017500000000244312326012501022572 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Here is where we configure which AMQP hub implementation we are going to use. """ import logging log = logging.getLogger("moksha.hub") try: from moksha.hub.amqp.qpid010 import QpidAMQPHubExtension AMQPHubExtension = QpidAMQPHubExtension except ImportError: log.warn("Cannot find qpid python module. Make sure you have python-qpid installed.") try: from moksha.hub.amqp.pyamqplib import AMQPLibHubExtension AMQPHubExtension = AMQPLibHubExtension except ImportError: log.warn("Cannot find pyamqplib") log.warn("Using FakeHub AMQP broker. Don't expect AMQP to work") class FakeHub(object): pass AMQPHub = FakeHub moksha.hub-1.4.1/moksha/hub/amqp/qpid08.py0000644000175000017500000000565412320571512022155 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Obsolete Qpid AMQP 0.8 implementation. ## # AMQP 0.8 legacy modules import qpid from qpid.client import Client from qpid.content import Content class QpidAMQP08Hub(BaseAMQPHub): client = None def __init__(self, broker, username=None, password=None, ssl=False): """ Initialize the Moksha Hub. `broker` [amqps://][[/]@][:] """ self.set_broker(broker) self.init_qpid_connection() # We need 0.8 for RabbitMQ self.amqp_spec=qpid.spec08.load('/usr/share/amqp/amqp.0-8.xml') def set_broker(self, broker): self.url = URL(broker) self.user = self.url.password or 'guest' self.password = self.url.password or 'guest' self.host = self.url.host if self.url.scheme == URL.AMQPS: self.ssl = True default_port = 5671 else: self.ssl = False default_port = 5672 self.port = self.url.port or default_port def init_qpid_connection(self): self.client = Client(self.host, self.port, spec=self.amqp_spec) self.client.start({'LOGIN': self.user, 'PASSWORD': self.password}) self.conn = self.client.channel(1) self.conn.channel_open() print "opened channel!" 
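    # A rough, hypothetical usage sketch for this legacy AMQP 0.8 hub; the
    # argument names follow the method signatures defined below, and the
    # broker string mirrors the "guest/guest@localhost" form used in
    # development.ini:
    #
    #   hub = QpidAMQP08Hub('guest/guest@localhost:5672')
    #   hub.create_queue('demo_queue', routing_key='demo.topic')
    #   hub.send_message('hello world', routing_key='demo.topic')
    #   print hub.get('demo_queue')
    #   hub.close()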
def create_queue(self, queue, routing_key, exchange='amq.topic', auto_delete=False, durable=True, **kw): self.conn.queue_declare(queue=queue, auto_delete=auto_delete, durable=durable, **kw) self.conn.queue_bind(queue=queue, exchange=exchange, routing_key=routing_key) print "Created %s queue" % queue def send_message(self, message, exchange='amq.topic', routing_key=''): self.conn.basic_publish(routing_key=routing_key, content=Content(message), exchange=exchange) def get(self, queue): t = self.conn.basic_consume(queue=queue, no_ack=True) print "t.consumer_tag =", t.consumer_tag q = self.client.queue(t.consumer_tag) msg = q.get() print "got message: ", msg return msg.content.body q.close() def close(self): if self.conn: print "Closing connection" self.conn.close() moksha.hub-1.4.1/moksha/hub/amqp/qpid010.py0000644000175000017500000001134112320571512022214 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Luke Macken import logging from qpid.util import connect, URL, ssl from qpid.datatypes import Message, uuid4, RangedSet from qpid.connection import Connection from qpid.session import SessionClosed from moksha.hub.amqp.base import BaseAMQPHubExtension log = logging.getLogger('moksha.hub') class QpidAMQPHubExtension(BaseAMQPHubExtension): """ Initialize the Moksha Hub. 
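    The broker string comes from the ``amqp_broker`` config option; a
    hypothetical example would be ``guest/guest@localhost`` or, for SSL,
    ``amqps://guest/guest@broker.example.com:5671``.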
`broker` [amqps://][[/]@][:] """ def __init__(self, hub, config): self.config = config self.set_broker(self.config.get('amqp_broker')) self.socket = connect(self.host, self.port) if self.url.scheme == URL.AMQPS: self.socket = ssl(self.socket) self.connection = Connection(sock=self.socket, username=self.user, password=self.password) self.connection.start() log.info("Connected to AMQP Broker %s" % self.host) self.session = self.connection.session(str(uuid4())) self.local_queues = [] super(QpidAMQPHubExtension, self).__init__() def set_broker(self, broker): self.url = URL(broker) self.user = self.url.password or 'guest' self.password = self.url.password or 'guest' self.host = self.url.host if self.url.scheme == URL.AMQPS: self.ssl = True default_port = 5671 else: self.ssl = False default_port = 5672 self.port = self.url.port or default_port def send_message(self, topic, message, **headers): headers['routing_key'] = headers.get('routing_key', topic) props = self.session.delivery_properties(**headers) msg = Message(props, message) self.session.message_transfer( destination=headers.get('exchange', 'amq.topic'), message=msg) super(QpidAMQPHubExtension, self).send_message( topic, message, **headers) def subscribe_queue(self, server_queue_name, local_queue_name): queue = self.session.incoming(local_queue_name) self.session.message_subscribe(queue=server_queue_name, destination=local_queue_name) queue.start() return queue def queue_declare(self, queue, durable=True, exclusive=False, auto_delete=False, **kw): self.session.queue_declare(queue=queue, exclusive=exclusive, auto_delete=auto_delete, arguments={'qpid.max_count': 0, 'qpid.max_size': 0}, **kw) def exchange_bind(self, queue, exchange='amq.topic', binding_key=None): self.session.exchange_bind(exchange=exchange, queue=queue, binding_key=binding_key) def message_subscribe(self, queue, destination): return self.session.message_subscribe(queue=queue, destination=destination) def message_accept(self, message): try: self.session.message_accept(RangedSet(message.id)) except SessionClosed: log.debug("Accepted message on closed session: %s" % message.id) pass def subscribe(self, topic, callback): queue_name = '_'.join([ "moksha_consumer", self.session.name, str(uuid4()), ]) server_queue_name = local_queue_name = queue_name self.queue_declare(queue=server_queue_name, exclusive=True, auto_delete=True) self.exchange_bind(server_queue_name, binding_key=topic) self.local_queues.append(self.session.incoming(local_queue_name)) self.message_subscribe(queue=server_queue_name, destination=local_queue_name) self.local_queues[-1].start() self.local_queues[-1].listen(callback) super(QpidAMQPHubExtension, self).subscribe(topic, callback) def close(self): self.session.close(timeout=2) self.connection.close(timeout=2) self.socket.close() moksha.hub-1.4.1/moksha/hub/amqp/base.py0000644000175000017500000000311512320571512021750 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # Authors: Luke Macken from moksha.hub.messaging import MessagingHubExtension class BaseAMQPHubExtension(MessagingHubExtension): """ A skeleton class for what we expect from an AMQP implementation. This allows us to bounce between different AMQP modules without too much pain and suffering. """ conn = None def __init__(self): """ Initialize a connection to a specified broker. This method must set self.channel to an active channel. """ pass def send_message(self, topic, message, **headers): pass def subscribe(self, topic, callback): pass def create_queue(self, queue, exchange, durable, exclusive, auto_delete): raise NotImplementedError def bind_queue(self, queue, exchange): raise NotImplementedError def wait(self): """ Block for new messages """ raise NotImplementedError def close(self): raise NotImplementedError moksha.hub-1.4.1/moksha/hub/messaging.py0000644000175000017500000000201612320571512022054 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Luke Macken class MessagingHubExtension(object): """ A generic messaging hub. This class represents the base functionality of the protocol-level hubs. """ def __init__(self): pass def send_message(self, topic, message, **headers): pass def subscribe(self, topic, callback): pass def unsubscribe(self, callback): pass moksha.hub-1.4.1/moksha/hub/hub.py0000644000175000017500000003602412370147671020675 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # Authors: Luke Macken # Ralph Bean import os import six import sys import json as JSON from collections import defaultdict from moksha.common.lib.helpers import appconfig # Look in the current directory for egg-info if os.getcwd() not in sys.path: sys.path.insert(0, os.getcwd()) import pkg_resources import logging from twisted.internet import protocol from txws import WebSocketFactory from moksha.common.lib.helpers import get_moksha_config_path from moksha.common.lib.converters import asbool AMQPHubExtension, StompHubExtension, ZMQHubExtension = None, None, None try: from moksha.hub.amqp import AMQPHubExtension except ImportError: pass try: from moksha.hub.stomp import StompHubExtension except ImportError: pass try: from moksha.hub.zeromq import ZMQHubExtension except ImportError as e: pass log = logging.getLogger('moksha.hub') _hub = None from moksha.hub import NO_CONFIG_MESSAGE def find_hub_extensions(config): """ Return a tuple of hub extensions found in the config file. """ possible_bases = { 'amqp_broker': AMQPHubExtension, 'stomp_broker': StompHubExtension, 'stomp_uri': StompHubExtension, 'zmq_enabled': ZMQHubExtension, } broker_vals = [config.get(k, None) for k in possible_bases.keys()] # If we're running outside of middleware and hub, load config if not any(broker_vals): config_path = get_moksha_config_path() if not config_path: raise ValueError(NO_CONFIG_MESSAGE) cfg = appconfig('config:' + config_path) config.update(cfg) broker_vals = [config.get(k, None) for k in possible_bases.keys()] # If there are no brokers defined.. that's a problem. if not any(broker_vals): raise ValueError("No messaging methods defined.") if len(list(filter(None, broker_vals))) > 1: log.warning("Running with multiple brokers. " "This mode is experimental and may or may not work") extensions = set([ b for k, b in possible_bases.items() if config.get(k, None) and b ]) return extensions class MokshaHub(object): topics = None # {topic_name: [callback,]} def __init__(self, config, topics=None): self.config = config if not self.topics: self.topics = defaultdict(list) if topics is None: topics = {} for topic, callbacks in topics.items(): if not isinstance(callbacks, list): callbacks = [callbacks] for callback in callbacks: self.topics[topic].append(callback) self.extensions = [ ext(self, config) for ext in find_hub_extensions(config) ] def send_message(self, topic, message, jsonify=True): """ Send a message to a specific topic. :topic: A topic or list of topics to send the message to. :message: The message body. Can be a string, list, or dict. :jsonify: To automatically encode non-strings to JSON """ if jsonify: message = JSON.dumps(message) if not isinstance(topic, list): topics = [topic] else: topics = topic for topic in topics: if isinstance(topic, six.text_type): # txzmq isn't smart enough to handle unicode yet. # Try removing this and sending a unicode topic in the future # to see if it works. topic = topic.encode('utf-8') for ext in self.extensions: ext.send_message(topic, message) def close(self): try: for ext in self.extensions: if hasattr(ext, 'close'): ext.close() except Exception as e: log.warning('Exception when closing MokshaHub: %r' % e) def unsubscribe(self, callback): """ This removes the callback from any backends where it can be found. """ for ext in self.extensions: ext.unsubscribe(callback) def subscribe(self, topic, callback): """ This method will cause the specified `callback` to be executed with each message that goes through a given topic. 
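        A minimal, illustrative example (the exact message object handed to
        the callback -- a plain dict, a ZMQMessage, etc. -- depends on which
        backend extension delivers it)::

            hub = MokshaHub(config)
            hub.subscribe('org.moksha.test', lambda msg: log.info("%r", msg))
            hub.send_message('org.moksha.test', {'hello': 'world'})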
""" for ext in self.extensions: ext.subscribe(topic, callback) def consume_amqp_message(self, message): self.message_accept(message) try: topic = message.get('delivery_properties').routing_key except AttributeError: # If we receive an AMQP message without a toipc, don't # proxy it to STOMP return # TODO -- this isn't extensible. how should forwarding work if there # are three broker types enabled? for ext in self.extensions: if StompHubExtension and isinstance(ext, StompHubExtension): ext.send_message(self, topic.encode('utf8'), message.body.encode('utf8')) def consume_stomp_message(self, message): from moksha.hub.reactor import reactor topic = message['headers'].get('destination') if not topic: log.debug("Got message without a topic: %r" % message) return # FIXME: only do this if the consumer wants it `jsonified` try: body = JSON.loads(message['body']) except Exception as e: log.warning('Cannot decode message from JSON: %s' % e) #body = {} body = message['body'] # feed all of our consumers for callback in self.topics.get(topic, []): reactor.callInThread(callback, {'body': body, 'topic': topic}) class CentralMokshaHub(MokshaHub): """ The Moksha Hub is responsible for initializing all of the Hooks, AMQP queues, exchanges, etc. """ producers = None # [,] def __init__(self, config, consumers=None, producers=None): log.info('Loading the Moksha Hub') self.topics = defaultdict(list) # These are used to override the entry-points behavior self._consumers = consumers self._producers = producers super(CentralMokshaHub, self).__init__(config) # FIXME -- this needs to be reworked. # TODO -- consider moving this to the AMQP specific modules for ext in self.extensions: if AMQPHubExtension and isinstance(ext, AMQPHubExtension): self.__init_amqp() self.__init_consumers() self.__init_producers() self.__init_websocket_server() def __init_websocket_server(self): from moksha.hub.reactor import reactor if self.config.get('moksha.livesocket.backend', 'amqp') != 'websocket': return log.info("Enabling websocket server") port = int(self.config.get('moksha.livesocket.websocket.port', 0)) if not port: raise ValueError("websocket is backend, but no port set") interface = self.config.get('moksha.livesocket.websocket.interface') interface = interface or '' class RelayProtocol(protocol.Protocol): moksha_hub = self def send_to_ws(self, zmq_message): """ Callback. Sends a message to the browser """ msg = JSON.dumps({ 'topic': zmq_message.topic, 'body': JSON.loads(zmq_message.body), }) self.transport.write(msg) def connectionLost(self, reason): log.info("Lost Websocket connection. Cleaning up.") self.moksha_hub.unsubscribe(self.send_to_ws) def dataReceived(self, data): """ Messages sent from the browser arrive here. This hook: 1) Acts on any special control messages 2) Forwards messages onto the zeromq hub """ try: data = data.decode('utf-8') json = JSON.loads(data) if json['topic'] == '__topic_subscribe__': # If this is a custom control message, then subscribe. _topic = json['body'] log.info("Websocket subscribing to %r." % _topic) self.moksha_hub.subscribe(_topic, self.send_to_ws) else: # FIXME - The following is disabled temporarily until # we can devise a secure method of "firewalling" where # messages can and can't go. See the following for # more info: # https://fedorahosted.org/moksha/ticket/245 # https://github.com/gregjurman/zmqfirewall key = 'moksha.livesocket.websocket.client2server' if asbool(self.moksha_hub.config.get(key, False)): # Simply forward on the message through the hub. 
self.moksha_hub.send_message( json['topic'], json['body'], ) except Exception as e: import traceback log.error(traceback.format_exc()) class RelayFactory(protocol.Factory): def buildProtocol(self, addr): return RelayProtocol() self.websocket_server = reactor.listenTCP( port, WebSocketFactory(RelayFactory()), interface=interface, ) log.info("Websocket server set to run on port %r" % port) # TODO -- consider moving this to the AMQP specific modules def __init_amqp(self): # Ok this looks odd at first. I think this is only used when # we are briding stomp/amqp. Since each producer and consumer # opens up their own AMQP connections anyway if not (StompHubExtension and isinstance(self, StompHubExtension)): return log.debug("Initializing local AMQP queue...") self.server_queue_name = 'moksha_hub_' + self.session.name self.queue_declare(queue=self.server_queue_name, exclusive=True, auto_delete=True) self.exchange_bind(self.server_queue_name, binding_key='#') self.local_queue_name = 'moksha_hub' self.local_queue = self.session.incoming(self.local_queue_name) self.message_subscribe(queue=self.server_queue_name, destination=self.local_queue_name) self.local_queue.start() self.local_queue.listen(self.consume_amqp_message) @property def num_consumers(self): return len([ c for c in self.consumers if getattr(c, '_initialized', None)]) @property def num_producers(self): return len([ p for p in self.producers if getattr(p, '_initialized', None)]) def __init_consumers(self): """ Instantiate and run the consumers """ log.info('Loading Consumers') if self._consumers is None: log.debug("Loading from entry-points.") self._consumers = [] for consumer in pkg_resources.iter_entry_points('moksha.consumer'): try: c = consumer.load() self._consumers.append(c) except Exception as e: log.exception("Failed to load %r consumer." % consumer.name) else: log.debug("Loading explicitly passed entry-points.") self.consumers = [] for c_class in self._consumers: try: c = c_class(self) if not getattr(c, "_initialized", None): log.info("%s:%s not initialized." % ( c_class.__module__, c_class.__name__,)) self.consumers.append(c) # This can be dynamically assigned during instantiation topic = c.topic if topic not in self.topics: self.topics[topic] = [] if c.consume not in self.topics[topic]: self.topics[topic].append(c.consume) except Exception as e: log.exception("Failed to init %r consumer." % c_class) def __init_producers(self): """ Initialize all producers (aka data streams) """ log.info('Loading Producers') if self._producers is None: log.debug("Loading from entry-points.") self._producers = [] for producer in sum([ list(pkg_resources.iter_entry_points(epoint)) for epoint in ('moksha.producer', 'moksha.stream') ], []): try: p = producer.load() self._producers.append(p) except Exception as e: log.exception("Failed to load %r producer." % producer.name) else: log.debug("Loading explicitly passed entry-points.") self.producers = [] for producer_class in self._producers: log.info('Initializing %s producer' % producer_class.__name__) try: producer_obj = producer_class(self) if not getattr(producer_obj, "_initialized", None): log.info("%s:%s not initialized." % ( producer_class.__module__, producer_class.__name__,)) self.producers.append(producer_obj) except Exception as e: log.exception("Failed to init %r producer." 
% producer_class) def create_topic(self, topic): if AMQPHubExtension and self.amqp_broker: AMQPHubExtension.create_queue(topic) # @@ remove this when we keep track of this in a DB if topic not in self.topics: self.topics[topic] = [] def close(self): log.debug("Stopping the CentralMokshaHub") super(CentralMokshaHub, self).close() if self.producers: while self.producers: producer = self.producers.pop() log.debug("Stopping producer %s" % producer) producer.stop() if self.consumers: while self.consumers: consumer = self.consumers.pop() log.debug("Stopping consumer %s" % consumer) consumer.stop() # For backwards compatibility stop = close if __name__ == '__main__': from moksha.hub import main main() moksha.hub-1.4.1/moksha/hub/stomp/0000755000175000017500000000000012371232660020674 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/stomp/__init__.py0000644000175000017500000000123712326012501022776 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from moksha.hub.stomp.stomp import StompHubExtension moksha.hub-1.4.1/moksha/hub/stomp/protocol.py0000644000175000017500000000535412326012501023104 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Luke Macken # # Based on code from stomper's examples # (c) Oisin Mulvihill, 2007-07-26. # License: http://www.apache.org/licenses/LICENSE-2.0 import logging try: # stomper is not ready for py3 import stomper from stomper.stompbuffer import StompBuffer from twisted.internet.protocol import Protocol class Base(Protocol, stomper.Engine): pass except ImportError: Base = object log = logging.getLogger(__name__) class StompProtocol(Base): def __init__(self, client, username='', password=''): stomper.Engine.__init__(self) self.username = username self.password = password self.counter = 1 self.client = client self.buffer = StompBuffer() def connected(self, msg): """Once connected, subscribe to message queues """ stomper.Engine.connected(self, msg) log.info("StompProtocol Connected: session %s." % msg['headers']['session']) self.client.connected() #f = stomper.Frame() #f.unpack(stomper.subscribe(topic)) #print f #return f.pack() def ack(self, msg): """Processes the received message. I don't need to generate an ack message. 
""" #stomper.Engine.ack(self, msg) #log.info("SENDER - received: %s " % msg['body']) return stomper.NO_REPONSE_NEEDED def subscribe(self, dest, **headers): f = stomper.Frame() f.unpack(stomper.subscribe(dest)) f.headers.update(headers) self.transport.write(f.pack()) def connectionMade(self): """ Register with stomp server """ cmd = stomper.connect(self.username, self.password) self.transport.write(cmd) def dataReceived(self, data): """Data received, react to it and respond if needed """ self.buffer.appendData(data) while True: msg = self.buffer.getOneMessage() if msg is None: break returned = self.react(msg) if returned: self.transport.write(returned) self.client.hub.consume_stomp_message(msg) moksha.hub-1.4.1/moksha/hub/stomp/stomp.py0000644000175000017500000001164112326012501022401 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Luke Macken # Ralph Bean try: import stomper except ImportError: pass import logging from twisted.internet.protocol import ClientFactory from moksha.hub.stomp.protocol import StompProtocol from moksha.hub.messaging import MessagingHubExtension from moksha.hub.reactor import reactor log = logging.getLogger('moksha.hub') class StompHubExtension(MessagingHubExtension, ClientFactory): username = None password = None proto = None frames = None def __init__(self, hub, config): self.config = config self.hub = hub self._topics = hub.topics.keys() self._frames = [] uri = self.config.get('stomp_uri', None) if not uri: port = self.config.get('stomp_port', 61613) host = self.config.get('stomp_broker') uri = "%s:%i" % (host, port) # A list of addresses over which we emulate failover() self.addresses = [pair.split(":") for pair in uri.split(',')] self.address_index = 0 # An exponential delay used to back off if we keep failing. 
self._delay = 0.1 self.username = self.config.get('stomp_user', 'guest') self.password = self.config.get('stomp_pass', 'guest') self.key = self.config.get('stomp_ssl_key', None) self.crt = self.config.get('stomp_ssl_crt', None) self.connect(self.addresses[self.address_index], self.key, self.crt) super(StompHubExtension, self).__init__() def connect(self, address, key=None, crt=None): host, port = address if key and crt: log.info("connecting encrypted to %r %r %r" % ( host, int(port), self)) from twisted.internet import ssl with open(key) as key_file: with open(crt) as cert_file: client_cert = ssl.PrivateCertificate.loadPEM( key_file.read() + cert_file.read()) ssl_context = client_cert.options() reactor.connectSSL(host, int(port), self, ssl_context) else: log.info("connecting unencrypted to %r %r %r" % ( host, int(port), self)) reactor.connectTCP(host, int(port), self) def buildProtocol(self, addr): self._delay = 0.1 log.info("build protocol was called with %r" % addr) self.proto = StompProtocol(self, self.username, self.password) return self.proto def connected(self): for topic in self._topics: log.info('Subscribing to %s topic' % topic) self.subscribe(topic, callback=lambda msg: None) self._topics = [] for frame in self._frames: log.info('Flushing queued frame') self.proto.transport.write(frame.pack()) self._frames = [] def clientConnectionLost(self, connector, reason): log.info('Lost connection. Reason: %s' % reason) self.failover() def clientConnectionFailed(self, connector, reason): log.error('Connection failed. Reason: %s' % reason) self.failover() def failover(self): self.address_index = (self.address_index + 1) % len(self.addresses) args = (self.addresses[self.address_index], self.key, self.crt,) self._delay = self._delay * (1 + (2.0 / len(self.addresses))) log.info('(failover) reconnecting in %f seconds.' % self._delay) reactor.callLater(self._delay, self.connect, *args) def send_message(self, topic, message, **headers): f = stomper.Frame() f.unpack(stomper.send(topic, message)) if not self.proto: log.info("Queueing stomp frame for later delivery") self._frames.append(f) else: self.proto.transport.write(f.pack()) super(StompHubExtension, self).send_message(topic, message, **headers) def subscribe(self, topic, callback): # FIXME -- note, the callback is just thrown away here. if not self.proto: log.info("queuing topic for later subscription %r." % topic) if topic not in self._topics: self._topics.append(topic) else: log.info("sending subscription to the protocol") self.proto.subscribe(topic) super(StompHubExtension, self).subscribe(topic, callback) moksha.hub-1.4.1/moksha/hub/zeromq/0000755000175000017500000000000012371232660021047 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/zeromq/zeromq.py0000644000175000017500000002122712335417102022736 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # Authors: Ralph Bean import logging import six import socket import time import txzmq import zmq from kitchen.text.converters import to_bytes from moksha.common.lib.converters import asbool from moksha.hub.zeromq.base import BaseZMQHubExtension log = logging.getLogger('moksha.hub') # TODO -- is there a better thing to use in this thing's place? A dict-like # object that also supports __getattr__ access would be ideal. class ZMQMessage(object): def __init__(self, topic, body): self.topic = topic self.body = body def __json__(self): return {'topic': self.topic, 'body': self.body} def __repr__(self): return "" % (self.topic, self.body) def hostname2ipaddr(endpoint): """ Utility function to convert "tcp://hostname:port" to "tcp://ip:port" Why? -- http://bit.ly/Jwdf6v """ hostname = endpoint[endpoint.rfind('/') + 1:endpoint.rfind(':')] ip_addrs = socket.gethostbyname_ex(hostname)[2] log.info("Resolving %s to %r" % (hostname, ip_addrs)) for addr in ip_addrs: yield endpoint.replace(hostname, addr) def splat2ipaddr(endpoint): return [endpoint.replace("*", "127.0.0.1")] class ZMQHubExtension(BaseZMQHubExtension): def __init__(self, hub, config): self.config = config self.validate_config(self.config) self.strict = asbool(self.config.get('zmq_strict', False)) self.subscriber_factories = {} self.context = zmq.Context(1) # Configure txZMQ to use our highwatermark and keepalive if we have 'em self.connection_cls = txzmq.ZmqSubConnection self.connection_cls.highWaterMark = \ config.get('high_water_mark', 0) self.connection_cls.tcpKeepalive = \ config.get('zmq_tcp_keepalive', 0) self.connection_cls.tcpKeepaliveCount = \ config.get('zmq_tcp_keepalive_cnt', 0) self.connection_cls.tcpKeepaliveIdle = \ config.get('zmq_tcp_keepalive_idle', 0) self.connection_cls.tcpKeepaliveInterval = \ config.get('zmq_tcp_keepalive_intvl', 0) self.connection_cls.reconnectInterval = \ config.get('zmq_reconnect_ivl', 100) self.connection_cls.reconnectIntervalMax = \ config.get('zmq_reconnect_ivl_max', 100) # Set up the publishing socket self.pub_socket = self.context.socket(zmq.PUB) _endpoints = self.config.get('zmq_publish_endpoints', '').split(',') for endpoint in (e for e in _endpoints if e): log.info("Binding publish socket to '%s'" % endpoint) try: self.pub_socket.bind(endpoint) except zmq.ZMQError: map(self.pub_socket.bind, hostname2ipaddr(endpoint)) # Factory used to lazily produce subsequent subscribers self.twisted_zmq_factory = txzmq.ZmqFactory() # Establish a list of subscription endpoints for later use _endpoints = self.config['zmq_subscribe_endpoints'].split(',') method = self.config.get('zmq_subscribe_method', 'connect') if method == 'bind': _endpoints = sum(map(list, map(hostname2ipaddr, _endpoints)), []) else: # Required for zeromq-3.x. _endpoints = sum(map(list, map(splat2ipaddr, _endpoints)), []) self.sub_endpoints = [ txzmq.ZmqEndpoint(method, ep) for ep in _endpoints ] # This is required so that the publishing socket can fully set itself # up before we start trying to send messages on it. This is a # documented zmq issue that they do not plan to fix. time.sleep(1) super(ZMQHubExtension, self).__init__() def validate_config(self, config): if not asbool(config.get('zmq_enabled', False)): raise ValueError("zmq_enabled not set to True") required_attrs = ['zmq_publish_endpoints', 'zmq_subscribe_endpoints'] for attr in required_attrs: if not config.get(attr, None): log.warn("No '%s' set. Are you sure?" 
% attr) continue endpoints = config[attr].split(',') for endpoint in endpoints: if 'localhost' in endpoint: # This is why http://bit.ly/Jwdf6v raise ValueError("'localhost' in %s is disallowed" % attr) def send_message(self, topic, message, **headers): if isinstance(topic, six.text_type): topic = topic.encode('utf-8') if isinstance(message, six.text_type): message = message.encode('utf-8') try: self.pub_socket.send_multipart([topic, message]) except zmq.ZMQError as e: log.warn("Couldn't send message: %r" % e) super(ZMQHubExtension, self).send_message(topic, message, **headers) def unsubscribe(self, callback): for endpoint, factory in self.subscriber_factories.items(): kill_list = [] for intercept_func in factory._moksha_callbacks: if intercept_func.handled_callback == callback: kill_list.append(intercept_func) for intercept_func in kill_list: factory._moksha_callbacks.remove(intercept_func) def subscribe(self, topic, callback): original_topic = topic # Mangle topic for zmq equivalence with AMQP topic = topic.replace('*', '') for endpoint in self.sub_endpoints: log.debug("Subscribing to %s on '%r'" % (topic, endpoint)) if endpoint in self.subscriber_factories: log.debug("Using cached txzmq factory.") s = self.subscriber_factories[endpoint] else: log.debug("Creating new txzmq factory.") try: s = self.subscriber_factories[endpoint] = \ self.connection_cls( self.twisted_zmq_factory, endpoint) except zmq.ZMQError as e: log.warn("Failed txzmq create on %r %r" % (endpoint, e)) continue def chain_over_moksha_callbacks(_body, _topic): if isinstance(_topic, six.binary_type): _topic = _topic.decode('utf-8') if isinstance(_body, six.binary_type): _body = _body.decode('utf-8') for f in s._moksha_callbacks: f(_body, _topic) s._moksha_callbacks = [] s.gotMessage = chain_over_moksha_callbacks def intercept(_body, _topic): """ The purpose of this intercept callback is twofold: - Callbacks from txzmq are called with two arguments, body and topic but moksha is expecting an object which has a 'body' attribute. We create that object and pass it on here. - 0mq topic-matching works differently than AMQP and STOMP. By default, subscribing to 'abc' will get you messages tagged 'abc' but also messages sent on the topic 'abcfoo' and 'abcbar'. Moksha introduces a custom parameter 'strict' (zmq_strict in the config file) that disallows this behavior. """ if self.strict and _topic != topic: return None elif not self.strict and not _topic.startswith(topic): # This second clause is a symptom that I'm doing something # wrong. The filtering should all be handled inside txZMQ # by setsockopt. return None return callback(ZMQMessage(_topic, _body)) intercept.handled_callback = callback # bookkeeping s._moksha_callbacks.append(intercept) s.subscribe(to_bytes(topic, encoding='utf8')) super(ZMQHubExtension, self).subscribe(original_topic, callback) def close(self): self.pub_socket.close() self.context.term() moksha.hub-1.4.1/moksha/hub/zeromq/__init__.py0000644000175000017500000000137012326012501023147 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Here is where we configure which zeromq hub implementation we are going to use. """ from moksha.hub.zeromq.zeromq import ZMQHubExtension moksha.hub-1.4.1/moksha/hub/zeromq/base.py0000644000175000017500000000266012320571512022333 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Luke Macken from moksha.hub.messaging import MessagingHubExtension class BaseZMQHubExtension(MessagingHubExtension): """ A skeleton class for what we expect from a zeromq implementation. This allows us to bounce between different zeromq modules without too much pain and suffering. """ def __init__(self): super(BaseZMQHubExtension, self).__init__() def send_message(self, topic, message, **headers): super(BaseZMQHubExtension, self).send_message(topic, message, **headers) def subscribe(self, topic, callback): super(BaseZMQHubExtension, self).subscribe(topic, callback) def unsubscribe(self, callback): super(BaseZMQHubExtension, self).unsubscribe(callback) def close(self): super(BaseZMQHubExtension, self).close() moksha.hub-1.4.1/moksha/hub/__init__.py0000644000175000017500000000667012370147671021662 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import logging import signal import sys import os try: from twisted.internet.error import ReactorNotRunning except ImportError: # Twisted 8.2.0 on RHEL5 class ReactorNotRunning(object): pass from moksha.common.lib.helpers import appconfig from moksha.common.lib.helpers import get_moksha_config_path log = logging.getLogger('moksha.hub') NO_CONFIG_MESSAGE = """ Cannot find Moksha configuration! Place a development.ini or production.ini in /etc/moksha or in the current directory. 
""" from moksha.hub.hub import CentralMokshaHub def setup_logger(verbose): logging.basicConfig() root = logging.getLogger() handler = root.handlers[0] level = verbose and logging.DEBUG or logging.INFO root.setLevel(level) format = logging.Formatter( '[%(name)12s] %(levelname)s %(asctime)s %(message)s') handler.setFormatter(format) def main(options=None, consumers=None, producers=None, framework=True): """ The main MokshaHub method """ # If we're running as a framework, then we're strictly calling other # people's code. So, as the outermost piece of software in the stack, we're # responsible for setting up logging. # If we're not running as a framework, but as a library, then someone else # is calling us. Therefore, we'll let them set up the logging themselves. if framework: setup_logger('-v' in sys.argv or '--verbose' in sys.argv) config = {} if not options: if sys.argv[-1].endswith('.ini'): config_path = os.path.abspath(sys.argv[-1]) else: config_path = get_moksha_config_path() if not config_path: print(NO_CONFIG_MESSAGE) return cfg = appconfig('config:' + config_path) config.update(cfg) else: config.update(options) hub = CentralMokshaHub(config, consumers=consumers, producers=producers) global _hub _hub = hub def handle_signal(signum, stackframe): from moksha.hub.reactor import reactor if signum in [signal.SIGHUP, signal.SIGINT]: hub.stop() try: reactor.stop() except ReactorNotRunning: pass signal.signal(signal.SIGHUP, handle_signal) signal.signal(signal.SIGINT, handle_signal) log.info("Running the MokshaHub reactor") from moksha.hub.reactor import reactor threadcount = config.get('moksha.threadpool_size', None) if not threadcount: N = int(config.get('moksha.workers_per_consumer', 1)) threadcount = 1 + hub.num_producers + hub.num_consumers * N threadcount = int(threadcount) log.info("Suggesting threadpool size at %i" % threadcount) reactor.suggestThreadPoolSize(threadcount) reactor.run(installSignalHandlers=False) log.info("MokshaHub reactor stopped") moksha.hub-1.4.1/moksha/hub/api/0000755000175000017500000000000012371232660020303 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/api/consumer.py0000644000175000017500000001551712370075145022523 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ :mod:`moksha.hub.api.consumer` - The Moksha Consumer API ======================================================== Moksha provides a simple API for creating "consumers" of message topics. This means that your consumer is instantiated when the MokshaHub is initially loaded, and receives each message for the specified topic through the :meth:`Consumer.consume` method. .. 
moduleauthor:: Luke Macken """ import json import threading import logging log = logging.getLogger('moksha.hub') try: import queue # py3 except ImportError: import Queue as queue # py2 from kitchen.iterutils import iterate from moksha.common.lib.helpers import create_app_engine import moksha.hub.reactor class Consumer(object): """ A message consumer """ topic = '' # Automatically decode JSON data jsonify = True # Internal use only _initialized = False _exception_count = 0 def __init__(self, hub): self.hub = hub self.log = log # Set up a queue to communicate between the main twisted thread # receiving raw messages, and a worker thread that pulls items off # the queue to do "consume" work. self.incoming = queue.Queue() callback = self._consume if self.jsonify: callback = self._consume_json for topic in iterate(self.topic): log.debug('Subscribing to consumer topic %s' % topic) self.hub.subscribe(topic, callback) # If the consumer specifies an 'app', then setup `self.engine` to # be a SQLAlchemy engine, along with a configured DBSession app = getattr(self, 'app', None) self.engine = self.DBSession = None if app: log.debug("Setting up individual engine for consumer") from sqlalchemy.orm import sessionmaker self.engine = create_app_engine(app, hub.config) self.DBSession = sessionmaker(bind=self.engine)() self.N = int(self.hub.config.get('moksha.workers_per_consumer', 1)) for i in range(self.N): moksha.hub.reactor.reactor.callInThread(self._work) self._initialized = True def __json__(self): if self._initialized: backlog = self.incoming.qsize() else: backlog = None return { "name": type(self).__name__, "module": type(self).__module__, "topic": self.topic, "initialized": self._initialized, "exceptions": self._exception_count, "jsonify": self.jsonify, "backlog": backlog, } def debug(self, message): idx = threading.current_thread().ident log.debug("%r thread %r | %s" % (type(self).__name__, idx, message)) def _consume_json(self, message): """ Convert our AMQP messages into a consistent dictionary format. This method exists because our STOMP & AMQP message brokers consume messages in different formats. This causes our messaging abstraction to leak into the consumers themselves. :Note: We do not pass the message headers to the consumer (in this AMQP consumer) because the current AMQP.js bindings do not allow the client to change them. Thus, we need to throw any topic/queue details into the JSON body itself. """ try: body = json.loads(message.body) except: log.debug("Unable to decode message body to JSON: %r" % message.body) body = message.body topic = None # Try some stuff for AMQP: try: topic = message.headers[0].routing_key except TypeError: # We didn't get a JSON dictionary pass except AttributeError: # We didn't get headers or a routing key? pass # If that didn't work, it might be zeromq if not topic: try: topic = message.topic except AttributeError: # Weird. I have no idea... pass try: message_as_dict = {'body': body, 'topic': topic} self._consume(message_as_dict) self._exception_count = 0 # Reset if everything went swimmingly except Exception: # Otherwise, keep track of how many exceptions we've hit in a row self._exception_count = self._exception_count + 1 # And then re-raise the exception to be logged raise def _consume(self, message): self.incoming.put(message) def _work(self): while True: # This is a blocking call. It waits until a message is available. 
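            # Messages are only *enqueued* from the Twisted reactor thread
            # (via _consume / _consume_json); the actual consume work happens
            # here, in one of the N worker threads started in __init__ with
            # reactor.callInThread.  stop() pushes one StopIteration sentinel
            # per worker to tell this loop to exit.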
message = self.incoming.get() # Then we are being asked to quit if message is StopIteration: break self.debug("Worker thread picking a message.") try: self.validate(message) except Exception as e: log.warn("Received invalid message %r" % e) continue try: self.pre_consume(message) except Exception as e: self.log.exception(message) try: self.consume(message) except Exception as e: self.log.exception(message) try: self.post_consume(message) except Exception as e: self.log.exception(message) self.debug("Going back to waiting on the incoming queue.") self.debug("Worker thread exiting.") def validate(self, message): """ Override to implement your own validation scheme. """ pass def pre_consume(self, message): pass def consume(self, message): raise NotImplementedError def post_consume(self, message): pass def send_message(self, topic, message): try: self.hub.send_message(topic, message) except Exception as e: log.error('Cannot send message: %s' % e) def stop(self): for i in range(getattr(self, 'N', 0)): self.incoming.put(StopIteration) if hasattr(self, 'hub'): self.hub.close() if getattr(self, 'DBSession', None): self.DBSession.close() moksha.hub-1.4.1/moksha/hub/api/__init__.py0000644000175000017500000000130012326012501022374 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .consumer import * from .producer import * from moksha.hub.hub import MokshaHub moksha.hub-1.4.1/moksha/hub/api/producer.py0000644000175000017500000000755112370075146022513 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ :mod:`moksha.hub.api.producer - The Moksha Producer API ======================================================= """ import logging import time from datetime import timedelta import moksha.hub.reactor from moksha.common.lib.helpers import create_app_engine log = logging.getLogger('moksha.hub') class Producer(object): """ The parent Producer class. 
""" # Internal use only _initialized = False _exception_count = 0 def __init__(self, hub): self.hub = hub self.log = log # If the stream specifies an 'app', then setup `self.engine` to # be a SQLAlchemy engine for that app, along with a configured # DBSession app = getattr(self, 'app', None) self.engine = self.DBSession = None if app: log.debug("Setting up individual engine for producer") from sqlalchemy.orm import sessionmaker self.engine = create_app_engine(app) self.DBSession = sessionmaker(bind=self.engine)() self._initialized = True def __json__(self): return { "name": type(self).__name__, "module": type(self).__module__, "initialized": self._initialized, "exceptions": self._exception_count, } def send_message(self, topic, message): try: self.hub.send_message(topic, message) except Exception as e: log.error('Cannot send message: %s' % e) def stop(self): if hasattr(self, 'hub') and self.hub: self.hub.close() if hasattr(self, 'DBSession') and self.DBSession: self.DBSession.close() class PollingProducer(Producer): """ A self-polling producer This class represents a data stream that wakes up at a given frequency, and calls the :meth:`poll` method. """ frequency = None # Either a timedelta object, or the number of seconds now = False die = False def __init__(self, hub): super(PollingProducer, self).__init__(hub) if isinstance(self.frequency, timedelta): self.frequency = self.frequency.seconds + \ (self.frequency.days * 24 * 60 * 60) + \ (self.frequency.microseconds / 1000000.0) log.debug("Setting a %s second timer" % self.frequency) moksha.hub.reactor.reactor.callInThread(self._work) def __json__(self): data = super(PollingProducer, self).__json__() data.update({ "frequency": self.frequency, "now": self.now, }) return data def poll(self): raise NotImplementedError def _poll(self): try: self.poll() self._exception_count = 0 # Reset to 0 if things are gravy except Exception: # Otherwise, keep track of how many exceptions we hit in a row self._exception_count = self._exception_count + 1 # And re-raise the exception so it can be logged. raise def _work(self): # If asked to, we can fire immediately at startup if self.now: self._poll() while not self.die: time.sleep(self.frequency) self._poll() def stop(self): super(PollingProducer, self).stop() self.die = True moksha.hub-1.4.1/moksha/hub/monitoring.py0000644000175000017500000000512612370150161022267 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Authors: Ralph Bean from moksha.hub.api import PollingProducer import os import string import zmq import json import logging log = logging.getLogger(__name__) class MonitoringProducer(PollingProducer): frequency = 5 ctx = None socket = None def __init__(self, hub, *args, **kwargs): key = 'moksha.monitoring.socket' endpoint = hub.config.get(key) if not endpoint: log.info("No %r defined. Monitoring disabled." 
% key) return log.info("Establishing monitor sock at %r" % endpoint) # Set up a special socket for ourselves self.ctx = zmq.Context() self.socket = self.ctx.socket(zmq.PUB) self.socket.bind(endpoint) # If this is a unix socket (which is almost always is) then set some # permissions so that whatever monitoring service is deployed can talk # to us. mode = hub.config.get('moksha.monitoring.socket.mode') if endpoint.startswith("ipc://") and mode: mode = string.atoi(mode, base=8) path = endpoint.split("ipc://")[-1] os.chmod(path, mode) super(MonitoringProducer, self).__init__(hub, *args, **kwargs) def serialize(self, obj): if isinstance(obj, list): return [self.serialize(item) for item in obj] elif isinstance(obj, dict): return dict([(k, self.serialize(v)) for k, v in obj.items()]) elif hasattr(obj, '__json__'): return obj.__json__() return obj def poll(self): data = { "consumers": self.serialize(self.hub.consumers), "producers": self.serialize(self.hub.producers), } if self.socket: self.socket.send(json.dumps(data)) def stop(self): super(MonitoringProducer, self).stop() if self.socket: self.socket.close() self.socket = None if self.ctx: self.ctx.term() self.ctx = None moksha.hub-1.4.1/moksha/hub/tests/0000755000175000017500000000000012371232660020674 5ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/tests/test_hub.py0000644000175000017500000003104312346320316023062 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test Moksha's Hub """ import threading import moksha try: import unittest2 as unittest except ImportError: import unittest from time import sleep, time from uuid import uuid4 import moksha.common.testtools.utils as testutils import moksha.hub.api from moksha.hub.hub import MokshaHub, CentralMokshaHub from moksha.hub.reactor import reactor as _reactor from nose.tools import (eq_, assert_true, assert_false) # Some constants used throughout the hub tests sleep_duration = 0.25 secret = "secret_message" def simulate_reactor(duration=sleep_duration): """ Simulate running the reactor for `duration` milliseconds """ global _reactor start = time() while time() - start < duration: _reactor.doPoll(0.0001) _reactor.runUntilCurrent() class TestHub(unittest.TestCase): def _setUp(self): def kernel(config): self.hub = MokshaHub(config=config) self.topic = str(uuid4()) for __setup, name in testutils.make_setup_functions(kernel): yield __setup, name def _tearDown(self): self.hub.close() @testutils.crosstest def test_hub_creation(self): """ Test that we can simply create the hub. """ assert_true(self.hub) eq_(self.hub.topics, {}) @testutils.crosstest def test_hub_send_recv(self): "Test that we can send a message and receive it." 
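        # Flow shared by the send/recv tests below: subscribe, pause briefly
        # so the subscription takes effect, publish, then pump the reactor
        # with simulate_reactor() (plus another pause) so the message is
        # delivered before asserting.  The callback slices body[1:-1] to
        # strip the quotes from the JSON-encoded string payload.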
messages_received = [] def callback(json): messages_received.append(json.body[1:-1]) self.hub.subscribe(topic=self.topic, callback=callback) sleep(sleep_duration) self.hub.send_message(topic=self.topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, [secret]) @testutils.crosstest def test_hub_no_subscription(self): "Test that we don't receive messages we're not subscribed for." messages_received = [] def callback(json): messages_received.append(json.body[1:-1]) self.hub.send_message(topic=self.topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, []) class TestConsumer: def _setUp(self): def kernel(config): self.hub = MokshaHub(config=config) self.a_topic = a_topic = str(uuid4()) for __setup, name in testutils.make_setup_functions(kernel): yield __setup, name def _tearDown(self): self.hub.close() def fake_register_consumer(self, cons): """ Fake register a consumer, not by entry-point like usual. Normally, consumers are identified by the hub by way of entry-points Ideally, this test would register the TestConsumer on the moksha.consumers entry point, and the hub would pick it up. I'm not sure how to do that, so we're going to fake it and manually add this consumer to the list of consumers of which the Hub is aware. """ self.hub.topics[cons.topic] = self.hub.topics.get(cons.topic, []) self.hub.topics[cons.topic].append(cons(self.hub).consume) sleep(sleep_duration) @testutils.crosstest def test_abstract(self): """ Ensure that conumsers with no consume method raise exceptions. """ class StillAbstractConsumer(moksha.hub.api.consumer.Consumer): pass try: c = StillAbstractConsumer(self.hub) c.consume("foo") assert(False) except NotImplementedError as e: pass @testutils.crosstest def test_receive_without_json(self): """ Try sending/receiving messages without jsonifying. """ messages_received = [] class TestConsumer(moksha.hub.api.consumer.Consumer): jsonify = False topic = self.a_topic def _consume(self, message): messages_received.append(message) self.fake_register_consumer(TestConsumer) # Now, send a generic message to that topic, and see if we get one. self.hub.send_message(topic=self.a_topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(len(messages_received), 1) @testutils.crosstest def test_receive_str(self): """ Send a message Consume and verify it. """ messages_received = [] class TestConsumer(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) self.fake_register_consumer(TestConsumer) # Now, send a generic message to that topic, and see if the consumer # processed it. self.hub.send_message(topic=self.a_topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, [secret]) @testutils.crosstest def test_receive_str_double(self): """ Send a message. Have two consumers consume it. """ messages_received = [] class TestConsumer1(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) class TestConsumer2(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) self.fake_register_consumer(TestConsumer1) self.fake_register_consumer(TestConsumer2) # Now, send a generic message to that topic, and see if the consumer # processed it. 
self.hub.send_message(topic=self.a_topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, [secret, secret]) @testutils.crosstest def test_receive_str_near_miss(self): """ Send a message. Three consumers. Only one receives. """ messages_received = [] class BaseConsumer(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) class Consumer1(BaseConsumer): pass class Consumer2(BaseConsumer): topic = BaseConsumer.topic[:-1] class Consumer3(BaseConsumer): topic = BaseConsumer.topic + "X" self.fake_register_consumer(Consumer1) self.fake_register_consumer(Consumer2) self.fake_register_consumer(Consumer3) # Now, send a generic message to that topic, and see if Consumer1 # processed it but that Consumer2 and Consumer3 didn't self.hub.send_message(topic=self.a_topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, [secret]) @testutils.crosstest def test_receive_dict(self): """ Send a dict with a message. Consume, extract, and verify it. """ obj = {'secret': secret} messages_received = [] class TestConsumer(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): obj = message['body'] messages_received.append(obj['secret']) self.fake_register_consumer(TestConsumer) # Now, send a generic message to that topic, and see if the consumer # processed it. self.hub.send_message(topic=self.a_topic, message=obj) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(messages_received, [secret]) @testutils.crosstest def test_receive_n_messages(self): """ Send `n` messages, receive `n` messages. """ n_messages = 10 messages_received = [] class TestConsumer(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) self.fake_register_consumer(TestConsumer) # Now, send n messages and make sure that n messages were consumed. for i in range(n_messages): self.hub.send_message(topic=self.a_topic, message=secret) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(len(messages_received), n_messages) @testutils.crosstest def test_receive_n_dicts(self): """ Send `n` dicts, receive `n` dicts. """ n_messages = 10 obj = {'secret': secret} messages_received = [] class TestConsumer(moksha.hub.api.consumer.Consumer): topic = self.a_topic def _consume(self, message): messages_received.append(message['body']) self.fake_register_consumer(TestConsumer) # Now, send n objects and make sure that n objects were consumed. for i in range(n_messages): self.hub.send_message(topic=self.a_topic, message=obj) simulate_reactor(sleep_duration) sleep(sleep_duration) eq_(len(messages_received), n_messages) @testutils.crosstest def test_dynamic_topic(self): """ Test that a topic can be set at runtime (not import time) """ class TestConsumer(moksha.hub.api.consumer.Consumer): topic = "bad topic" def __init__(self, *args, **kw): super(TestConsumer, self).__init__(*args, **kw) self.topic = "good topic" def _consume(self, message): pass # Just a little fake config. config = dict( zmq_enabled=True, zmq_subscribe_endpoints='', zmq_published_endpoints='', ) central = CentralMokshaHub(config, [TestConsumer], []) # Guarantee that "bad topic" is not in the topics list. eq_(list(central.topics.keys()), ["good topic"]) @testutils.crosstest def test_open_and_close(self): """ Test that a central hub with a consumer can be closed.. 
;) """ class TestConsumer(moksha.hub.api.consumer.Consumer): topic = "whatever" def _consume(self, message): pass # Just a little fake config. config = dict( zmq_enabled=True, zmq_subscribe_endpoints='', zmq_published_endpoints='', ) central = CentralMokshaHub(config, [TestConsumer], []) central.close() class TestProducer: def _setUp(self): def kernel(config): self.hub = MokshaHub(config=config) self.a_topic = a_topic = str(uuid4()) for __setup, name in testutils.make_setup_functions(kernel): yield __setup, name def _tearDown(self): self.hub.close() def fake_register_producer(self, prod): """ Fake register a producer, not by entry-point like usual. Registering producers is a little easier than registering consumers. The MokshaHub doesn't even keep track of the .poll method callbacks. We simply instantiate the producer (and it registers itself with the hub). """ return prod(self.hub) @testutils.crosstest def test_produce_ten_strs(self): """ Produce ten-ish strings. """ messages_received = [] class TestProducer(moksha.hub.api.producer.PollingProducer): topic = self.a_topic frequency = sleep_duration / 10.9 called = 0 def poll(self): self.called = self.called + 1 # Ready? prod = self.fake_register_producer(TestProducer) def killer(): sleep(sleep_duration) prod.die = True threading.Thread(target=killer).start() prod._work() # Finally, the check. Did we get our ten messages? (or about as much) assert prod.called > 8 assert prod.called < 12 @testutils.crosstest def test_idempotence(self): """ Test that running the same test twice still works. """ return self.test_produce_ten_strs() moksha.hub-1.4.1/moksha/hub/tests/__init__.py0000644000175000017500000000000012320571512022767 0ustar threebeanthreebean00000000000000moksha.hub-1.4.1/moksha/hub/tests/test_websockets.py0000644000175000017500000002267012326012501024453 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Test Moksha's Websocket Server. ZeroMQ only for now. """ import moksha import json import websocket import copy try: import unittest2 as unittest except ImportError: import unittest from time import sleep, time from uuid import uuid4 from moksha.hub.hub import CentralMokshaHub from moksha.hub.reactor import reactor as _reactor from nose.tools import eq_, assert_true, assert_false, raises # TODO -- these are duplicated in test_hub.. they should be imported # Some constants used throughout the hub tests sleep_duration = 0.25 secret = "secret_message" # TODO -- this is duplicated in test_hub. It should be imported from a common # place. 
def simulate_reactor(duration=sleep_duration): """ Simulate running the reactor for `duration` milliseconds """ global _reactor start = time() while time() - start < duration: _reactor.doPoll(0.0001) _reactor.runUntilCurrent() class TestWebSocketServer(unittest.TestCase): def setUp(self): config = { 'moksha.livesocket': True, 'moksha.livesocket.backend': 'websocket', 'moksha.socket.notify': True, 'moksha.livesocket.websocket.port': 8009, "zmq_publish_endpoints": "tcp://*:6543", "zmq_subscribe_endpoints": "tcp://127.0.0.1:6543", "zmq_enabled": True, 'zmq_strict': False, } self.hub = CentralMokshaHub(config=config) self.topic = str(uuid4()) def tearDown(self): self.hub.close() if hasattr(self.hub, 'websocket_server'): retval = self.hub.websocket_server.stopListening() # It can take some time to unregister our WS server from its port simulate_reactor(sleep_duration) def test_ws_subscribe_and_recv(self): """ Test that we can subscribe for and receive a message. """ self.received_message = None import threading class client_thread(threading.Thread): def run(thread): ws = websocket.WebSocket() ws.settimeout(5) ws.connect("ws://127.0.0.1:{port}/".format( port=self.hub.config['moksha.livesocket.websocket.port'], )) ws.send(json.dumps(dict( topic="__topic_subscribe__", body=self.topic, ))) # Receive that.. message = ws.recv().decode('utf-8') self.received_message = json.loads(message)['body'] ws.close() client = client_thread() client.start() # Process the connection from the client-thread. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) # Now, send a message... self.hub.send_message( topic=self.topic, message=secret, ) # Process the sending of our special message. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) client.join() eq_(self.received_message, secret) def test_ws_subscribe_multiple(self): """ Test that we can subscribe to a few different topics. """ self.received_messages = [] import threading num_topics = 3 class client_thread(threading.Thread): def run(thread): ws = websocket.WebSocket() ws.settimeout(5) ws.connect("ws://127.0.0.1:{port}/".format( port=self.hub.config['moksha.livesocket.websocket.port'], )) for i in range(num_topics): ws.send(json.dumps(dict( topic="__topic_subscribe__", body=self.topic + "_" + str(i), ))) # Receive that.. for i in range(num_topics): try: self.received_messages.append( json.loads(ws.recv().decode('utf-8'))['body'] ) except Exception: pass ws.close() client = client_thread() client.start() # Process the connection from the client-thread. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) # Now, send a message... for i in range(num_topics): self.hub.send_message( topic=self.topic + "_" + str(i), message=secret, ) # Process the sending of our special message. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) client.join() eq_(self.received_messages, [secret] * num_topics) def test_ws_subscribe_filter(self): """ Test that the WS server only sends desired topics. """ self.received_messages = [] import threading num_topics = 1 class client_thread(threading.Thread): def run(thread): ws = websocket.WebSocket() ws.settimeout(5) ws.connect("ws://127.0.0.1:{port}/".format( port=self.hub.config['moksha.livesocket.websocket.port'], )) for i in range(num_topics): ws.send(json.dumps(dict( topic="__topic_subscribe__", body=self.topic + "_" + str(i), ))) # Receive that.. 
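                # Note the "+ 1": the client waits for one more message than
                # it subscribed for.  That extra recv() should hit the
                # 5-second socket timeout and land in the except clause,
                # which is what proves the server filtered out the extra,
                # unsubscribed topic.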
for i in range(num_topics + 1): try: self.received_messages.append( json.loads(ws.recv().decode('utf-8'))['body'] ) except Exception: pass ws.close() client = client_thread() client.start() # Process the connection from the client-thread. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) # Now, send a message... for i in range(num_topics + 1): self.hub.send_message( topic=self.topic + "_" + str(i), message=secret, ) # Process the sending of our special message. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) client.join() eq_(self.received_messages, [secret] * num_topics) def test_ws_multiple_clients_different_topics(self): """ Test that the WS server can differentiate clients. """ import threading num_topics = 2 class client_thread(threading.Thread): def run(thread): thread.received_messages = [] ws = websocket.WebSocket() ws.settimeout(5) ws.connect("ws://127.0.0.1:{port}/".format( port=self.hub.config['moksha.livesocket.websocket.port'], )) for i in range(num_topics): ws.send(json.dumps(dict( topic="__topic_subscribe__", body=thread.topic + "_" + str(i), ))) # Receive that.. for i in range(num_topics + 2): try: thread.received_messages.append( json.loads(ws.recv().decode('utf-8'))['body'] ) except Exception: pass ws.close() client1 = client_thread() client2 = client_thread() client1.topic = self.topic + "_topic_1" client2.topic = self.topic + "_topic_2" client1.received_messages = [] client2.received_messages = [] client1.start() client2.start() # Process the connection from the client-thread. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) # Now, send a message... for i in range(num_topics): self.hub.send_message( topic=self.topic + "_topic_1" + "_" + str(i), message=secret + "_1", ) self.hub.send_message( topic=self.topic + "_topic_2" + "_" + str(i), message=secret + "_2", ) # Process the sending of our special message. simulate_reactor(sleep_duration) sleep(sleep_duration) simulate_reactor(sleep_duration) client1.join() client2.join() eq_(client1.received_messages, [secret + "_1"] * num_topics) eq_(client2.received_messages, [secret + "_2"] * num_topics) if __name__ == '__main__': unittest.main() moksha.hub-1.4.1/moksha/hub/reactor.py0000644000175000017500000000230012320571512021532 0ustar threebeanthreebean00000000000000# This file is part of Moksha. # Copyright (C) 2008-2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
#
# Authors: Luke Macken

"""
Chooses the best platform-specific Twisted reactor
"""

import sys

try:
    if 'linux' in sys.platform:
        from twisted.internet import epollreactor
        epollreactor.install()
    elif 'freebsd' in sys.platform or 'darwin' in sys.platform:
        from twisted.internet import kqreactor
        kqreactor.install()
    elif 'win' in sys.platform:
        from twisted.internet import iocpreactor
        iocpreactor.install()
except (ImportError, AssertionError):
    # reactor already installed
    pass

from twisted.internet import reactor
moksha.hub-1.4.1/moksha/__init__.py0000644000175000017500000000007012320571512021056 0ustar threebeanthreebean00000000000000__import__('pkg_resources').declare_namespace(__name__)
moksha.hub-1.4.1/AUTHORS0000644000175000017500000000027712320571512016544 0ustar threebeanthreebean00000000000000Luke Macken
John (J5) Palmieri
Mairin Duffy
Ralph Bean
Logo design by Michael Langlie
moksha.hub-1.4.1/MANIFEST.in0000644000175000017500000000007712320571512017230 0ustar threebeanthreebean00000000000000include LICENSE README AUTHORS COPYING
include development.ini
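To tie the pieces above together, here is a minimal, hypothetical sketch of a consumer/producer pair run through moksha.hub.main() with the zeromq backend on a Unix host. The class names, topic, payload, and config values below are illustrative assumptions and not part of this distribution; only the Consumer, PollingProducer, and main() APIs come from the sources above.

# hello_hub.py -- hypothetical example; names, topic, and config are made up.
from datetime import timedelta

from moksha.hub import main
from moksha.hub.api import Consumer, PollingProducer


class HelloConsumer(Consumer):
    topic = 'hello.topic'   # subscribed for us at hub start-up
    jsonify = True          # bodies are decoded from JSON before consume()

    def consume(self, message):
        # `message` is a dict with 'topic' and 'body' keys (see _consume_json)
        self.log.info("got %r on %r" % (message['body'], message['topic']))


class HelloProducer(PollingProducer):
    frequency = timedelta(seconds=5)   # poll() fires every five seconds

    def poll(self):
        self.send_message('hello.topic', {'greeting': 'hello, world'})


if __name__ == '__main__':
    # Run as a library (framework=False leaves logging to the caller) and
    # pass config directly instead of loading development.ini.
    config = {
        'zmq_enabled': True,
        'zmq_publish_endpoints': 'tcp://*:6543',
        'zmq_subscribe_endpoints': 'tcp://127.0.0.1:6543',
        'zmq_strict': True,
    }
    main(options=config,
         consumers=[HelloConsumer],
         producers=[HelloProducer],
         framework=False)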