pycadf-0.4.1/0000775000175400017540000000000012301706030014110 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/test-requirements.txt0000664000175400017540000000044512301705746020371 0ustar jenkinsjenkins00000000000000# Hacking already pins down pep8, pyflakes and flake8 hacking>=0.8.0,<0.9 coverage>=3.6 discover fixtures>=0.3.14 mock>=1.0 mox>=0.5.3 python-subunit>=0.0.18 testrepository>=0.0.17 testscenarios>=0.4 testtools>=0.9.34 # this is required for the docs build jobs sphinx>=1.1.2,<1.2 oslosphinx pycadf-0.4.1/ChangeLog0000664000175400017540000000377412301706030015675 0ustar jenkinsjenkins00000000000000CHANGES ======= 0.4.1 ----- * catch empty json body 0.4 --- * add constant for security service * audit middleware in pycadf * oslo common code sync and requirements cleanup * Rollback change to that Install configs into /etc * Update .gitreview after repo rename * Install configs into /etc, not /usr/etc 0.3.1 ----- * update audit_map * update build_typeURI to drop query string * sync common code and requirements * add path to event * mask token values * adjust typeURI to capture target better * Python 3: update setup.cfg to advertise python 3 compatibility 0.3 --- * Python3: do not index a dict_keys object * Python3: use six.moves.urllib.parse instead of urlparse * Python 3: the request body should be bytes in test_api.py * Python 3: use six.with_metaclass * Python 3: replace 'basestring' by 'six.string_types' * Python 3: Use six.moves.configparser rather than ConfigParser * sync requirements and oslo 0.2.2 ----- * add namespace to all ids * improve model validation * update requirements * update oslo * Updated from global requirements * do not set typeURI in resource shortform 0.2.1 ----- * switch list action to read/list * support namespace prefix in id * observer and reporter should be valid Resource 0.2 --- * fix conf file settings * Switch to oslo.config 1.2.0 final 0.1.9 ----- * pycadf 0.1.8 broke oslo 0.1.8 ----- * update tox to 1.6 * DNS names may not 
map to service catalog values 0.1.7 ----- * ensure unique id and timestamp defaults 0.1.6 ----- * bump oslo.config req to 1.2.0a3 0.1.5 ----- * update cadf spec to support new data model * update to use global requirements * Package api_audit_map.conf 0.1.4 ----- * add event to CADF_EVENT 0.1.3 ----- * add support for no response and failed request audit * move cadf correlation id under req.environ 0.1.2 ----- * append cadf event to req.environ 0.1.1 ----- * validate attributes against basestring v0.1 ---- * add support for audit api middleware * DMTF CADF format * test commit * initial project setup * Added .gitreview pycadf-0.4.1/setup.cfg0000664000175400017540000000152612301706030015735 0ustar jenkinsjenkins00000000000000[metadata] name = pycadf author = OpenStack author-email = openstack-dev@lists.openstack.org summary = CADF Library description-file = README.rst home-page = https://launchpad.net/pycadf classifier = Development Status :: 3 - Alpha Environment :: OpenStack Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: Apache Software License Operating System :: OS Independent Programming Language :: Python Programming Language :: Python :: 2.6 Programming Language :: Python :: 2.7 [files] packages = pycadf data_files = etc/pycadf = etc/pycadf/api_audit_map.conf [global] setup-hooks = pbr.hooks.setup_hook [build_sphinx] source-dir = doc/source build-dir = doc/build all_files = 1 [upload_sphinx] upload-dir = doc/build/html [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 pycadf-0.4.1/setup.py0000664000175400017540000000141512301705746015640 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True) pycadf-0.4.1/README.rst0000664000175400017540000000017412301705746015616 0ustar jenkinsjenkins00000000000000======== PyCADF ======== see the ReleaseNotes document and the project home for more info. http://launchpad.net/pycadf pycadf-0.4.1/openstack-common.conf0000664000175400017540000000015412301705746020251 0ustar jenkinsjenkins00000000000000[DEFAULT] module=context module=fixture module=gettextutils module=importutils module=jsonutils base=pycadf pycadf-0.4.1/PKG-INFO0000664000175400017540000000152212301706030015205 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: pycadf Version: 0.4.1 Summary: CADF Library Home-page: https://launchpad.net/pycadf Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: UNKNOWN Description: ======== PyCADF ======== see the ReleaseNotes document and the project home for more info. 
http://launchpad.net/pycadf Platform: UNKNOWN Classifier: Development Status :: 3 - Alpha Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 pycadf-0.4.1/.testr.conf0000664000175400017540000000032212301705746016210 0ustar jenkinsjenkins00000000000000[DEFAULT] test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 OS_TEST_TIMEOUT=60 ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list pycadf-0.4.1/etc/0000775000175400017540000000000012301706030014663 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/etc/pycadf/0000775000175400017540000000000012301706030016131 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/etc/pycadf/api_audit_map.conf0000664000175400017540000000333212301705746021612 0ustar jenkinsjenkins00000000000000[DEFAULT] # default target endpoint type # should match the endpoint type defined in service catalog target_endpoint_type = None [custom_actions] enable = enable disable = disable delete = delete startup = start/startup shutdown = stop/shutdown reboot = start/reboot os-migrations/get = read os-server-password/post = update # possible end path of api requests [path_keywords] add = None action = None enable = None disable = None configure-project = None defaults = None delete = None detail = None diagnostics = None entries = entry extensions = alias flavors = flavor images = image ips = label limits = None metadata = key os-agents = os-agent os-aggregates = os-aggregate os-availability-zone = None os-certificates = None os-cloudpipe = None os-fixed-ips = ip os-extra_specs = key os-flavor-access = None os-floating-ip-dns = domain os-floating-ips-bulk = 
host os-floating-ip-pools = None os-floating-ips = floating-ip os-hosts = host os-hypervisors = hypervisor os-instance-actions = instance-action os-keypairs = keypair os-migrations = None os-networks = network os-quota-sets = tenant os-security-groups = security-group os-security-group-rules = rule os-server-password = None os-services = None os-simple-tenant-usage = tenant os-virtual-interfaces = None os-volume_attachments = attachment os-volumes = volume os-volume-types = volume-type os-snapshots = snapshot reboot = None servers = server shutdown = None startup = None statistics = None # map endpoint type defined in service catalog to CADF typeURI [service_endpoints] identity = service/security object-store = service/storage/object volume = service/storage/block image = service/storage/image network = service/network compute = service/compute metering = service/bss/meteringpycadf-0.4.1/pycadf.egg-info/0000775000175400017540000000000012301706030017050 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf.egg-info/dependency_links.txt0000664000175400017540000000000112301706030023116 0ustar jenkinsjenkins00000000000000 pycadf-0.4.1/pycadf.egg-info/not-zip-safe0000664000175400017540000000000112301706026021303 0ustar jenkinsjenkins00000000000000 pycadf-0.4.1/pycadf.egg-info/PKG-INFO0000664000175400017540000000152212301706030020145 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: pycadf Version: 0.4.1 Summary: CADF Library Home-page: https://launchpad.net/pycadf Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: UNKNOWN Description: ======== PyCADF ======== see the ReleaseNotes document and the project home for more info. 
http://launchpad.net/pycadf Platform: UNKNOWN Classifier: Development Status :: 3 - Alpha Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 pycadf-0.4.1/pycadf.egg-info/top_level.txt0000664000175400017540000000000712301706030021577 0ustar jenkinsjenkins00000000000000pycadf pycadf-0.4.1/pycadf.egg-info/requires.txt0000664000175400017540000000016712301706030021454 0ustar jenkinsjenkins00000000000000Babel>=1.3 iso8601>=0.1.8 netaddr>=0.7.6 oslo.config>=1.2.0 oslo.messaging>=1.3.0a4 pytz>=2010h six>=1.4.1 WebOb>=1.2.3pycadf-0.4.1/pycadf.egg-info/SOURCES.txt0000664000175400017540000000343012301706030020734 0ustar jenkinsjenkins00000000000000.testr.conf AUTHORS ChangeLog LICENSE README.rst openstack-common.conf requirements.txt setup.cfg setup.py test-requirements.txt tox.ini etc/pycadf/api_audit_map.conf pycadf/__init__.py pycadf/attachment.py pycadf/cadftaxonomy.py pycadf/cadftype.py pycadf/credential.py pycadf/endpoint.py pycadf/event.py pycadf/eventfactory.py pycadf/geolocation.py pycadf/host.py pycadf/identifier.py pycadf/measurement.py pycadf/metric.py pycadf/path.py pycadf/reason.py pycadf/reporterstep.py pycadf/resource.py pycadf/tag.py pycadf/timestamp.py pycadf/utils.py pycadf.egg-info/PKG-INFO pycadf.egg-info/SOURCES.txt pycadf.egg-info/dependency_links.txt pycadf.egg-info/not-zip-safe pycadf.egg-info/requires.txt pycadf.egg-info/top_level.txt pycadf/audit/__init__.py pycadf/audit/api.py pycadf/middleware/__init__.py pycadf/middleware/audit.py pycadf/middleware/base.py pycadf/middleware/notifier.py pycadf/openstack/__init__.py pycadf/openstack/common/__init__.py pycadf/openstack/common/context.py 
pycadf/openstack/common/excutils.py pycadf/openstack/common/fileutils.py pycadf/openstack/common/gettextutils.py pycadf/openstack/common/importutils.py pycadf/openstack/common/jsonutils.py pycadf/openstack/common/local.py pycadf/openstack/common/lockutils.py pycadf/openstack/common/log.py pycadf/openstack/common/timeutils.py pycadf/openstack/common/fixture/__init__.py pycadf/openstack/common/fixture/config.py pycadf/openstack/common/fixture/lockutils.py pycadf/openstack/common/fixture/logging.py pycadf/openstack/common/fixture/mockpatch.py pycadf/openstack/common/fixture/moxstubout.py pycadf/tests/__init__.py pycadf/tests/base.py pycadf/tests/test_cadf_spec.py pycadf/tests/test_utils.py pycadf/tests/audit/__init__.py pycadf/tests/audit/test_api.py pycadf/tests/middleware/__init__.py pycadf/tests/middleware/test_audit.pypycadf-0.4.1/LICENSE0000664000175400017540000002665212301705746015145 0ustar jenkinsjenkins00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. --- License for python-keystoneclient versions prior to 2.1 --- All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. 
Neither the name of this project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. pycadf-0.4.1/requirements.txt0000664000175400017540000000017012301705746017407 0ustar jenkinsjenkins00000000000000Babel>=1.3 iso8601>=0.1.8 netaddr>=0.7.6 oslo.config>=1.2.0 oslo.messaging>=1.3.0a4 pytz>=2010h six>=1.4.1 WebOb>=1.2.3 pycadf-0.4.1/pycadf/0000775000175400017540000000000012301706030015356 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/attachment.py0000664000175400017540000000454612301705746020106 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import six from pycadf import cadftype ATTACHMENT_KEYNAME_TYPEURI = "typeURI" ATTACHMENT_KEYNAME_CONTENT = "content" ATTACHMENT_KEYNAME_NAME = "name" ATTACHMENT_KEYNAMES = [ATTACHMENT_KEYNAME_TYPEURI, ATTACHMENT_KEYNAME_CONTENT, ATTACHMENT_KEYNAME_NAME] class Attachment(cadftype.CADFAbstractType): # TODO(mrutkows): OpenStack / Ceilometer may want to define # the set of approved attachment types in order to # limit and validate them. typeURI = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_TYPEURI, lambda x: isinstance( x, six.string_types)) content = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_CONTENT) name = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_NAME, lambda x: isinstance(x, six.string_types)) def __init__(self, typeURI=None, content=None, name=None): # Attachment.typeURI if typeURI is not None: setattr(self, ATTACHMENT_KEYNAME_TYPEURI, typeURI) # Attachment.content if content is not None: setattr(self, ATTACHMENT_KEYNAME_CONTENT, content) # Attachment.name if name is not None: setattr(self, ATTACHMENT_KEYNAME_NAME, name) # self validate cadf:Attachment type against schema def is_valid(self): # Existence test, All attributes must exist for valid Attachment type return ( self._isset(ATTACHMENT_KEYNAME_TYPEURI) and self._isset(ATTACHMENT_KEYNAME_NAME) and self._isset(ATTACHMENT_KEYNAME_CONTENT) ) pycadf-0.4.1/pycadf/metric.py0000664000175400017540000000542412301705746017235 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from pycadf import cadftype from pycadf import identifier # Metric types can appear outside a cadf:Event record context, in these cases # a typeURI may be used to identify the cadf:Metric data type. TYPE_URI_METRIC = cadftype.CADF_VERSION_1_0_0 + 'metric' METRIC_KEYNAME_METRICID = "metricId" METRIC_KEYNAME_UNIT = "unit" METRIC_KEYNAME_NAME = "name" #METRIC_KEYNAME_ANNOTATIONS = "annotations" METRIC_KEYNAMES = [METRIC_KEYNAME_METRICID, METRIC_KEYNAME_UNIT, METRIC_KEYNAME_NAME #METRIC_KEYNAME_ANNOTATIONS ] class Metric(cadftype.CADFAbstractType): metricId = cadftype.ValidatorDescriptor(METRIC_KEYNAME_METRICID, lambda x: identifier.is_valid(x)) unit = cadftype.ValidatorDescriptor(METRIC_KEYNAME_UNIT, lambda x: isinstance(x, six.string_types)) name = cadftype.ValidatorDescriptor(METRIC_KEYNAME_NAME, lambda x: isinstance(x, six.string_types)) def __init__(self, metricId=None, unit=None, name=None): # Metric.id setattr(self, METRIC_KEYNAME_METRICID, metricId or identifier.generate_uuid()) # Metric.unit if unit is not None: setattr(self, METRIC_KEYNAME_UNIT, unit) # Metric.name if name is not None: setattr(self, METRIC_KEYNAME_NAME, name) # TODO(mrutkows): add mechanism for annotations, OpenStack may choose # not to support this "extension mechanism" and is not required (and not # critical in many audit contexts) def set_annotations(self, value): raise NotImplementedError() # setattr(self, METRIC_KEYNAME_ANNOTATIONS, value) # self validate cadf:Metric type against schema def is_valid(self): # Existence test, id, and unit attributes must both exist return ( 
self._isset(METRIC_KEYNAME_METRICID) and self._isset(METRIC_KEYNAME_UNIT) ) pycadf-0.4.1/pycadf/reason.py0000664000175400017540000000517112301705746017240 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from pycadf import cadftype TYPE_URI_REASON = cadftype.CADF_VERSION_1_0_0 + 'reason' REASON_KEYNAME_REASONTYPE = "reasonType" REASON_KEYNAME_REASONCODE = "reasonCode" REASON_KEYNAME_POLICYTYPE = "policyType" REASON_KEYNAME_POLICYID = "policyId" REASON_KEYNAMES = [REASON_KEYNAME_REASONTYPE, REASON_KEYNAME_REASONCODE, REASON_KEYNAME_POLICYTYPE, REASON_KEYNAME_POLICYID] class Reason(cadftype.CADFAbstractType): reasonType = cadftype.ValidatorDescriptor( REASON_KEYNAME_REASONTYPE, lambda x: isinstance(x, six.string_types)) reasonCode = cadftype.ValidatorDescriptor( REASON_KEYNAME_REASONCODE, lambda x: isinstance(x, six.string_types)) policyType = cadftype.ValidatorDescriptor( REASON_KEYNAME_POLICYTYPE, lambda x: isinstance(x, six.string_types)) policyId = cadftype.ValidatorDescriptor( REASON_KEYNAME_POLICYID, lambda x: isinstance(x, six.string_types)) def __init__(self, reasonType=None, reasonCode=None, policyType=None, policyId=None): # Reason.reasonType if reasonType is not None: setattr(self, REASON_KEYNAME_REASONTYPE, reasonType) # Reason.reasonCode if reasonCode is not None: setattr(self, REASON_KEYNAME_REASONCODE, reasonCode) # Reason.policyType if 
policyType is not None: setattr(self, REASON_KEYNAME_POLICYTYPE, policyType) # Reason.policyId if policyId is not None: setattr(self, REASON_KEYNAME_POLICYID, policyId) # TODO(mrutkows): validate this cadf:Reason type against schema def is_valid(self): # MUST have at least one valid pairing of reason+code or policy+id return ((self._isset(REASON_KEYNAME_REASONTYPE) and self._isset(REASON_KEYNAME_REASONCODE)) or (self._isset(REASON_KEYNAME_POLICYTYPE) and self._isset(REASON_KEYNAME_POLICYID))) pycadf-0.4.1/pycadf/endpoint.py0000664000175400017540000000341712301705746017572 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 IBM Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six from pycadf import cadftype TYPE_URI_ENDPOINT = cadftype.CADF_VERSION_1_0_0 + 'endpoint' ENDPOINT_KEYNAME_URL = "url" ENDPOINT_KEYNAME_NAME = "name" ENDPOINT_KEYNAME_PORT = "port" ENDPOINT_KEYNAMES = [ENDPOINT_KEYNAME_URL, ENDPOINT_KEYNAME_NAME, ENDPOINT_KEYNAME_PORT] class Endpoint(cadftype.CADFAbstractType): url = cadftype.ValidatorDescriptor( ENDPOINT_KEYNAME_URL, lambda x: isinstance(x, six.string_types)) name = cadftype.ValidatorDescriptor( ENDPOINT_KEYNAME_NAME, lambda x: isinstance(x, six.string_types)) port = cadftype.ValidatorDescriptor( ENDPOINT_KEYNAME_PORT, lambda x: isinstance(x, six.string_types)) def __init__(self, url, name=None, port=None): # ENDPOINT.url setattr(self, ENDPOINT_KEYNAME_URL, url) # ENDPOINT.name if name is not None: setattr(self, ENDPOINT_KEYNAME_NAME, name) # ENDPOINT.port if port is not None: setattr(self, ENDPOINT_KEYNAME_PORT, port) # TODO(mrutkows): validate this cadf:ENDPOINT type against schema def is_valid(self): return self._isset(ENDPOINT_KEYNAME_URL) pycadf-0.4.1/pycadf/host.py0000664000175400017540000000407412301705746016727 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright © 2013 IBM Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six

from pycadf import cadftype
from pycadf import identifier

TYPE_URI_HOST = cadftype.CADF_VERSION_1_0_0 + 'host'

HOST_KEYNAME_ID = "id"
HOST_KEYNAME_ADDR = "address"
HOST_KEYNAME_AGENT = "agent"
HOST_KEYNAME_PLATFORM = "platform"

HOST_KEYNAMES = [HOST_KEYNAME_ID,
                 HOST_KEYNAME_ADDR,
                 HOST_KEYNAME_AGENT,
                 HOST_KEYNAME_PLATFORM]


class Host(cadftype.CADFAbstractType):
    """CADF Host type: describes the machine hosting a resource.

    Every attribute is optional.  The ``id`` must pass identifier
    validation; ``address``, ``agent`` and ``platform`` are free-form
    strings.
    """

    id = cadftype.ValidatorDescriptor(
        HOST_KEYNAME_ID, lambda x: identifier.is_valid(x))
    address = cadftype.ValidatorDescriptor(
        HOST_KEYNAME_ADDR, lambda x: isinstance(x, six.string_types))
    agent = cadftype.ValidatorDescriptor(
        HOST_KEYNAME_AGENT, lambda x: isinstance(x, six.string_types))
    platform = cadftype.ValidatorDescriptor(
        HOST_KEYNAME_PLATFORM, lambda x: isinstance(x, six.string_types))

    def __init__(self, id=None, address=None, agent=None, platform=None):
        # All attributes are optional: set only the ones supplied so
        # that serialization omits the rest.
        for keyname, value in ((HOST_KEYNAME_ID, id),
                               (HOST_KEYNAME_ADDR, address),
                               (HOST_KEYNAME_AGENT, agent),
                               (HOST_KEYNAME_PLATFORM, platform)):
            if value is not None:
                setattr(self, keyname, value)

    # TODO(mrutkows): validate this cadf:Host type against schema
    def is_valid(self):
        """A Host has no mandatory attributes, so it is always valid."""
        return True
import six

from pycadf import cadftype
from pycadf import identifier

# Geolocation types can appear outside a cadf:Event record context, in these
# cases a typeURI may be used to identify the cadf:Geolocation data type.
TYPE_URI_GEOLOCATION = cadftype.CADF_VERSION_1_0_0 + 'geolocation'

GEO_KEYNAME_ID = "id"
GEO_KEYNAME_LATITUDE = "latitude"
GEO_KEYNAME_LONGITUDE = "longitude"
GEO_KEYNAME_ELEVATION = "elevation"
GEO_KEYNAME_ACCURACY = "accuracy"
GEO_KEYNAME_CITY = "city"
GEO_KEYNAME_STATE = "state"
GEO_KEYNAME_REGIONICANN = "regionICANN"
#GEO_KEYNAME_ANNOTATIONS = "annotations"

GEO_KEYNAMES = [GEO_KEYNAME_ID,
                GEO_KEYNAME_LATITUDE,
                GEO_KEYNAME_LONGITUDE,
                GEO_KEYNAME_ELEVATION,
                GEO_KEYNAME_ACCURACY,
                GEO_KEYNAME_CITY,
                GEO_KEYNAME_STATE,
                GEO_KEYNAME_REGIONICANN
                #GEO_KEYNAME_ANNOTATIONS
                ]


def _is_string(value):
    """Shared validator: accept any (unicode or byte) string."""
    return isinstance(value, six.string_types)


class Geolocation(cadftype.CADFAbstractType):
    """CADF Geolocation type: the physical location of a resource.

    All attributes are optional string values; ``id`` must pass
    identifier validation.
    """

    id = cadftype.ValidatorDescriptor(GEO_KEYNAME_ID,
                                      lambda x: identifier.is_valid(x))
    # TODO(mrutkows): we may want to do more validation to make
    # sure numeric range represented by string is valid
    latitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LATITUDE, _is_string)
    longitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LONGITUDE, _is_string)
    elevation = cadftype.ValidatorDescriptor(GEO_KEYNAME_ELEVATION, _is_string)
    accuracy = cadftype.ValidatorDescriptor(GEO_KEYNAME_ACCURACY, _is_string)
    city = cadftype.ValidatorDescriptor(GEO_KEYNAME_CITY, _is_string)
    state = cadftype.ValidatorDescriptor(GEO_KEYNAME_STATE, _is_string)
    regionICANN = cadftype.ValidatorDescriptor(GEO_KEYNAME_REGIONICANN,
                                               _is_string)

    def __init__(self, id=None, latitude=None, longitude=None,
                 elevation=None, accuracy=None, city=None, state=None,
                 regionICANN=None):
        # Every attribute is optional; only set what was supplied so
        # serialization omits the rest.
        for keyname, value in ((GEO_KEYNAME_ID, id),
                               (GEO_KEYNAME_LATITUDE, latitude),
                               (GEO_KEYNAME_LONGITUDE, longitude),
                               (GEO_KEYNAME_ELEVATION, elevation),
                               (GEO_KEYNAME_ACCURACY, accuracy),
                               (GEO_KEYNAME_CITY, city),
                               (GEO_KEYNAME_STATE, state),
                               (GEO_KEYNAME_REGIONICANN, regionICANN)):
            if value is not None:
                setattr(self, keyname, value)

    # TODO(mrutkows): add mechanism for annotations, OpenStack may choose
    # not to support this "extension mechanism" and is not required (and not
    # critical in many audit contexts)
    def set_annotations(self, value):
        # Annotations are deliberately unsupported for now.
        raise NotImplementedError()

    # self validate cadf:Geolocation type
    def is_valid(self):
        """A Geolocation has no mandatory attributes; always valid."""
        return True
""" Send notifications on request """ import os.path import sys import traceback as tb from oslo.config import cfg import oslo.messaging import six import webob.dec from pycadf.middleware import base from pycadf.openstack.common import context from pycadf.openstack.common.gettextutils import _ # noqa LOG = None def log_and_ignore_error(fn): def wrapped(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: if LOG: LOG.exception(_('An exception occurred processing ' 'the API call: %s ') % e) return wrapped class RequestNotifier(base.Middleware): """Send notification on request.""" @classmethod def factory(cls, global_conf, **local_conf): """Factory method for paste.deploy.""" conf = global_conf.copy() conf.update(local_conf) def _factory(app): return cls(app, **conf) return _factory def __init__(self, app, **conf): global LOG proj = cfg.CONF.project TRANSPORT_ALIASES = {} if proj: log_mod = '%s.openstack.common.log' % proj if log_mod in sys.modules: LOG = sys.modules[log_mod].getLogger(__name__) # Aliases to support backward compatibility TRANSPORT_ALIASES = { '%s.openstack.common.rpc.impl_kombu' % proj: 'rabbit', '%s.openstack.common.rpc.impl_qpid' % proj: 'qpid', '%s.openstack.common.rpc.impl_zmq' % proj: 'zmq', '%s.rpc.impl_kombu' % proj: 'rabbit', '%s.rpc.impl_qpid' % proj: 'qpid', '%s.rpc.impl_zmq' % proj: 'zmq', } self.service_name = conf.get('service_name') self.ignore_req_list = [x.upper().strip() for x in conf.get('ignore_req_list', '').split(',')] self.notifier = oslo.messaging.Notifier( oslo.messaging.get_transport(cfg.CONF, aliases=TRANSPORT_ALIASES), os.path.basename(sys.argv[0])) super(RequestNotifier, self).__init__(app) @staticmethod def environ_to_dict(environ): """Following PEP 333, server variables are lower case, so don't include them. 
""" return dict((k, v) for k, v in six.iteritems(environ) if k.isupper()) @log_and_ignore_error def process_request(self, request): request.environ['HTTP_X_SERVICE_NAME'] = \ self.service_name or request.host payload = { 'request': self.environ_to_dict(request.environ), } self.notifier.info(context.get_admin_context().to_dict(), 'http.request', payload) @log_and_ignore_error def process_response(self, request, response, exception=None, traceback=None): payload = { 'request': self.environ_to_dict(request.environ), } if response: payload['response'] = { 'status': response.status, 'headers': response.headers, } if exception: payload['exception'] = { 'value': repr(exception), 'traceback': tb.format_tb(traceback) } self.notifier.info(context.get_admin_context().to_dict(), 'http.response', payload) @webob.dec.wsgify def __call__(self, req): if req.method in self.ignore_req_list: return req.get_response(self.application) else: self.process_request(req) try: response = req.get_response(self.application) except Exception: exc_type, value, traceback = sys.exc_info() self.process_response(req, None, value, traceback) raise else: self.process_response(req, response) return response pycadf-0.4.1/pycadf/middleware/audit.py0000664000175400017540000000332212301705746021170 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Attach open standard audit information to request.environ AuditMiddleware filter should be place after Keystone's auth_token middleware in the pipeline so that it can utilise the information Keystone provides. """ from pycadf.audit import api as cadf_api from pycadf.middleware import notifier class AuditMiddleware(notifier.RequestNotifier): def __init__(self, app, **conf): super(AuditMiddleware, self).__init__(app, **conf) map_file = conf.get('audit_map_file', None) self.cadf_audit = cadf_api.OpenStackAuditApi(map_file) @notifier.log_and_ignore_error def process_request(self, request): self.cadf_audit.append_audit_event(request) super(AuditMiddleware, self).process_request(request) @notifier.log_and_ignore_error def process_response(self, request, response, exception=None, traceback=None): self.cadf_audit.mod_audit_event(request, response) super(AuditMiddleware, self).process_response(request, response, exception, traceback) pycadf-0.4.1/pycadf/middleware/base.py0000664000175400017540000000347712301705746021007 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Base class(es) for WSGI Middleware.""" import webob.dec class Middleware(object): """Base WSGI middleware wrapper. These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. 
""" @classmethod def factory(cls, global_conf, **local_conf): """Factory method for paste.deploy.""" return cls def __init__(self, application): self.application = application def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. """ return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) return self.process_response(response) pycadf-0.4.1/pycadf/middleware/__init__.py0000664000175400017540000000000012301705746021607 0ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/event.py0000664000175400017540000002327012301705746017072 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six

from pycadf import attachment
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import identifier
from pycadf import measurement
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp

TYPE_URI_EVENT = cadftype.CADF_VERSION_1_0_0 + 'event'

# Event.eventType
EVENT_KEYNAME_TYPEURI = "typeURI"
EVENT_KEYNAME_EVENTTYPE = "eventType"
EVENT_KEYNAME_ID = "id"
EVENT_KEYNAME_EVENTTIME = "eventTime"
EVENT_KEYNAME_INITIATOR = "initiator"
EVENT_KEYNAME_INITIATORID = "initiatorId"
EVENT_KEYNAME_ACTION = "action"
EVENT_KEYNAME_TARGET = "target"
EVENT_KEYNAME_TARGETID = "targetId"
EVENT_KEYNAME_OUTCOME = "outcome"
EVENT_KEYNAME_REASON = "reason"
EVENT_KEYNAME_SEVERITY = "severity"
EVENT_KEYNAME_MEASUREMENTS = "measurements"
EVENT_KEYNAME_TAGS = "tags"
EVENT_KEYNAME_ATTACHMENTS = "attachments"
EVENT_KEYNAME_OBSERVER = "observer"
EVENT_KEYNAME_OBSERVERID = "observerId"
EVENT_KEYNAME_REPORTERCHAIN = "reporterchain"

EVENT_KEYNAMES = [EVENT_KEYNAME_TYPEURI,
                  EVENT_KEYNAME_EVENTTYPE,
                  EVENT_KEYNAME_ID,
                  EVENT_KEYNAME_EVENTTIME,
                  EVENT_KEYNAME_INITIATOR,
                  EVENT_KEYNAME_INITIATORID,
                  EVENT_KEYNAME_ACTION,
                  EVENT_KEYNAME_TARGET,
                  EVENT_KEYNAME_TARGETID,
                  EVENT_KEYNAME_OUTCOME,
                  EVENT_KEYNAME_REASON,
                  EVENT_KEYNAME_SEVERITY,
                  EVENT_KEYNAME_MEASUREMENTS,
                  EVENT_KEYNAME_TAGS,
                  EVENT_KEYNAME_ATTACHMENTS,
                  EVENT_KEYNAME_OBSERVER,
                  EVENT_KEYNAME_OBSERVERID,
                  EVENT_KEYNAME_REPORTERCHAIN]


class Event(cadftype.CADFAbstractType):
    """CADF Event record.

    Mandatory attributes: typeURI, eventType, id, eventTime, action and
    outcome.  Exactly one of each pair (initiator, initiatorId),
    (target, targetId) and (observer, observerId) must also be present
    for the record to validate.
    """

    eventType = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_EVENTTYPE,
        lambda x: cadftype.is_valid_eventType(x))
    id = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_ID, lambda x: identifier.is_valid(x))
    eventTime = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_EVENTTIME, lambda x: timestamp.is_valid(x))
    # A full initiator Resource may not use the reserved short-form id.
    initiator = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_INITIATOR,
        (lambda x: isinstance(x, resource.Resource) and x.is_valid()
         and x.id != 'initiator'))
    initiatorId = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_INITIATORID, lambda x: identifier.is_valid(x))
    action = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_ACTION, lambda x: cadftaxonomy.is_valid_action(x))
    target = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_TARGET,
        (lambda x: isinstance(x, resource.Resource) and x.is_valid()
         and x.id != 'target'))
    targetId = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_TARGETID, lambda x: identifier.is_valid(x))
    outcome = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_OUTCOME, lambda x: cadftaxonomy.is_valid_outcome(x))
    reason = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_REASON,
        lambda x: isinstance(x, reason.Reason) and x.is_valid())
    severity = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_SEVERITY,
        lambda x: isinstance(x, six.string_types))
    observer = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_OBSERVER,
        (lambda x: isinstance(x, resource.Resource) and x.is_valid()))
    observerId = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_OBSERVERID, lambda x: identifier.is_valid(x))

    def __init__(self, eventType=cadftype.EVENTTYPE_ACTIVITY,
                 id=None, eventTime=None,
                 action=cadftaxonomy.UNKNOWN, outcome=cadftaxonomy.UNKNOWN,
                 initiator=None, initiatorId=None,
                 target=None, targetId=None, severity=None,
                 reason=None, observer=None, observerId=None):
        # Establish typeURI for the CADF Event data type
        # TODO(mrutkows): support extended typeURIs for Event subtypes
        setattr(self, EVENT_KEYNAME_TYPEURI, TYPE_URI_EVENT)

        # Mandatory attributes; id and eventTime get generated defaults.
        setattr(self, EVENT_KEYNAME_EVENTTYPE, eventType)
        setattr(self, EVENT_KEYNAME_ID, id or identifier.generate_uuid())
        setattr(self, EVENT_KEYNAME_EVENTTIME,
                eventTime or timestamp.get_utc_now())
        setattr(self, EVENT_KEYNAME_ACTION, action)
        setattr(self, EVENT_KEYNAME_OUTCOME, outcome)

        # Dependent/optional attributes: set only when supplied so that
        # is_valid() can enforce the exclusive-or pairings.
        for keyname, value in ((EVENT_KEYNAME_OBSERVER, observer),
                               (EVENT_KEYNAME_OBSERVERID, observerId),
                               (EVENT_KEYNAME_INITIATOR, initiator),
                               (EVENT_KEYNAME_INITIATORID, initiatorId),
                               (EVENT_KEYNAME_TARGET, target),
                               (EVENT_KEYNAME_TARGETID, targetId),
                               (EVENT_KEYNAME_SEVERITY, severity),
                               (EVENT_KEYNAME_REASON, reason)):
            if value is not None:
                setattr(self, keyname, value)

    def _append_valid(self, keyname, value, expected_type,
                      invalid_msg, type_msg):
        """Validate *value* and append it to list attribute *keyname*,
        creating the list on first use.

        Raises ValueError with *type_msg* when the value is None or of
        the wrong type, and with *invalid_msg* when its own validation
        fails.
        """
        if value is None or not isinstance(value, expected_type):
            raise ValueError(type_msg)
        if not value.is_valid():
            raise ValueError(invalid_msg)
        if not hasattr(self, keyname):
            setattr(self, keyname, list())
        getattr(self, keyname).append(value)

    # Event.reporterchain
    def add_reporterstep(self, step):
        self._append_valid(EVENT_KEYNAME_REPORTERCHAIN, step,
                           reporterstep.Reporterstep,
                           'Invalid reporterstep',
                           'Invalid reporterstep. '
                           'Value must be a Reporterstep')

    # Event.measurements
    def add_measurement(self, measure_val):
        self._append_valid(EVENT_KEYNAME_MEASUREMENTS, measure_val,
                           measurement.Measurement,
                           'Invalid measurement',
                           'Invalid measurement. '
                           'Value must be a Measurement')

    # Event.tags
    def add_tag(self, tag_val):
        if not tag.is_valid(tag_val):
            raise ValueError('Invalid tag')
        if not hasattr(self, EVENT_KEYNAME_TAGS):
            setattr(self, EVENT_KEYNAME_TAGS, list())
        getattr(self, EVENT_KEYNAME_TAGS).append(tag_val)

    # Event.attachments
    def add_attachment(self, attachment_val):
        self._append_valid(EVENT_KEYNAME_ATTACHMENTS, attachment_val,
                           attachment.Attachment,
                           'Invalid attachment',
                           'Invalid attachment. '
                           'Value must be an Attachment')

    # self validate cadf:Event record against schema
    def is_valid(self):
        # TODO(mrutkows): Eventually, make sure all attributes are
        # from either the CADF spec. (or profiles thereof)
        # TODO(mrutkows): validate all child attributes that are CADF types
        mandatory = (EVENT_KEYNAME_TYPEURI,
                     EVENT_KEYNAME_EVENTTYPE,
                     EVENT_KEYNAME_ID,
                     EVENT_KEYNAME_EVENTTIME,
                     EVENT_KEYNAME_ACTION,
                     EVENT_KEYNAME_OUTCOME)
        # Each of these pairs must have exactly one member set.
        exclusive = ((EVENT_KEYNAME_INITIATOR, EVENT_KEYNAME_INITIATORID),
                     (EVENT_KEYNAME_TARGET, EVENT_KEYNAME_TARGETID),
                     (EVENT_KEYNAME_OBSERVER, EVENT_KEYNAME_OBSERVERID))
        return (all(self._isset(key) for key in mandatory) and
                all(self._isset(a) ^ self._isset(b) for a, b in exclusive))
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import webob

from pycadf.audit import api as cadf_api
from pycadf.middleware import audit
from pycadf.tests import base


class FakeApp(object):
    """Minimal WSGI application returning a fixed 200 response."""
    def __call__(self, env, start_response):
        body = 'Some response'
        start_response('200 OK', [
            ('Content-Type', 'text/plain'),
            # len(body) directly; the original sum(map(len, body)) summed
            # per-character lengths, a roundabout way to compute the same.
            ('Content-Length', str(len(body)))
        ])
        return [body]


class FakeFailingApp(object):
    """WSGI application that always raises, to exercise the error path."""
    def __call__(self, env, start_response):
        raise Exception("It happens!")


@mock.patch('oslo.messaging.get_transport', mock.MagicMock())
class AuditMiddlewareTest(base.TestCase):
    """Tests for pycadf.middleware.audit.AuditMiddleware."""

    ENV_HEADERS = {'HTTP_X_SERVICE_CATALOG':
                   '''[{"endpoints_links": [],
                        "endpoints": [{"adminURL":
                                       "http://host:8774/v2/admin",
                                       "region": "RegionOne",
                                       "publicURL":
                                       "http://host:8774/v2/public",
                                       "internalURL":
                                       "http://host:8774/v2/internal",
                                       "id": "resource_id"}],
                        "type": "compute",
                        "name": "nova"},]''',
                   'HTTP_X_USER_ID': 'user_id',
                   'HTTP_X_USER_NAME': 'user_name',
                   'HTTP_X_AUTH_TOKEN': 'token',
                   'HTTP_X_PROJECT_ID': 'tenant_id',
                   'HTTP_X_IDENTITY_STATUS': 'Confirmed'}

    def setUp(self):
        super(AuditMiddlewareTest, self).setUp()
        self.map_file = 'etc/pycadf/api_audit_map.conf'

    def _make_middleware(self, app):
        """Build an AuditMiddleware around *app* using the shared map file.

        Previously every test repeated the literal path; use self.map_file
        from setUp so the configuration lives in one place.
        """
        return audit.AuditMiddleware(app,
                                     audit_map_file=self.map_file,
                                     service_name='pycadf')

    def test_api_request(self):
        middleware = self._make_middleware(FakeApp())
        self.ENV_HEADERS['REQUEST_METHOD'] = 'GET'
        req = webob.Request.blank('/foo/bar',
                                  environ=self.ENV_HEADERS)
        with mock.patch('oslo.messaging.Notifier.info') as notify:
            middleware(req)
            # Check first notification with only 'request'
            call_args = notify.call_args_list[0][0]
            self.assertEqual(call_args[1], 'http.request')
            self.assertEqual(set(call_args[2].keys()),
                             set(['request']))

            request = call_args[2]['request']
            self.assertEqual(request['PATH_INFO'], '/foo/bar')
            self.assertEqual(request['REQUEST_METHOD'], 'GET')
            self.assertIn('CADF_EVENT', request)
            self.assertEqual(request['CADF_EVENT']['outcome'], 'pending')

            # Check second notification with request + response
            call_args = notify.call_args_list[1][0]
            self.assertEqual(call_args[1], 'http.response')
            self.assertEqual(set(call_args[2].keys()),
                             set(['request', 'response']))

            request = call_args[2]['request']
            self.assertEqual(request['PATH_INFO'], '/foo/bar')
            self.assertEqual(request['REQUEST_METHOD'], 'GET')
            self.assertIn('CADF_EVENT', request)
            self.assertEqual(request['CADF_EVENT']['outcome'], 'success')

    def test_api_request_failure(self):
        middleware = self._make_middleware(FakeFailingApp())
        self.ENV_HEADERS['REQUEST_METHOD'] = 'GET'
        req = webob.Request.blank('/foo/bar',
                                  environ=self.ENV_HEADERS)
        with mock.patch('oslo.messaging.Notifier.info') as notify:
            # BUG FIX: the original put self.fail() inside the try block,
            # so its AssertionError was swallowed by 'except Exception' and
            # the test passed even when the middleware did NOT re-raise.
            # Using try/except/else keeps the failure assertion outside
            # the swallowing handler.
            try:
                middleware(req)
            except Exception:
                pass
            else:
                self.fail("Application exception has not been re-raised")

            # Check first notification with only 'request'
            call_args = notify.call_args_list[0][0]
            self.assertEqual(call_args[1], 'http.request')
            self.assertEqual(set(call_args[2].keys()),
                             set(['request']))

            request = call_args[2]['request']
            self.assertEqual(request['PATH_INFO'], '/foo/bar')
            self.assertEqual(request['REQUEST_METHOD'], 'GET')
            self.assertIn('CADF_EVENT', request)
            self.assertEqual(request['CADF_EVENT']['outcome'], 'pending')

            # Check second notification with request + exception
            call_args = notify.call_args_list[1][0]
            self.assertEqual(call_args[1], 'http.response')
            self.assertEqual(set(call_args[2].keys()),
                             set(['request', 'exception']))

            request = call_args[2]['request']
            self.assertEqual(request['PATH_INFO'], '/foo/bar')
            self.assertEqual(request['REQUEST_METHOD'], 'GET')
            self.assertIn('CADF_EVENT', request)
            self.assertEqual(request['CADF_EVENT']['outcome'], 'unknown')

    def test_process_request_fail(self):
        def func_error(self, req):
            raise Exception('error')

        # Errors raised while building the audit event must be swallowed
        # by the log_and_ignore_error decorator, not propagated.
        self.stubs.Set(cadf_api.OpenStackAuditApi,
                       'append_audit_event', func_error)
        middleware = self._make_middleware(FakeApp())
        req = webob.Request.blank('/foo/bar',
                                  environ={'REQUEST_METHOD': 'GET'})
        middleware.process_request(req)

    def test_process_response_fail(self):
        def func_error(self, req, res):
            raise Exception('error')

        self.stubs.Set(cadf_api.OpenStackAuditApi,
                       'mod_audit_event', func_error)
        middleware = self._make_middleware(FakeApp())
        req = webob.Request.blank('/foo/bar',
                                  environ={'REQUEST_METHOD': 'GET'})
        middleware.process_response(req, webob.response.Response())
import time

from pycadf import attachment
from pycadf import credential
from pycadf import endpoint
from pycadf import event
from pycadf import geolocation
from pycadf import host
from pycadf import identifier
from pycadf import measurement
from pycadf import metric
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf.tests import base
from pycadf import timestamp


class TestCADFSpec(base.TestCase):
    """Validate each CADF data type: construction, is_valid() and the
    key names present in its as_dict() serialization."""

    def test_endpoint(self):
        endp = endpoint.Endpoint(url='http://192.168.0.1',
                                 name='endpoint name',
                                 port='8080')
        self.assertEqual(endp.is_valid(), True)
        dict_endp = endp.as_dict()
        for key in endpoint.ENDPOINT_KEYNAMES:
            self.assertIn(key, dict_endp)

    def test_host(self):
        h = host.Host(id=identifier.generate_uuid(), address='192.168.0.1',
                      agent='client', platform='AIX')
        self.assertEqual(h.is_valid(), True)
        dict_host = h.as_dict()
        for key in host.HOST_KEYNAMES:
            self.assertIn(key, dict_host)

    def test_credential(self):
        cred = credential.Credential(type='auth token',
                                     token=identifier.generate_uuid())
        self.assertEqual(cred.is_valid(), True)
        dict_cred = cred.as_dict()
        for key in credential.CRED_KEYNAMES:
            self.assertIn(key, dict_cred)

    def test_geolocation(self):
        geo = geolocation.Geolocation(id=identifier.generate_uuid(),
                                      latitude='43.6481 N',
                                      longitude='79.4042 W',
                                      elevation='0', accuracy='1',
                                      city='toronto', state='ontario',
                                      regionICANN='ca')
        self.assertEqual(geo.is_valid(), True)
        dict_geo = geo.as_dict()
        for key in geolocation.GEO_KEYNAMES:
            self.assertIn(key, dict_geo)

    def test_metric(self):
        metric_val = metric.Metric(metricId=identifier.generate_uuid(),
                                   unit='b',
                                   name='bytes')
        self.assertEqual(metric_val.is_valid(), True)
        dict_metric_val = metric_val.as_dict()
        for key in metric.METRIC_KEYNAMES:
            self.assertIn(key, dict_metric_val)

    def test_measurement(self):
        # supplying both 'metric' and 'metricId' makes the measurement invalid
        measure_val = measurement.Measurement(
            result='100',
            metric=metric.Metric(),
            metricId=identifier.generate_uuid(),
            calculatedBy=resource.Resource(typeURI='storage'))
        self.assertEqual(measure_val.is_valid(), False)
        dict_measure_val = measure_val.as_dict()
        for key in measurement.MEASUREMENT_KEYNAMES:
            self.assertIn(key, dict_measure_val)

        # exactly one of 'metric' / 'metricId' is valid
        measure_val = measurement.Measurement(
            result='100',
            metric=metric.Metric(),
            calculatedBy=resource.Resource(typeURI='storage'))
        self.assertEqual(measure_val.is_valid(), True)

        measure_val = measurement.Measurement(
            result='100',
            metricId=identifier.generate_uuid(),
            calculatedBy=resource.Resource(typeURI='storage'))
        self.assertEqual(measure_val.is_valid(), True)

    def test_reason(self):
        reason_val = reason.Reason(reasonType='HTTP', reasonCode='200',
                                   policyType='poltype',
                                   policyId=identifier.generate_uuid())
        self.assertEqual(reason_val.is_valid(), True)
        dict_reason_val = reason_val.as_dict()
        for key in reason.REASON_KEYNAMES:
            self.assertIn(key, dict_reason_val)

    def test_reporterstep(self):
        # supplying both 'reporter' and 'reporterId' makes the step invalid
        step = reporterstep.Reporterstep(
            role='modifier',
            reporter=resource.Resource(typeURI='storage'),
            reporterId=identifier.generate_uuid(),
            reporterTime=timestamp.get_utc_now())
        self.assertEqual(step.is_valid(), False)
        dict_step = step.as_dict()
        for key in reporterstep.REPORTERSTEP_KEYNAMES:
            self.assertIn(key, dict_step)

        step = reporterstep.Reporterstep(
            role='modifier',
            reporter=resource.Resource(typeURI='storage'),
            reporterTime=timestamp.get_utc_now())
        self.assertEqual(step.is_valid(), True)

        step = reporterstep.Reporterstep(
            role='modifier',
            reporterId=identifier.generate_uuid(),
            reporterTime=timestamp.get_utc_now())
        self.assertEqual(step.is_valid(), True)

    def test_attachment(self):
        attach = attachment.Attachment(typeURI='attachURI',
                                       content='content',
                                       name='attachment_name')
        self.assertEqual(attach.is_valid(), True)
        dict_attach = attach.as_dict()
        for key in attachment.ATTACHMENT_KEYNAMES:
            self.assertIn(key, dict_attach)

    def test_resource(self):
        res = resource.Resource(typeURI='storage',
                                name='res_name',
                                domain='res_domain',
                                ref='res_ref',
                                credential=credential.Credential(
                                    token=identifier.generate_uuid()),
                                host=host.Host(address='192.168.0.1'),
                                geolocation=geolocation.Geolocation(),
                                geolocationId=identifier.generate_uuid())
        res.add_attachment(attachment.Attachment(typeURI='attachURI',
                                                 content='content',
                                                 name='attachment_name'))
        res.add_address(endpoint.Endpoint(url='http://192.168.0.1'))
        self.assertEqual(res.is_valid(), True)
        dict_res = res.as_dict()
        for key in resource.RESOURCE_KEYNAMES:
            self.assertIn(key, dict_res)

    def test_resource_shortform(self):
        # a short-form resource (id only) becomes invalid once any other
        # attribute (here an attachment) is added
        res = resource.Resource(id='target')
        self.assertEqual(res.is_valid(), True)
        res.add_attachment(attachment.Attachment(typeURI='attachURI',
                                                 content='content',
                                                 name='attachment_name'))
        self.assertEqual(res.is_valid(), False)

    def test_event(self):
        # mixing full resources with the matching *Id short forms is invalid
        ev = event.Event(eventType='activity',
                         id=identifier.generate_uuid(),
                         eventTime=timestamp.get_utc_now(),
                         initiator=resource.Resource(typeURI='storage'),
                         initiatorId=identifier.generate_uuid(),
                         action='read',
                         target=resource.Resource(typeURI='storage'),
                         targetId=identifier.generate_uuid(),
                         observer=resource.Resource(id='target'),
                         observerId=identifier.generate_uuid(),
                         outcome='success',
                         reason=reason.Reason(reasonType='HTTP',
                                              reasonCode='200'),
                         severity='high')
        # NOTE(review): the trailing comma below turns this statement into a
        # one-element tuple expression; harmless, but likely unintended.
        ev.add_measurement(
            measurement.Measurement(result='100',
                                    metricId=identifier.generate_uuid())),
        ev.add_tag(tag.generate_name_value_tag('name', 'val'))
        ev.add_attachment(attachment.Attachment(typeURI='attachURI',
                                                content='content',
                                                name='attachment_name'))
        ev.observer = resource.Resource(typeURI='service/security')
        ev.add_reporterstep(reporterstep.Reporterstep(
            role='observer',
            reporter=resource.Resource(typeURI='service/security')))
        ev.add_reporterstep(reporterstep.Reporterstep(
            reporterId=identifier.generate_uuid()))
        self.assertEqual(ev.is_valid(), False)
        dict_ev = ev.as_dict()
        for key in event.EVENT_KEYNAMES:
            self.assertIn(key, dict_ev)

        # all-resource form is valid
        ev = event.Event(eventType='activity',
                         id=identifier.generate_uuid(),
                         eventTime=timestamp.get_utc_now(),
                         initiator=resource.Resource(typeURI='storage'),
                         action='read',
                         target=resource.Resource(typeURI='storage'),
                         observer=resource.Resource(id='target'),
                         outcome='success')
        self.assertEqual(ev.is_valid(), True)

        # all-id form is valid
        ev = event.Event(eventType='activity',
                         id=identifier.generate_uuid(),
                         eventTime=timestamp.get_utc_now(),
                         initiatorId=identifier.generate_uuid(),
                         action='read',
                         targetId=identifier.generate_uuid(),
                         observerId=identifier.generate_uuid(),
                         outcome='success')
        self.assertEqual(ev.is_valid(), True)

        # mixed form is valid as long as each role uses only one of the two
        ev = event.Event(eventType='activity',
                         id=identifier.generate_uuid(),
                         eventTime=timestamp.get_utc_now(),
                         initiator=resource.Resource(typeURI='storage'),
                         action='read',
                         targetId=identifier.generate_uuid(),
                         observer=resource.Resource(id='target'),
                         outcome='success')
        self.assertEqual(ev.is_valid(), True)

    def test_event_unique(self):
        """Default id and eventTime differ between separately built events."""
        ev = event.Event(eventType='activity',
                         initiator=resource.Resource(typeURI='storage'),
                         action='read',
                         target=resource.Resource(typeURI='storage'),
                         observer=resource.Resource(id='target'),
                         outcome='success')
        time.sleep(1)
        ev2 = event.Event(eventType='activity',
                          initiator=resource.Resource(typeURI='storage'),
                          action='read',
                          target=resource.Resource(typeURI='storage'),
                          observer=resource.Resource(id='target'),
                          outcome='success')
        self.assertNotEqual(ev.id, ev2.id)
        self.assertNotEqual(ev.eventTime, ev2.eventTime)

    def test_event_resource_shortform_not_self(self):
        """Only the observer may use the 'target'/'initiator' short form."""
        self.assertRaises(ValueError, lambda: event.Event(
            eventType='activity',
            initiator=resource.Resource(typeURI='storage'),
            action='read',
            target=resource.Resource(id='target'),
            observer=resource.Resource(id='target'),
            outcome='success'))
        self.assertRaises(ValueError, lambda: event.Event(
            eventType='activity',
            initiator=resource.Resource(id='initiator'),
            action='read',
            target=resource.Resource(typeURI='storage'),
            observer=resource.Resource(id='target'),
            outcome='success'))
pycadf-0.4.1/pycadf/tests/base.py0000664000175400017540000000325112301705746020022 0ustar jenkinsjenkins00000000000000# -*-
encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Test base classes. """ import fixtures import os.path import testtools from oslo.config import cfg from pycadf.openstack.common.fixture import moxstubout class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() self.tempdir = self.useFixture(fixtures.TempDir()) moxfixture = self.useFixture(moxstubout.MoxStubout()) self.mox = moxfixture.mox self.stubs = moxfixture.stubs cfg.CONF([], project='pycadf') def path_get(self, project_file=None): root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', ) ) if project_file: return os.path.join(root, project_file) else: return root def temp_config_file_path(self, name='api_audit_map.conf'): return os.path.join(self.tempdir.path, name) def tearDown(self): cfg.CONF.reset() super(TestCase, self).tearDown() pycadf-0.4.1/pycadf/tests/audit/0000775000175400017540000000000012301706030017626 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/tests/audit/test_api.py0000664000175400017540000003434212301705747022034 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

from oslo.config import cfg
import webob

from pycadf.audit import api
from pycadf.tests import base


class TestAuditApi(base.TestCase):
    """Exercise OpenStackAuditApi: mapping HTTP requests/responses onto
    CADF events stored in req.environ['CADF_EVENT']."""

    # WSGI environ carrying the keystone auth headers the audit api reads
    ENV_HEADERS = {'HTTP_X_SERVICE_CATALOG':
                   '''[{"endpoints_links": [], "endpoints": [{"adminURL": "http://admin_host:8774", "region": "RegionOne", "publicURL": "http://public_host:8775", "internalURL": "http://internal_host:8776", "id": "resource_id"}], "type": "compute", "name": "nova"},]''',
                   'HTTP_X_USER_ID': 'user_id',
                   'HTTP_X_USER_NAME': 'user_name',
                   'HTTP_X_AUTH_TOKEN': 'token',
                   'HTTP_X_PROJECT_ID': 'tenant_id',
                   'HTTP_X_IDENTITY_STATUS': 'Confirmed'}

    def setUp(self):
        super(TestAuditApi, self).setUp()
        self.audit_api = api.OpenStackAuditApi(
            'etc/pycadf/api_audit_map.conf')

    def api_request(self, method, url):
        """Build a request, run it through the audit api, return it."""
        self.ENV_HEADERS['REQUEST_METHOD'] = method
        req = webob.Request.blank(url, environ=self.ENV_HEADERS,
                                  remote_addr='192.168.0.1')
        self.audit_api.append_audit_event(req)
        self.assertIn('CADF_EVENT_CORRELATION_ID', req.environ)
        return req

    def test_get_list_with_cfg(self):
        # the audit map can also come from oslo.config instead of the ctor
        cfg.CONF.set_override(
            'api_audit_map',
            self.path_get('etc/pycadf/api_audit_map.conf'),
            group='audit')
        self.audit_api = api.OpenStackAuditApi()
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers/')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['action'], 'read/list')

    def test_get_list(self):
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['action'], 'read/list')
        self.assertEqual(payload['typeURI'],
                         'http://schemas.dmtf.org/cloud/audit/1.0/event')
        self.assertEqual(payload['outcome'], 'pending')
        self.assertEqual(payload['eventType'], 'activity')
        self.assertEqual(payload['target']['name'], 'nova')
        self.assertEqual(payload['target']['id'], 'openstack:resource_id')
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')
        self.assertEqual(len(payload['target']['addresses']), 3)
        self.assertEqual(payload['target']['addresses'][0]['name'], 'admin')
        self.assertEqual(payload['target']['addresses'][0]['url'],
                         'http://admin_host:8774')
        self.assertEqual(payload['initiator']['id'], 'openstack:user_id')
        self.assertEqual(payload['initiator']['name'], 'user_name')
        self.assertEqual(payload['initiator']['project_id'],
                         'openstack:tenant_id')
        self.assertEqual(payload['initiator']['host']['address'],
                         '192.168.0.1')
        self.assertEqual(payload['initiator']['typeURI'],
                         'service/security/account/user')
        # token value must be masked in the event, never stored verbatim
        self.assertNotEqual(payload['initiator']['credential']['token'],
                            'token')
        self.assertEqual(payload['initiator']['credential']['identity_status'],
                         'Confirmed')
        self.assertNotIn('reason', payload)
        self.assertNotIn('reporterchain', payload)
        self.assertEqual(payload['observer']['id'], 'target')
        self.assertEqual(req.path, payload['requestPath'])

    def test_get_read(self):
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers/'
                               + str(uuid.uuid4()))
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers/server')
        self.assertEqual(payload['action'], 'read')
        self.assertEqual(payload['outcome'], 'pending')

    def test_get_unknown_endpoint(self):
        # endpoint not in the service catalog -> 'unknown' target fields
        req = self.api_request('GET', 'http://unknown:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['action'], 'read/list')
        self.assertEqual(payload['outcome'], 'pending')
        self.assertEqual(payload['target']['name'], 'unknown')
        self.assertEqual(payload['target']['id'], 'unknown')
        self.assertEqual(payload['target']['typeURI'], 'unknown')

    def test_get_unknown_endpoint_default_set(self):
        # with target_endpoint_type configured, an unknown host still maps
        # to the default (compute) endpoint from the catalog
        tmpfile = self.temp_config_file_path()
        with open(tmpfile, "w") as f:
            f.write("[DEFAULT]\n")
            f.write("target_endpoint_type = compute \n")
            f.write("[path_keywords]\n")
            f.write("servers = server\n\n")
            f.write("[service_endpoints]\n")
            f.write("compute = service/compute")
        self.audit_api = api.OpenStackAuditApi(tmpfile)
        req = self.api_request('GET', 'http://unknown:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        self.assertEqual(self.audit_api._MAP.default_target_endpoint_type,
                         'compute')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['action'], 'read/list')
        self.assertEqual(payload['outcome'], 'pending')
        self.assertEqual(payload['target']['name'], 'nova')
        self.assertEqual(payload['target']['id'], 'openstack:resource_id')
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')

    def test_put(self):
        req = self.api_request('PUT', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')
        self.assertEqual(payload['action'], 'update')
        self.assertEqual(payload['outcome'], 'pending')

    def test_delete(self):
        req = self.api_request('DELETE', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')
        self.assertEqual(payload['action'], 'delete')
        self.assertEqual(payload['outcome'], 'pending')

    def test_head(self):
        req = self.api_request('HEAD', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')
        self.assertEqual(payload['action'], 'read')
        self.assertEqual(payload['outcome'], 'pending')

    def test_post_update(self):
        # POST onto an existing resource id -> update
        req = self.api_request('POST', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers/'
                               + str(uuid.uuid4()))
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers/server')
        self.assertEqual(payload['action'], 'update')
        self.assertEqual(payload['outcome'], 'pending')

    def test_post_create(self):
        # POST onto a collection -> create
        req = self.api_request('POST', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers')
        self.assertEqual(payload['action'], 'create')
        self.assertEqual(payload['outcome'], 'pending')

    def test_post_action(self):
        # the action name is parsed from the JSON body of /action requests
        self.ENV_HEADERS['REQUEST_METHOD'] = 'POST'
        req = webob.Request.blank('http://admin_host:8774/v2/'
                                  + str(uuid.uuid4()) + '/servers/action',
                                  environ=self.ENV_HEADERS)
        req.body = b'{"createImage" : {"name" : "new-image","metadata": ' \
                   b'{"ImageType": "Gold","ImageVersion": "2.0"}}}'
        self.audit_api.append_audit_event(req)
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers/action')
        self.assertEqual(payload['action'], 'update/createImage')
        self.assertEqual(payload['outcome'], 'pending')

    def test_post_empty_body_action(self):
        # an /action POST with no body falls back to the plain 'create'
        self.ENV_HEADERS['REQUEST_METHOD'] = 'POST'
        req = webob.Request.blank('http://admin_host:8774/v2/'
                                  + str(uuid.uuid4()) + '/servers/action',
                                  environ=self.ENV_HEADERS)
        self.audit_api.append_audit_event(req)
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/servers/action')
        self.assertEqual(payload['action'], 'create')
        self.assertEqual(payload['outcome'], 'pending')

    def test_custom_action(self):
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/os-hosts/'
                               + str(uuid.uuid4()) + '/reboot')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/os-hosts/host/reboot')
        self.assertEqual(payload['action'], 'start/reboot')
        self.assertEqual(payload['outcome'], 'pending')

    def test_custom_action_complex(self):
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/os-migrations')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/os-migrations')
        self.assertEqual(payload['action'], 'read')
        req = self.api_request('POST', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/os-migrations')
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['target']['typeURI'],
                         'service/compute/os-migrations')
        self.assertEqual(payload['action'], 'create')

    def test_response_mod_msg(self):
        # a 200 response flips outcome to 'success' and appends a reporter
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.audit_api.mod_audit_event(req, webob.Response())
        payload2 = req.environ['CADF_EVENT']
        self.assertEqual(payload['id'], payload2['id'])
        self.assertEqual(payload['tags'], payload2['tags'])
        self.assertEqual(payload2['outcome'], 'success')
        self.assertEqual(payload2['reason']['reasonType'], 'HTTP')
        self.assertEqual(payload2['reason']['reasonCode'], '200')
        self.assertEqual(len(payload2['reporterchain']), 1)
        self.assertEqual(payload2['reporterchain'][0]['role'], 'modifier')
        self.assertEqual(payload2['reporterchain'][0]['reporter']['id'],
                         'target')

    def test_no_response(self):
        # no response at all -> outcome 'unknown', no reason block
        req = self.api_request('GET', 'http://admin_host:8774/v2/'
                               + str(uuid.uuid4()) + '/servers')
        payload = req.environ['CADF_EVENT']
        self.audit_api.mod_audit_event(req, None)
        payload2 = req.environ['CADF_EVENT']
        self.assertEqual(payload['id'], payload2['id'])
        self.assertEqual(payload['tags'], payload2['tags'])
        self.assertEqual(payload2['outcome'], 'unknown')
        self.assertNotIn('reason', payload2)
        self.assertEqual(len(payload2['reporterchain']), 1)
        self.assertEqual(payload2['reporterchain'][0]['role'], 'modifier')
        self.assertEqual(payload2['reporterchain'][0]['reporter']['id'],
                         'target')

    def test_missing_req(self):
        # mod_audit_event creates the event when append was never called
        self.ENV_HEADERS['REQUEST_METHOD'] = 'GET'
        req = webob.Request.blank('http://admin_host:8774/v2/'
                                  + str(uuid.uuid4()) + '/servers',
                                  environ=self.ENV_HEADERS)
        self.assertNotIn('CADF_EVENT', req.environ)
        self.audit_api.mod_audit_event(req, webob.Response())
        self.assertIn('CADF_EVENT', req.environ)
        self.assertIn('CADF_EVENT_CORRELATION_ID', req.environ)
        payload = req.environ['CADF_EVENT']
        self.assertEqual(payload['outcome'], 'success')
        self.assertEqual(payload['reason']['reasonType'], 'HTTP')
        self.assertEqual(payload['reason']['reasonCode'], '200')
        self.assertEqual(payload['observer']['id'], 'target')
        self.assertNotIn('reporterchain', payload)


class TestAuditApiConf(base.TestCase):
    """Audit map files missing optional sections must still load."""

    def test_missing_default_option(self):
        tmpfile = self.temp_config_file_path()
        # NOTE(gordc): ensure target_endpoint_type is not in conf file
        with open(tmpfile, "w") as f:
            f.write("[DEFAULT]\n")
            f.write("api_paths = servers\n\n")
            f.write("[service_endpoints]\n")
            f.write("compute = service/compute")
        self.audit_api = api.OpenStackAuditApi(tmpfile)
pycadf-0.4.1/pycadf/tests/audit/__init__.py0000664000175400017540000000000012301705746021742 0ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/tests/test_utils.py0000664000175400017540000000223012301705746021303 0ustar jenkinsjenkins00000000000000#
#
# Copyright 2013 OpenStack LLC
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid from pycadf.tests import base from pycadf import utils class TestUtils(base.TestCase): def test_mask_value(self): value = str(uuid.uuid4()) m_percent = 0.125 obfuscate = utils.mask_value(value, m_percent) visible = int(round(len(value) * m_percent)) self.assertEqual(value[:visible], obfuscate[:visible]) self.assertNotEqual(value[:visible + 1], obfuscate[:visible + 1]) self.assertEqual(value[-visible:], obfuscate[-visible:]) self.assertNotEqual(value[-visible - 1:], obfuscate[-visible - 1:]) pycadf-0.4.1/pycadf/tests/__init__.py0000664000175400017540000000000012301705746020634 0ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/resource.py0000664000175400017540000001505012301705746017575 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six

from pycadf import attachment
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import geolocation
from pycadf import host
from pycadf import identifier

TYPE_URI_RESOURCE = cadftype.CADF_VERSION_1_0_0 + 'resource'

RESOURCE_KEYNAME_TYPEURI = "typeURI"
RESOURCE_KEYNAME_ID = "id"
RESOURCE_KEYNAME_NAME = "name"
RESOURCE_KEYNAME_DOMAIN = "domain"
RESOURCE_KEYNAME_CRED = "credential"
RESOURCE_KEYNAME_REF = "ref"
RESOURCE_KEYNAME_GEO = "geolocation"
RESOURCE_KEYNAME_GEOID = "geolocationId"
RESOURCE_KEYNAME_HOST = "host"
RESOURCE_KEYNAME_ADDRS = "addresses"
RESOURCE_KEYNAME_ATTACHMENTS = "attachments"

RESOURCE_KEYNAMES = [RESOURCE_KEYNAME_TYPEURI,
                     RESOURCE_KEYNAME_ID,
                     RESOURCE_KEYNAME_NAME,
                     RESOURCE_KEYNAME_DOMAIN,
                     RESOURCE_KEYNAME_CRED,
                     RESOURCE_KEYNAME_REF,
                     RESOURCE_KEYNAME_GEO,
                     RESOURCE_KEYNAME_GEOID,
                     RESOURCE_KEYNAME_HOST,
                     RESOURCE_KEYNAME_ADDRS,
                     RESOURCE_KEYNAME_ATTACHMENTS]


class Resource(cadftype.CADFAbstractType):
    """CADF Resource: an initiator, target or observer of an event.

    Attributes are class-level ValidatorDescriptors: assignment is
    validated at set time and the value is stored in the instance dict.
    The special ids "target" / "initiator" denote the short form that
    refers back to another resource of the same event.
    """

    typeURI = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_TYPEURI,
        lambda x: cadftaxonomy.is_valid_resource(x))
    id = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_ID,
                                      lambda x: identifier.is_valid(x))
    name = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_NAME,
        lambda x: isinstance(x, six.string_types))
    domain = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_DOMAIN,
        lambda x: isinstance(x, six.string_types))
    credential = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_CRED,
        (lambda x: isinstance(x, credential.Credential) and x.is_valid()))
    host = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_HOST,
        lambda x: isinstance(x, host.Host))
    # TODO(mrutkows): validate the "ref" attribute is indeed a URI (format),
    # If it is a URL, we do not need to validate it is accessible/working,
    # for audit purposes this could have been a valid URL at some point
    # in the past or a URL that is only valid within some domain (e.g. a
    # private cloud)
    ref = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_REF,
        lambda x: isinstance(x, six.string_types))
    geolocation = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_GEO,
        lambda x: isinstance(x, geolocation.Geolocation))
    geolocationId = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_GEOID, lambda x: identifier.is_valid(x))

    def __init__(self, id=None, typeURI=cadftaxonomy.UNKNOWN, name=None,
                 ref=None, domain=None, credential=None, host=None,
                 geolocation=None, geolocationId=None):
        """Build a Resource; a uuid id is generated when none is given.

        Optional attributes are only set when supplied so that is_valid()
        and as_dict() see exactly the populated fields.
        """
        # Resource.id
        setattr(self, RESOURCE_KEYNAME_ID, id or identifier.generate_uuid())
        # Resource.typeURI -- deliberately NOT set for the short-form ids
        # "target"/"initiator" (see is_valid below)
        if (getattr(self, RESOURCE_KEYNAME_ID) != "target" and
                getattr(self, RESOURCE_KEYNAME_ID) != "initiator"):
            setattr(self, RESOURCE_KEYNAME_TYPEURI, typeURI)
        # Resource.name
        if name is not None:
            setattr(self, RESOURCE_KEYNAME_NAME, name)
        # Resource.ref
        if ref is not None:
            setattr(self, RESOURCE_KEYNAME_REF, ref)
        # Resource.domain
        if domain is not None:
            setattr(self, RESOURCE_KEYNAME_DOMAIN, domain)
        # Resource.credential
        if credential is not None:
            setattr(self, RESOURCE_KEYNAME_CRED, credential)
        # Resource.host
        if host is not None:
            setattr(self, RESOURCE_KEYNAME_HOST, host)
        # Resource.geolocation
        if geolocation is not None:
            setattr(self, RESOURCE_KEYNAME_GEO, geolocation)
        # Resource.geolocationId
        if geolocationId:
            setattr(self, RESOURCE_KEYNAME_GEOID, geolocationId)

    # Resource.address
    def add_address(self, addr):
        """Append a valid Endpoint to this resource's address list.

        :raises ValueError: if *addr* is not a valid Endpoint.
        """
        if (addr is not None and isinstance(addr, endpoint.Endpoint)):
            if addr.is_valid():
                # Create the list of Endpoints if needed
                if not hasattr(self, RESOURCE_KEYNAME_ADDRS):
                    setattr(self, RESOURCE_KEYNAME_ADDRS, list())
                addrs = getattr(self, RESOURCE_KEYNAME_ADDRS)
                addrs.append(addr)
            else:
                raise ValueError('Invalid endpoint')
        else:
            raise ValueError('Invalid endpoint. Value must be an Endpoint')

    # Resource.attachments
    def add_attachment(self, attach_val):
        """Append a valid Attachment to this resource.

        :raises ValueError: if *attach_val* is not a valid Attachment.
        """
        if (attach_val is not None and
                isinstance(attach_val, attachment.Attachment)):
            if attach_val.is_valid():
                # Create the list of Attachments if needed
                if not hasattr(self, RESOURCE_KEYNAME_ATTACHMENTS):
                    setattr(self, RESOURCE_KEYNAME_ATTACHMENTS, list())
                attachments = getattr(self, RESOURCE_KEYNAME_ATTACHMENTS)
                attachments.append(attach_val)
            else:
                raise ValueError('Invalid attachment')
        else:
            raise ValueError('Invalid attachment. Value must be an Attachment')

    # self validate this cadf:Resource type against schema
    def is_valid(self):
        """A resource needs id + typeURI, OR the bare short form: the id
        "target"/"initiator" with no other attribute set (len(vars) == 1)."""
        return (self._isset(RESOURCE_KEYNAME_ID) and
                (self._isset(RESOURCE_KEYNAME_TYPEURI) or
                 ((getattr(self, RESOURCE_KEYNAME_ID) == "target" or
                   getattr(self, RESOURCE_KEYNAME_ID) == "initiator") and
                  len(vars(self).keys()) == 1)))
        # TODO(mrutkows): validate the Resource's attribute types
pycadf-0.4.1/pycadf/cadftype.py0000664000175400017540000000610012301705746017541 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*-
#
# Copyright © 2013 IBM Corporation
#
# Author: Matt Rutkowski
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc

import six

from pycadf.openstack.common import jsonutils

CADF_SCHEMA_1_0_0 = 'cadf:'
CADF_VERSION_1_0_0 = 'http://schemas.dmtf.org/cloud/audit/1.0/'

# Valid cadf:Event record "types"
EVENTTYPE_ACTIVITY = 'activity'
EVENTTYPE_MONITOR = 'monitor'
EVENTTYPE_CONTROL = 'control'

VALID_EVENTTYPES = frozenset([
    EVENTTYPE_ACTIVITY,
    EVENTTYPE_MONITOR,
    EVENTTYPE_CONTROL
])


def is_valid_eventType(value):
    """Return True if value is one of the valid CADF event types."""
    return value in VALID_EVENTTYPES


# valid cadf:Event record "Reporter" roles
REPORTER_ROLE_OBSERVER = 'observer'
REPORTER_ROLE_MODIFIER = 'modifier'
REPORTER_ROLE_RELAY = 'relay'

VALID_REPORTER_ROLES = frozenset([
    REPORTER_ROLE_OBSERVER,
    REPORTER_ROLE_MODIFIER,
    REPORTER_ROLE_RELAY
])


def is_valid_reporter_role(value):
    """Return True if value is one of the valid CADF reporter roles."""
    return value in VALID_REPORTER_ROLES


class ValidatorDescriptor(object):
    """Data descriptor that validates a value before storing it.

    :param name: attribute name used as the key in the owning instance's
                 __dict__
    :param func: optional predicate; when given, a value is stored only if
                 func(value) is truthy, otherwise ValueError is raised
    """

    def __init__(self, name, func=None):
        self.name = name
        self.func = func

    def __set__(self, instance, value):
        if value is not None:
            if self.func is not None:
                if self.func(value):
                    instance.__dict__[self.name] = value
                else:
                    raise ValueError('%s failed validation: %s' %
                                     (self.name, self.func))
            else:
                instance.__dict__[self.name] = value
        else:
            raise ValueError('%s must not be None.' % self.name)


class CADFAbstractType(six.with_metaclass(abc.ABCMeta, object)):
    """The abstract base class for all CADF (complex) data types (classes)."""

    # NOTE: every concrete CADF type implements is_valid() with no extra
    # arguments; the abstract declaration previously took an unused
    # "value" parameter and has been aligned with the implementations.
    @abc.abstractmethod
    def is_valid(self):
        """Self-validate this type against the CADF schema."""

    def as_dict(self):
        """Return dict representation of Event."""
        return jsonutils.to_primitive(self, convert_instances=True)

    def _isset(self, attr):
        """Check to see if attribute is defined."""
        try:
            # An attribute that still resolves to the class-level
            # ValidatorDescriptor was never assigned on the instance.
            if isinstance(getattr(self, attr), ValidatorDescriptor):
                return False
            return True
        except AttributeError:
            return False

# TODO(mrutkows): Eventually, we want to use the OrderedDict (introduced
# in Python 2.7) type for all CADF classes to store attributes in a
# canonical form.
Currently, OpenStack/Jenkins requires 2.6 compatibility # The reason is that we want to be able to support signing all or parts # of the event record and need to guarantee order. # def to_ordered_dict(self, value): # pass pycadf-0.4.1/pycadf/measurement.py0000664000175400017540000000500512301705746020272 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pycadf import cadftype from pycadf import identifier from pycadf import metric from pycadf import resource MEASUREMENT_KEYNAME_RESULT = "result" MEASUREMENT_KEYNAME_METRIC = "metric" MEASUREMENT_KEYNAME_METRICID = "metricId" MEASUREMENT_KEYNAME_CALCBY = "calculatedBy" MEASUREMENT_KEYNAMES = [MEASUREMENT_KEYNAME_RESULT, MEASUREMENT_KEYNAME_METRICID, MEASUREMENT_KEYNAME_METRIC, MEASUREMENT_KEYNAME_CALCBY] class Measurement(cadftype.CADFAbstractType): result = cadftype.ValidatorDescriptor(MEASUREMENT_KEYNAME_RESULT) metric = cadftype.ValidatorDescriptor( MEASUREMENT_KEYNAME_METRIC, lambda x: isinstance(x, metric.Metric)) metricId = cadftype.ValidatorDescriptor(MEASUREMENT_KEYNAME_METRICID, lambda x: identifier.is_valid(x)) calculatedBy = cadftype.ValidatorDescriptor( MEASUREMENT_KEYNAME_CALCBY, (lambda x: isinstance(x, resource.Resource) and x.is_valid())) def __init__(self, result=None, metric=None, metricId=None, calculatedBy=None): # Measurement.result if result is not None: setattr(self, 
MEASUREMENT_KEYNAME_RESULT, result) # Measurement.metricId if metricId is not None: setattr(self, MEASUREMENT_KEYNAME_METRICID, metricId) # Measurement.metric if metric is not None: setattr(self, MEASUREMENT_KEYNAME_METRIC, metric) # Measurement.calculaedBy if calculatedBy is not None: setattr(self, MEASUREMENT_KEYNAME_CALCBY, calculatedBy) # self validate this cadf:Measurement type against schema def is_valid(self): return (self._isset(MEASUREMENT_KEYNAME_RESULT) and (self._isset(MEASUREMENT_KEYNAME_METRIC) ^ self._isset(MEASUREMENT_KEYNAME_METRICID))) pycadf-0.4.1/pycadf/utils.py0000664000175400017540000000205612301705746017110 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six def mask_value(value, s_percent=0.125): """Obfuscate a given string to show only a percentage of leading and trailing characters. 
:param s_percent: The percentage of characters to replace """ if isinstance(value, six.string_types): visible = (32 if int(round(len(value) * s_percent)) > 32 else int(round(len(value) * s_percent))) return value[:visible] + " xxxxxxxx " + value[-visible:] return value pycadf-0.4.1/pycadf/openstack/0000775000175400017540000000000012301706030017345 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/openstack/common/0000775000175400017540000000000012301706030020635 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/openstack/common/fileutils.py0000664000175400017540000000750312301705746023231 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import errno import os import tempfile from pycadf.openstack.common import excutils from pycadf.openstack.common.gettextutils import _ from pycadf.openstack.common import log as logging LOG = logging.getLogger(__name__) _FILE_CACHE = {} def ensure_tree(path): """Create a directory (and any ancestor directories required) :param path: Directory to create """ try: os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST: if not os.path.isdir(path): raise else: raise def read_cached_file(filename, force_reload=False): """Read from a file if it has been modified. :param force_reload: Whether to reload the file. :returns: A tuple with a boolean specifying if the data is fresh or not. 
""" global _FILE_CACHE if force_reload and filename in _FILE_CACHE: del _FILE_CACHE[filename] reloaded = False mtime = os.path.getmtime(filename) cache_info = _FILE_CACHE.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): LOG.debug(_("Reloading cached file %s") % filename) with open(filename) as fap: cache_info['data'] = fap.read() cache_info['mtime'] = mtime reloaded = True return (reloaded, cache_info['data']) def delete_if_exists(path, remove=os.unlink): """Delete a file, but ignore file not found error. :param path: File to delete :param remove: Optional function to remove passed path """ try: remove(path) except OSError as e: if e.errno != errno.ENOENT: raise @contextlib.contextmanager def remove_path_on_error(path, remove=delete_if_exists): """Protect code that wants to operate on PATH atomically. Any exception will cause PATH to be removed. :param path: File to work with :param remove: Optional function to remove passed path """ try: yield except Exception: with excutils.save_and_reraise_exception(): remove(path) def file_open(*args, **kwargs): """Open file see built-in file() documentation for more details Note: The reason this is kept in a separate module is to easily be able to provide a stub module that doesn't alter system state at all (for unit tests) """ return file(*args, **kwargs) def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): """Create temporary file or use existing file. This util is needed for creating temporary file with specified content, suffix and prefix. If path is not None, it will be used for writing content. If the path doesn't exist it'll be created. :param content: content for temporary file. :param path: same as parameter 'dir' for mkstemp :param suffix: same as parameter 'suffix' for mkstemp :param prefix: same as parameter 'prefix' for mkstemp For example: it can be used in database tests for creating configuration files. 
""" if path: ensure_tree(path) (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) try: os.write(fd, content) finally: os.close(fd) return path pycadf-0.4.1/pycadf/openstack/common/importutils.py0000664000175400017540000000451312301705746023622 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Import related utilities and helper functions. """ import sys import traceback def import_class(import_str): """Returns a class from a string including module and class.""" mod_str, _sep, class_str = import_str.rpartition('.') try: __import__(mod_str) return getattr(sys.modules[mod_str], class_str) except (ValueError, AttributeError): raise ImportError('Class %s cannot be found (%s)' % (class_str, traceback.format_exception(*sys.exc_info()))) def import_object(import_str, *args, **kwargs): """Import a class and return an instance of it.""" return import_class(import_str)(*args, **kwargs) def import_object_ns(name_space, import_str, *args, **kwargs): """Tries to import object from default namespace. Imports a class and return an instance of it, first by trying to find the class in a default namespace, then failing back to a full path if not found in the default namespace. 
""" import_value = "%s.%s" % (name_space, import_str) try: return import_class(import_value)(*args, **kwargs) except ImportError: return import_class(import_str)(*args, **kwargs) def import_module(import_str): """Import a module.""" __import__(import_str) return sys.modules[import_str] def import_versioned_module(version, submodule=None): module = 'pycadf.v%s' % version if submodule: module = '.'.join((module, submodule)) return import_module(module) def try_import(import_str, default=None): """Try to import a module and if it fails return default.""" try: return import_module(import_str) except ImportError: return default pycadf-0.4.1/pycadf/openstack/common/context.py0000664000175400017540000000701512301705746022713 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Simple class that stores security context information in the web request. Projects should subclass this class if they wish to enhance the request context or provide additional information in their specific WSGI pipeline. """ import itertools import uuid def generate_request_id(): return 'req-%s' % str(uuid.uuid4()) class RequestContext(object): """Helper class to represent useful information about a request context. Stores information about the security context under which the user accesses the system, as well as additional request information. 
""" user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}' def __init__(self, auth_token=None, user=None, tenant=None, domain=None, user_domain=None, project_domain=None, is_admin=False, read_only=False, show_deleted=False, request_id=None, instance_uuid=None): self.auth_token = auth_token self.user = user self.tenant = tenant self.domain = domain self.user_domain = user_domain self.project_domain = project_domain self.is_admin = is_admin self.read_only = read_only self.show_deleted = show_deleted self.instance_uuid = instance_uuid if not request_id: request_id = generate_request_id() self.request_id = request_id def to_dict(self): user_idt = ( self.user_idt_format.format(user=self.user or '-', tenant=self.tenant or '-', domain=self.domain or '-', user_domain=self.user_domain or '-', p_domain=self.project_domain or '-')) return {'user': self.user, 'tenant': self.tenant, 'domain': self.domain, 'user_domain': self.user_domain, 'project_domain': self.project_domain, 'is_admin': self.is_admin, 'read_only': self.read_only, 'show_deleted': self.show_deleted, 'auth_token': self.auth_token, 'request_id': self.request_id, 'instance_uuid': self.instance_uuid, 'user_identity': user_idt} def get_admin_context(show_deleted=False): context = RequestContext(None, tenant=None, is_admin=True, show_deleted=show_deleted) return context def get_context_from_function_and_args(function, args, kwargs): """Find an arg of type RequestContext and return it. This is useful in a couple of decorators where we don't know much about the function we're wrapping. """ for arg in itertools.chain(kwargs.values(), args): if isinstance(arg, RequestContext): return arg return None pycadf-0.4.1/pycadf/openstack/common/lockutils.py0000664000175400017540000002232012301705746023234 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import contextlib
import errno
import functools
import os
import shutil
import subprocess
import sys
import tempfile
import threading
import time
import weakref

from oslo.config import cfg

from pycadf.openstack.common import fileutils
from pycadf.openstack.common.gettextutils import _
from pycadf.openstack.common import log as logging


LOG = logging.getLogger(__name__)


util_opts = [
    cfg.BoolOpt('disable_process_locking', default=False,
                help='Whether to disable inter-process locks'),
    cfg.StrOpt('lock_path',
               default=os.environ.get("PYCADF_LOCK_PATH"),
               help=('Directory to use for lock files.'))
]


CONF = cfg.CONF
CONF.register_opts(util_opts)


def set_defaults(lock_path):
    """Override the default value of the lock_path config option."""
    cfg.set_defaults(util_opts, lock_path=lock_path)


class _InterProcessLock(object):
    """Lock implementation which allows multiple locks, working around
    issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
    not require any cleanup. Since the lock is always held on a file
    descriptor rather than outside of the process, the lock gets dropped
    automatically if the process crashes, even if __exit__ is not
    executed.

    There are no guarantees regarding usage by multiple green threads in a
    single process here. This lock works only between processes. Exclusive
    access between local threads should be achieved using the semaphores
    in the @synchronized decorator.

    Note these locks are released when the descriptor is closed, so it's
    not safe to close the file descriptor while another green thread holds
    the lock. Just opening and closing the lock file can break
    synchronisation, so lock files must be accessed only using this
    abstraction.
    """

    def __init__(self, name):
        # The lock file object is created lazily in acquire().
        self.lockfile = None
        self.fname = name

    def acquire(self):
        basedir = os.path.dirname(self.fname)

        if not os.path.exists(basedir):
            fileutils.ensure_tree(basedir)
            LOG.info(_('Created lock path: %s'), basedir)

        self.lockfile = open(self.fname, 'w')

        while True:
            try:
                # Using non-blocking locks since green threads are not
                # patched to deal with blocking locking calls.
                # Also upon reading the MSDN docs for locking(), it seems
                # to have a laughable 10 attempts "blocking" mechanism.
                self.trylock()
                LOG.debug(_('Got file lock "%s"'), self.fname)
                return True
            except IOError as e:
                if e.errno in (errno.EACCES, errno.EAGAIN):
                    # external locks synchronise things like iptables
                    # updates - give it some time to prevent busy spinning
                    time.sleep(0.01)
                else:
                    raise threading.ThreadError(_("Unable to acquire lock on"
                                                  " `%(filename)s` due to"
                                                  " %(exception)s") %
                                                {
                                                    'filename': self.fname,
                                                    'exception': e,
                                                })

    def __enter__(self):
        self.acquire()
        return self

    def release(self):
        try:
            self.unlock()
            self.lockfile.close()
            LOG.debug(_('Released file lock "%s"'), self.fname)
        except IOError:
            LOG.exception(_("Could not release the acquired lock `%s`"),
                          self.fname)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()

    def trylock(self):
        # Platform specific; implemented by the subclasses below.
        raise NotImplementedError()

    def unlock(self):
        # Platform specific; implemented by the subclasses below.
        raise NotImplementedError()


class _WindowsLock(_InterProcessLock):
    def trylock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)

    def unlock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)


class _PosixLock(_InterProcessLock):
    def trylock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)

    def unlock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_UN)


# Select the platform lock implementation; msvcrt/fcntl is imported here
# at module load and referenced by the subclasses above at call time.
if os.name == 'nt':
    import msvcrt
    InterProcessLock = _WindowsLock
else:
    import fcntl
    InterProcessLock = _PosixLock

_semaphores = weakref.WeakValueDictionary()
_semaphores_lock = threading.Lock()


def external_lock(name, lock_file_prefix=None):
    # The internal (per-process) lock guards construction of the
    # inter-process lock object.
    with internal_lock(name):
        LOG.debug(_('Attempting to grab external lock "%(lock)s"'),
                  {'lock': name})

        # NOTE(mikal): the lock name cannot contain directory
        # separators
        name = name.replace(os.sep, '_')
        if lock_file_prefix:
            sep = '' if lock_file_prefix.endswith('-') else '-'
            name = '%s%s%s' % (lock_file_prefix, sep, name)

        if not CONF.lock_path:
            raise cfg.RequiredOptError('lock_path')

        lock_file_path = os.path.join(CONF.lock_path, name)

        return InterProcessLock(lock_file_path)


def internal_lock(name):
    # One semaphore per lock name, shared process-wide; the WeakValue
    # dictionary lets unused semaphores be garbage collected.
    with _semaphores_lock:
        try:
            sem = _semaphores[name]
        except KeyError:
            sem = threading.Semaphore()
            _semaphores[name] = sem

    LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})
    return sem


@contextlib.contextmanager
def lock(name, lock_file_prefix=None, external=False):
    """Context based lock

    This function yields a `threading.Semaphore` instance (if we don't use
    eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external
    is True, in which case, it'll yield an InterProcessLock instance.

    :param lock_file_prefix: The lock_file_prefix argument is used to
        provide lock files on disk with a meaningful prefix.

    :param external: The external keyword argument denotes whether this
        lock should work across multiple processes. This means that if two
        different workers both run a method decorated with
        @synchronized('mylock', external=True), only one of them will
        execute at a time.
    """
    if external and not CONF.disable_process_locking:
        lock = external_lock(name, lock_file_prefix)
    else:
        lock = internal_lock(name)
    with lock:
        yield lock


def synchronized(name, lock_file_prefix=None, external=False):
    """Synchronization decorator.

    Decorating a method like so::

        @synchronized('mylock')
        def foo(self, *args):
           ...

    ensures that only one thread will execute the foo method at a time.

    Different methods can share the same lock::

        @synchronized('mylock')
        def foo(self, *args):
           ...

        @synchronized('mylock')
        def bar(self, *args):
           ...

    This way only one of either foo or bar can be executing at a time.
    """

    def wrap(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            try:
                with lock(name, lock_file_prefix, external):
                    LOG.debug(_('Got semaphore / lock "%(function)s"'),
                              {'function': f.__name__})
                    return f(*args, **kwargs)
            finally:
                LOG.debug(_('Semaphore / lock released "%(function)s"'),
                          {'function': f.__name__})
        return inner
    return wrap


def synchronized_with_prefix(lock_file_prefix):
    """Partial object generator for the synchronization decorator.

    Redefine @synchronized in each project like so::

        (in nova/utils.py)
        from nova.openstack.common import lockutils

        synchronized = lockutils.synchronized_with_prefix('nova-')

        (in nova/foo.py)
        from nova import utils

        @utils.synchronized('mylock')
        def bar(self, *args):
           ...

    The lock_file_prefix argument is used to provide lock files on disk
    with a meaningful prefix.
    """

    return functools.partial(synchronized, lock_file_prefix=lock_file_prefix)


def main(argv):
    """Create a dir for locks and pass it to command from arguments

    If you run this:
        python -m openstack.common.lockutils python setup.py testr

    a temporary directory will be created for all your locks and passed to
    all your tests in an environment variable. The temporary dir will be
    deleted afterwards and the return value will be preserved.
    """
    lock_dir = tempfile.mkdtemp()
    os.environ["PYCADF_LOCK_PATH"] = lock_dir
    try:
        ret_val = subprocess.call(argv[1:])
    finally:
        shutil.rmtree(lock_dir, ignore_errors=True)
    return ret_val


if __name__ == '__main__':
    sys.exit(main(sys.argv))
pycadf-0.4.1/pycadf/openstack/common/timeutils.py0000664000175400017540000001424112301705746023245 0ustar  jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Time related utilities and helper functions. """ import calendar import datetime import time import iso8601 import six # ISO 8601 extended time format with microseconds _ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND def isotime(at=None, subsecond=False): """Stringify time in ISO 8601 format.""" if not at: at = utcnow() st = at.strftime(_ISO8601_TIME_FORMAT if not subsecond else _ISO8601_TIME_FORMAT_SUBSECOND) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st def parse_isotime(timestr): """Parse time from ISO 8601 format.""" try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: raise ValueError(six.text_type(e)) except TypeError as e: raise ValueError(six.text_type(e)) def strtime(at=None, fmt=PERFECT_TIME_FORMAT): """Returns formatted utcnow.""" if not at: at = utcnow() return at.strftime(fmt) def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): """Turn a formatted time back into a datetime.""" return datetime.datetime.strptime(timestr, fmt) def normalize_time(timestamp): """Normalize time in arbitrary timezone to UTC naive object.""" offset = timestamp.utcoffset() if offset is None: return timestamp return timestamp.replace(tzinfo=None) - offset def is_older_than(before, seconds): """Return True if before is older than seconds.""" if isinstance(before, 
six.string_types): before = parse_strtime(before).replace(tzinfo=None) else: before = before.replace(tzinfo=None) return utcnow() - before > datetime.timedelta(seconds=seconds) def is_newer_than(after, seconds): """Return True if after is newer than seconds.""" if isinstance(after, six.string_types): after = parse_strtime(after).replace(tzinfo=None) else: after = after.replace(tzinfo=None) return after - utcnow() > datetime.timedelta(seconds=seconds) def utcnow_ts(): """Timestamp version of our utcnow function.""" if utcnow.override_time is None: # NOTE(kgriffs): This is several times faster # than going through calendar.timegm(...) return int(time.time()) return calendar.timegm(utcnow().timetuple()) def utcnow(): """Overridable version of utils.utcnow.""" if utcnow.override_time: try: return utcnow.override_time.pop(0) except AttributeError: return utcnow.override_time return datetime.datetime.utcnow() def iso8601_from_timestamp(timestamp): """Returns a iso8601 formatted date from timestamp.""" return isotime(datetime.datetime.utcfromtimestamp(timestamp)) utcnow.override_time = None def set_time_override(override_time=None): """Overrides utils.utcnow. Make it return a constant time or a list thereof, one at a time. :param override_time: datetime instance or list thereof. If not given, defaults to the current UTC time. """ utcnow.override_time = override_time or datetime.datetime.utcnow() def advance_time_delta(timedelta): """Advance overridden time using a datetime.timedelta.""" assert(not utcnow.override_time is None) try: for dt in utcnow.override_time: dt += timedelta except TypeError: utcnow.override_time += timedelta def advance_time_seconds(seconds): """Advance overridden time by seconds.""" advance_time_delta(datetime.timedelta(0, seconds)) def clear_time_override(): """Remove the overridden time.""" utcnow.override_time = None def marshall_now(now=None): """Make an rpc-safe datetime with microseconds. 
Note: tzinfo is stripped, but not required for relative times. """ if not now: now = utcnow() return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, minute=now.minute, second=now.second, microsecond=now.microsecond) def unmarshall_time(tyme): """Unmarshall a datetime dict.""" return datetime.datetime(day=tyme['day'], month=tyme['month'], year=tyme['year'], hour=tyme['hour'], minute=tyme['minute'], second=tyme['second'], microsecond=tyme['microsecond']) def delta_seconds(before, after): """Return the difference between two timing objects. Compute the difference in seconds between two date, time, or datetime objects (as a float, to microsecond resolution). """ delta = after - before return total_seconds(delta) def total_seconds(delta): """Return the total seconds of datetime.timedelta object. Compute total seconds of datetime.timedelta, datetime.timedelta doesn't have method total_seconds in Python2.6, calculate it manually. """ try: return delta.total_seconds() except AttributeError: return ((delta.days * 24 * 3600) + delta.seconds + float(delta.microseconds) / (10 ** 6)) def is_soon(dt, window): """Determines if time is going to happen in the next window seconds. :param dt: the time :param window: minimum seconds to remain to consider the time not soon :return: True if expiration is within the given duration """ soon = (utcnow() + datetime.timedelta(seconds=window)) return normalize_time(dt) <= soon pycadf-0.4.1/pycadf/openstack/common/jsonutils.py0000664000175400017540000001506712301705746023267 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


'''
JSON related utilities.

This module provides a few things:

    1) A handy function for getting an object down to something that can be
    JSON serialized.  See to_primitive().

    2) Wrappers around loads() and dumps().  The dumps() wrapper will
    automatically use to_primitive() for you if needed.

    3) This sets up anyjson to use the loads() and dumps() wrappers if
    anyjson is available.
'''


import datetime
import functools
import inspect
import itertools
import json
try:
    import xmlrpclib
except ImportError:
    # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3
    #                 however the function and object call signatures
    #                 remained the same. This whole try/except block should
    #                 be removed and replaced with a call to six.moves once
    #                 six 1.4.2 is released. See http://bit.ly/1bqrVzu
    import xmlrpc.client as xmlrpclib

import six

from pycadf.openstack.common import gettextutils
from pycadf.openstack.common import importutils
from pycadf.openstack.common import timeutils

# netaddr is an optional dependency; None when not installed.
netaddr = importutils.try_import("netaddr")

# Predicates identifying objects that cannot be usefully serialized and
# are instead stringified in to_primitive().
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback,
                     inspect.isframe, inspect.iscode, inspect.isbuiltin,
                     inspect.isroutine, inspect.isabstract]

# Types returned unchanged by to_primitive().
_simple_types = (six.string_types + six.integer_types
                 + (type(None), bool, float))


def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754
    # 460353
    # 379632
    # 274610
    # 199918
    # 114200
    # 51817
    # 26164
    # 6491
    # 283
    # 19
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        # Recursion cut-off: deeper structure is elided, not serialized.
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)


def dumps(value, default=to_primitive, **kwargs):
    return json.dumps(value, default=default, **kwargs)


def loads(s):
    return json.loads(s)


def load(s):
    return json.load(s)


# Register these wrappers as an anyjson implementation when anyjson is
# installed, so callers going through anyjson pick up to_primitive().
try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                                       'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
pycadf-0.4.1/pycadf/openstack/common/gettextutils.py0000664000175400017540000004454212301705746023772 0ustar  jenkinsjenkins00000000000000# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ gettext for openstack-common modules. Usual usage in an openstack.common module: from pycadf.openstack.common.gettextutils import _ """ import copy import functools import gettext import locale from logging import handlers import os import re from babel import localedata import six _localedir = os.environ.get('pycadf'.upper() + '_LOCALEDIR') _t = gettext.translation('pycadf', localedir=_localedir, fallback=True) # We use separate translation catalogs for each log level, so set up a # mapping between the log level name and the translator. The domain # for the log level is project_name + "-log-" + log_level so messages # for each level end up in their own catalog. _t_log_levels = dict( (level, gettext.translation('pycadf' + '-log-' + level, localedir=_localedir, fallback=True)) for level in ['info', 'warning', 'error', 'critical'] ) _AVAILABLE_LANGUAGES = {} USE_LAZY = False def enable_lazy(): """Convenience function for configuring _() to use lazy gettext Call this at the start of execution to enable the gettextutils._ function to use lazy gettext functionality. This is useful if your project is importing _ directly instead of using the gettextutils.install() way of importing the _ function. 
""" global USE_LAZY USE_LAZY = True def _(msg): if USE_LAZY: return Message(msg, domain='pycadf') else: if six.PY3: return _t.gettext(msg) return _t.ugettext(msg) def _log_translation(msg, level): """Build a single translation of a log message """ if USE_LAZY: return Message(msg, domain='pycadf' + '-log-' + level) else: translator = _t_log_levels[level] if six.PY3: return translator.gettext(msg) return translator.ugettext(msg) # Translators for log levels. # # The abbreviated names are meant to reflect the usual use of a short # name like '_'. The "L" is for "log" and the other letter comes from # the level. _LI = functools.partial(_log_translation, level='info') _LW = functools.partial(_log_translation, level='warning') _LE = functools.partial(_log_translation, level='error') _LC = functools.partial(_log_translation, level='critical') def install(domain, lazy=False): """Install a _() function using the given translation domain. Given a translation domain, install a _() function using gettext's install() function. The main difference from gettext.install() is that we allow overriding the default localedir (e.g. /usr/share/locale) using a translation-domain-specific environment variable (e.g. NOVA_LOCALEDIR). :param domain: the translation domain :param lazy: indicates whether or not to install the lazy _() function. The lazy _() introduces a way to do deferred translation of messages by installing a _ that builds Message objects, instead of strings, which can then be lazily translated into any available locale. """ if lazy: # NOTE(mrodden): Lazy gettext functionality. # # The following introduces a deferred way to do translations on # messages in OpenStack. We override the standard _() function # and % (format string) operation to build Message objects that can # later be translated when we have more information. def _lazy_gettext(msg): """Create and return a Message object. 
Lazy gettext function for a given domain, it is a factory method for a project/module to get a lazy gettext function for its own translation domain (i.e. nova, glance, cinder, etc.) Message encapsulates a string so that we can translate it later when needed. """ return Message(msg, domain=domain) from six import moves moves.builtins.__dict__['_'] = _lazy_gettext else: localedir = '%s_LOCALEDIR' % domain.upper() if six.PY3: gettext.install(domain, localedir=os.environ.get(localedir)) else: gettext.install(domain, localedir=os.environ.get(localedir), unicode=True) class Message(six.text_type): """A Message object is a unicode object that can be translated. Translation of Message is done explicitly using the translate() method. For all non-translation intents and purposes, a Message is simply unicode, and can be treated as such. """ def __new__(cls, msgid, msgtext=None, params=None, domain='pycadf', *args): """Create a new Message object. In order for translation to work gettext requires a message ID, this msgid will be used as the base unicode text. It is also possible for the msgid and the base unicode text to be different by passing the msgtext parameter. """ # If the base msgtext is not given, we use the default translation # of the msgid (which is in English) just in case the system locale is # not English, so that the base text will be in that locale by default. if not msgtext: msgtext = Message._translate_msgid(msgid, domain) # We want to initialize the parent unicode with the actual object that # would have been plain unicode if 'Message' was not enabled. msg = super(Message, cls).__new__(cls, msgtext) msg.msgid = msgid msg.domain = domain msg.params = params return msg def translate(self, desired_locale=None): """Translate this message to the desired locale. :param desired_locale: The desired locale to translate the message to, if no locale is provided the message will be translated to the system's default locale. 
:returns: the translated message in unicode """ translated_message = Message._translate_msgid(self.msgid, self.domain, desired_locale) if self.params is None: # No need for more translation return translated_message # This Message object may have been formatted with one or more # Message objects as substitution arguments, given either as a single # argument, part of a tuple, or as one or more values in a dictionary. # When translating this Message we need to translate those Messages too translated_params = _translate_args(self.params, desired_locale) translated_message = translated_message % translated_params return translated_message @staticmethod def _translate_msgid(msgid, domain, desired_locale=None): if not desired_locale: system_locale = locale.getdefaultlocale() # If the system locale is not available to the runtime use English if not system_locale[0]: desired_locale = 'en_US' else: desired_locale = system_locale[0] locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') lang = gettext.translation(domain, localedir=locale_dir, languages=[desired_locale], fallback=True) if six.PY3: translator = lang.gettext else: translator = lang.ugettext translated_message = translator(msgid) return translated_message def __mod__(self, other): # When we mod a Message we want the actual operation to be performed # by the parent class (i.e. unicode()), the only thing we do here is # save the original msgid and the parameters in case of a translation params = self._sanitize_mod_params(other) unicode_mod = super(Message, self).__mod__(params) modded = Message(self.msgid, msgtext=unicode_mod, params=params, domain=self.domain) return modded def _sanitize_mod_params(self, other): """Sanitize the object being modded with this Message. 
- Add support for modding 'None' so translation supports it - Trim the modded object, which can be a large dictionary, to only those keys that would actually be used in a translation - Snapshot the object being modded, in case the message is translated, it will be used as it was when the Message was created """ if other is None: params = (other,) elif isinstance(other, dict): params = self._trim_dictionary_parameters(other) else: params = self._copy_param(other) return params def _trim_dictionary_parameters(self, dict_param): """Return a dict that only has matching entries in the msgid.""" # NOTE(luisg): Here we trim down the dictionary passed as parameters # to avoid carrying a lot of unnecessary weight around in the message # object, for example if someone passes in Message() % locals() but # only some params are used, and additionally we prevent errors for # non-deepcopyable objects by unicoding() them. # Look for %(param) keys in msgid; # Skip %% and deal with the case where % is first character on the line keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid) # If we don't find any %(param) keys but have a %s if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid): # Apparently the full dictionary is the parameter params = self._copy_param(dict_param) else: params = {} # Save our existing parameters as defaults to protect # ourselves from losing values if we are called through an # (erroneous) chain that builds a valid Message with # arguments, and then does something like "msg % kwds" # where kwds is an empty dictionary. 
src = {} if isinstance(self.params, dict): src.update(self.params) src.update(dict_param) for key in keys: params[key] = self._copy_param(src[key]) return params def _copy_param(self, param): try: return copy.deepcopy(param) except TypeError: # Fallback to casting to unicode this will handle the # python code-like objects that can't be deep-copied return six.text_type(param) def __add__(self, other): msg = _('Message objects do not support addition.') raise TypeError(msg) def __radd__(self, other): return self.__add__(other) def __str__(self): # NOTE(luisg): Logging in python 2.6 tries to str() log records, # and it expects specifically a UnicodeError in order to proceed. msg = _('Message objects do not support str() because they may ' 'contain non-ascii characters. ' 'Please use unicode() or translate() instead.') raise UnicodeError(msg) def get_available_languages(domain): """Lists the available languages for the given translation domain. :param domain: the domain to get languages for """ if domain in _AVAILABLE_LANGUAGES: return copy.copy(_AVAILABLE_LANGUAGES[domain]) localedir = '%s_LOCALEDIR' % domain.upper() find = lambda x: gettext.find(domain, localedir=os.environ.get(localedir), languages=[x]) # NOTE(mrodden): en_US should always be available (and first in case # order matters) since our in-line message strings are en_US language_list = ['en_US'] # NOTE(luisg): Babel <1.0 used a function called list(), which was # renamed to locale_identifiers() in >=1.0, the requirements master list # requires >=0.9.6, uncapped, so defensively work with both. We can remove # this check when the master list updates to >=1.0, and update all projects list_identifiers = (getattr(localedata, 'list', None) or getattr(localedata, 'locale_identifiers')) locale_identifiers = list_identifiers() for i in locale_identifiers: if find(i) is not None: language_list.append(i) # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported # locales (e.g. 
'zh_CN', and 'zh_TW') aren't supported even though they # are perfectly legitimate locales: # https://github.com/mitsuhiko/babel/issues/37 # In Babel 1.3 they fixed the bug and they support these locales, but # they are still not explicitly "listed" by locale_identifiers(). # That is why we add the locales here explicitly if necessary so that # they are listed as supported. aliases = {'zh': 'zh_CN', 'zh_Hant_HK': 'zh_HK', 'zh_Hant': 'zh_TW', 'fil': 'tl_PH'} for (locale, alias) in six.iteritems(aliases): if locale in language_list and alias not in language_list: language_list.append(alias) _AVAILABLE_LANGUAGES[domain] = language_list return copy.copy(language_list) def translate(obj, desired_locale=None): """Gets the translated unicode representation of the given object. If the object is not translatable it is returned as-is. If the locale is None the object is translated to the system locale. :param obj: the object to translate :param desired_locale: the locale to translate the message to, if None the default system locale will be used :returns: the translated object in unicode, or the original object if it could not be translated """ message = obj if not isinstance(message, Message): # If the object to translate is not already translatable, # let's first get its unicode representation message = six.text_type(obj) if isinstance(message, Message): # Even after unicoding() we still need to check if we are # running with translatable unicode before translating return message.translate(desired_locale) return obj def _translate_args(args, desired_locale=None): """Translates all the translatable elements of the given arguments object. This method is used for translating the translatable values in method arguments which include values of tuples or dictionaries. If the object is not a tuple or a dictionary the object itself is translated if it is translatable. If the locale is None the object is translated to the system locale. 
:param args: the args to translate :param desired_locale: the locale to translate the args to, if None the default system locale will be used :returns: a new args object with the translated contents of the original """ if isinstance(args, tuple): return tuple(translate(v, desired_locale) for v in args) if isinstance(args, dict): translated_dict = {} for (k, v) in six.iteritems(args): translated_v = translate(v, desired_locale) translated_dict[k] = translated_v return translated_dict return translate(args, desired_locale) class TranslationHandler(handlers.MemoryHandler): """Handler that translates records before logging them. The TranslationHandler takes a locale and a target logging.Handler object to forward LogRecord objects to after translating them. This handler depends on Message objects being logged, instead of regular strings. The handler can be configured declaratively in the logging.conf as follows: [handlers] keys = translatedlog, translator [handler_translatedlog] class = handlers.WatchedFileHandler args = ('/var/log/api-localized.log',) formatter = context [handler_translator] class = openstack.common.log.TranslationHandler target = translatedlog args = ('zh_CN',) If the specified locale is not available in the system, the handler will log in the default locale. """ def __init__(self, locale=None, target=None): """Initialize a TranslationHandler :param locale: locale to use for translating messages :param target: logging.Handler object to forward LogRecord objects to after translation """ # NOTE(luisg): In order to allow this handler to be a wrapper for # other handlers, such as a FileHandler, and still be able to # configure it using logging.conf, this handler has to extend # MemoryHandler because only the MemoryHandlers' logging.conf # parsing is implemented such that it accepts a target handler. 
handlers.MemoryHandler.__init__(self, capacity=0, target=target) self.locale = locale def setFormatter(self, fmt): self.target.setFormatter(fmt) def emit(self, record): # We save the message from the original record to restore it # after translation, so other handlers are not affected by this original_msg = record.msg original_args = record.args try: self._translate_and_log_record(record) finally: record.msg = original_msg record.args = original_args def _translate_and_log_record(self, record): record.msg = translate(record.msg, self.locale) # In addition to translating the message, we also need to translate # arguments that were passed to the log method that were not part # of the main message e.g., log.info(_('Some message %s'), this_one)) record.args = _translate_args(record.args, self.locale) self.target.emit(record) pycadf-0.4.1/pycadf/openstack/common/log.py0000664000175400017540000006221612301705746022014 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Openstack logging handler. This module adds to logging functionality by adding the option to specify a context object when calling the various log methods. If the context object is not specified, default formatting is used. 
Additionally, an instance uuid may be passed as part of the log message, which is intended to make it easier for admins to find messages related to a specific instance. It also allows setting of formatting information through conf. """ import inspect import itertools import logging import logging.config import logging.handlers import os import re import sys import traceback from oslo.config import cfg import six from six import moves from pycadf.openstack.common.gettextutils import _ from pycadf.openstack.common import importutils from pycadf.openstack.common import jsonutils from pycadf.openstack.common import local _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" _SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password'] # NOTE(ldbragst): Let's build a list of regex objects using the list of # _SANITIZE_KEYS we already have. This way, we only have to add the new key # to the list of _SANITIZE_KEYS and we can generate regular expressions # for XML and JSON automatically. _SANITIZE_PATTERNS = [] _FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', r'(<%(key)s>).*?()', r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] for key in _SANITIZE_KEYS: for pattern in _FORMAT_PATTERNS: reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) _SANITIZE_PATTERNS.append(reg_ex) common_cli_opts = [ cfg.BoolOpt('debug', short='d', default=False, help='Print debugging output (set logging level to ' 'DEBUG instead of default WARNING level).'), cfg.BoolOpt('verbose', short='v', default=False, help='Print more verbose output (set logging level to ' 'INFO instead of default WARNING level).'), ] logging_cli_opts = [ cfg.StrOpt('log-config-append', metavar='PATH', deprecated_name='log-config', help='The name of logging configuration file. It does not ' 'disable existing loggers, but just appends specified ' 'logging configuration to any other existing logging ' 'options. 
Please see the Python logging module ' 'documentation for details on logging configuration ' 'files.'), cfg.StrOpt('log-format', default=None, metavar='FORMAT', help='DEPRECATED. ' 'A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' 'This option is deprecated. Please use ' 'logging_context_format_string and ' 'logging_default_format_string instead.'), cfg.StrOpt('log-date-format', default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', help='Format string for %%(asctime)s in log records. ' 'Default: %(default)s'), cfg.StrOpt('log-file', metavar='PATH', deprecated_name='logfile', help='(Optional) Name of log file to output to. ' 'If no default is set, logging will go to stdout.'), cfg.StrOpt('log-dir', deprecated_name='logdir', help='(Optional) The base directory used for relative ' '--log-file paths'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging. ' 'Existing syslog format is DEPRECATED during I, ' 'and then will be changed in J to honor RFC5424'), cfg.BoolOpt('use-syslog-rfc-format', # TODO(bogdando) remove or use True after existing # syslog format deprecation in J default=False, help='(Optional) Use syslog rfc5424 format for logging. ' 'If enabled, will add APP-NAME (RFC5424) before the ' 'MSG part of the syslog message. 
The old format ' 'without APP-NAME is deprecated in I, ' 'and will be removed in J.'), cfg.StrOpt('syslog-log-facility', default='LOG_USER', help='Syslog facility to receive log lines') ] generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, help='Log output to standard error') ] log_opts = [ cfg.StrOpt('logging_context_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [%(request_id)s %(user_identity)s] ' '%(instance)s%(message)s', help='Format string to use for log messages with context'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' '%(name)s [-] %(instance)s%(message)s', help='Format string to use for log messages without context'), cfg.StrOpt('logging_debug_format_suffix', default='%(funcName)s %(pathname)s:%(lineno)d', help='Data to append to log format when level is DEBUG'), cfg.StrOpt('logging_exception_prefix', default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' '%(instance)s', help='Prefix each line of exception output with this format'), cfg.ListOpt('default_log_levels', default=[ 'amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN' ], help='List of logger=LEVEL pairs'), cfg.BoolOpt('publish_errors', default=False, help='Publish error events'), cfg.BoolOpt('fatal_deprecations', default=False, help='Make deprecations fatal'), # NOTE(mikal): there are two options here because sometimes we are handed # a full instance (and could include more information), and other times we # are just handed a UUID for the instance. 
cfg.StrOpt('instance_format', default='[instance: %(uuid)s] ', help='If an instance is passed with the log message, format ' 'it like this'), cfg.StrOpt('instance_uuid_format', default='[instance: %(uuid)s] ', help='If an instance UUID is passed with the log message, ' 'format it like this'), ] CONF = cfg.CONF CONF.register_cli_opts(common_cli_opts) CONF.register_cli_opts(logging_cli_opts) CONF.register_opts(generic_log_opts) CONF.register_opts(log_opts) # our new audit level # NOTE(jkoelker) Since we synthesized an audit level, make the logging # module aware of it so it acts like other levels. logging.AUDIT = logging.INFO + 1 logging.addLevelName(logging.AUDIT, 'AUDIT') try: NullHandler = logging.NullHandler except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None def _dictify_context(context): if context is None: return None if not isinstance(context, dict) and getattr(context, 'to_dict', None): context = context.to_dict() return context def _get_binary_name(): return os.path.basename(inspect.stack()[-1][1]) def _get_log_file_path(binary=None): logfile = CONF.log_file logdir = CONF.log_dir if logfile and not logdir: return logfile if logfile and logdir: return os.path.join(logdir, logfile) if logdir: binary = binary or _get_binary_name() return '%s.log' % (os.path.join(logdir, binary),) return None def mask_password(message, secret="***"): """Replace password with 'secret' in message. :param message: The string which includes security information. :param secret: value with which to replace passwords. :returns: The unicode value of message with the password fields masked. 
For example: >>> mask_password("'adminPass' : 'aaaaa'") "'adminPass' : '***'" >>> mask_password("'admin_pass' : 'aaaaa'") "'admin_pass' : '***'" >>> mask_password('"password" : "aaaaa"') '"password" : "***"' >>> mask_password("'original_password' : 'aaaaa'") "'original_password' : '***'" >>> mask_password("u'original_password' : u'aaaaa'") "u'original_password' : u'***'" """ message = six.text_type(message) # NOTE(ldbragst): Check to see if anything in message contains any key # specified in _SANITIZE_KEYS, if not then just return the message since # we don't have to mask any passwords. if not any(key in message for key in _SANITIZE_KEYS): return message secret = r'\g<1>' + secret + r'\g<2>' for pattern in _SANITIZE_PATTERNS: message = re.sub(pattern, secret, message) return message class BaseLoggerAdapter(logging.LoggerAdapter): def audit(self, msg, *args, **kwargs): self.log(logging.AUDIT, msg, *args, **kwargs) class LazyAdapter(BaseLoggerAdapter): def __init__(self, name='unknown', version='unknown'): self._logger = None self.extra = {} self.name = name self.version = version @property def logger(self): if not self._logger: self._logger = getLogger(self.name, self.version) return self._logger class ContextAdapter(BaseLoggerAdapter): warn = logging.LoggerAdapter.warning def __init__(self, logger, project_name, version_string): self.logger = logger self.project = project_name self.version = version_string self._deprecated_messages_sent = dict() @property def handlers(self): return self.logger.handlers def deprecated(self, msg, *args, **kwargs): """Call this method when a deprecated feature is used. If the system is configured for fatal deprecations then the message is logged at the 'critical' level and :class:`DeprecatedConfig` will be raised. Otherwise, the message will be logged (once) at the 'warn' level. :raises: :class:`DeprecatedConfig` if the system is configured for fatal deprecations. 
""" stdmsg = _("Deprecated: %s") % msg if CONF.fatal_deprecations: self.critical(stdmsg, *args, **kwargs) raise DeprecatedConfig(msg=stdmsg) # Using a list because a tuple with dict can't be stored in a set. sent_args = self._deprecated_messages_sent.setdefault(msg, list()) if args in sent_args: # Already logged this message, so don't log it again. return sent_args.append(args) self.warn(stdmsg, *args, **kwargs) def process(self, msg, kwargs): # NOTE(mrodden): catch any Message/other object and # coerce to unicode before they can get # to the python logging and possibly # cause string encoding trouble if not isinstance(msg, six.string_types): msg = six.text_type(msg) if 'extra' not in kwargs: kwargs['extra'] = {} extra = kwargs['extra'] context = kwargs.pop('context', None) if not context: context = getattr(local.store, 'context', None) if context: extra.update(_dictify_context(context)) instance = kwargs.pop('instance', None) instance_uuid = (extra.get('instance_uuid', None) or kwargs.pop('instance_uuid', None)) instance_extra = '' if instance: instance_extra = CONF.instance_format % instance elif instance_uuid: instance_extra = (CONF.instance_uuid_format % {'uuid': instance_uuid}) extra['instance'] = instance_extra extra.setdefault('user_identity', kwargs.pop('user_identity', None)) extra['project'] = self.project extra['version'] = self.version extra['extra'] = extra.copy() return msg, kwargs class JSONFormatter(logging.Formatter): def __init__(self, fmt=None, datefmt=None): # NOTE(jkoelker) we ignore the fmt argument, but its still there # since logging.config.fileConfig passes it. 
self.datefmt = datefmt def formatException(self, ei, strip_newlines=True): lines = traceback.format_exception(*ei) if strip_newlines: lines = [moves.filter( lambda x: x, line.rstrip().splitlines()) for line in lines] lines = list(itertools.chain(*lines)) return lines def format(self, record): message = {'message': record.getMessage(), 'asctime': self.formatTime(record, self.datefmt), 'name': record.name, 'msg': record.msg, 'args': record.args, 'levelname': record.levelname, 'levelno': record.levelno, 'pathname': record.pathname, 'filename': record.filename, 'module': record.module, 'lineno': record.lineno, 'funcname': record.funcName, 'created': record.created, 'msecs': record.msecs, 'relative_created': record.relativeCreated, 'thread': record.thread, 'thread_name': record.threadName, 'process_name': record.processName, 'process': record.process, 'traceback': None} if hasattr(record, 'extra'): message['extra'] = record.extra if record.exc_info: message['traceback'] = self.formatException(record.exc_info) return jsonutils.dumps(message) def _create_logging_excepthook(product_name): def logging_excepthook(exc_type, value, tb): extra = {} if CONF.verbose or CONF.debug: extra['exc_info'] = (exc_type, value, tb) getLogger(product_name).critical( "".join(traceback.format_exception_only(exc_type, value)), **extra) return logging_excepthook class LogConfigError(Exception): message = _('Error loading logging config %(log_config)s: %(err_msg)s') def __init__(self, log_config, err_msg): self.log_config = log_config self.err_msg = err_msg def __str__(self): return self.message % dict(log_config=self.log_config, err_msg=self.err_msg) def _load_log_config(log_config_append): try: logging.config.fileConfig(log_config_append, disable_existing_loggers=False) except moves.configparser.Error as exc: raise LogConfigError(log_config_append, str(exc)) def setup(product_name, version='unknown'): """Setup logging.""" if CONF.log_config_append: _load_log_config(CONF.log_config_append) 
else: _setup_logging_from_conf(product_name, version) sys.excepthook = _create_logging_excepthook(product_name) def set_defaults(logging_context_format_string): cfg.set_defaults(log_opts, logging_context_format_string= logging_context_format_string) def _find_facility_from_conf(): facility_names = logging.handlers.SysLogHandler.facility_names facility = getattr(logging.handlers.SysLogHandler, CONF.syslog_log_facility, None) if facility is None and CONF.syslog_log_facility in facility_names: facility = facility_names.get(CONF.syslog_log_facility) if facility is None: valid_facilities = facility_names.keys() consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] valid_facilities.extend(consts) raise TypeError(_('syslog facility must be one of: %s') % ', '.join("'%s'" % fac for fac in valid_facilities)) return facility class RFCSysLogHandler(logging.handlers.SysLogHandler): def __init__(self, *args, **kwargs): self.binary_name = _get_binary_name() super(RFCSysLogHandler, self).__init__(*args, **kwargs) def format(self, record): msg = super(RFCSysLogHandler, self).format(record) msg = self.binary_name + ' ' + msg return msg def _setup_logging_from_conf(project, version): log_root = getLogger(None).logger for handler in log_root.handlers: log_root.removeHandler(handler) if CONF.use_syslog: facility = _find_facility_from_conf() # TODO(bogdando) use the format provided by RFCSysLogHandler # after existing syslog format deprecation in J if CONF.use_syslog_rfc_format: syslog = RFCSysLogHandler(address='/dev/log', facility=facility) else: syslog = logging.handlers.SysLogHandler(address='/dev/log', facility=facility) log_root.addHandler(syslog) logpath = _get_log_file_path() if logpath: filelog = logging.handlers.WatchedFileHandler(logpath) 
log_root.addHandler(filelog) if CONF.use_stderr: streamlog = ColorHandler() log_root.addHandler(streamlog) elif not logpath: # pass sys.stdout as a positional argument # python2.6 calls the argument strm, in 2.7 it's stream streamlog = logging.StreamHandler(sys.stdout) log_root.addHandler(streamlog) if CONF.publish_errors: handler = importutils.import_object( "pycadf.openstack.common.log_handler.PublishErrorsHandler", logging.ERROR) log_root.addHandler(handler) datefmt = CONF.log_date_format for handler in log_root.handlers: # NOTE(alaski): CONF.log_format overrides everything currently. This # should be deprecated in favor of context aware formatting. if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) log_root.info('Deprecated: log_format is now deprecated and will ' 'be removed in the next release') else: handler.setFormatter(ContextFormatter(project=project, version=version, datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) elif CONF.verbose: log_root.setLevel(logging.INFO) else: log_root.setLevel(logging.WARNING) for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') level = logging.getLevelName(level_name) logger = logging.getLogger(mod) logger.setLevel(level) _loggers = {} def getLogger(name='unknown', version='unknown'): if name not in _loggers: _loggers[name] = ContextAdapter(logging.getLogger(name), name, version) return _loggers[name] def getLazyLogger(name='unknown', version='unknown'): """Returns lazy logger. Creates a pass-through logger that does not create the real logger until it is really needed and delegates all calls to the real logger once it is created. 
""" return LazyAdapter(name, version) class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" def __init__(self, logger, level=logging.INFO): self.logger = logger self.level = level def write(self, msg): self.logger.log(self.level, msg.rstrip()) class ContextFormatter(logging.Formatter): """A context.RequestContext aware formatter configured through flags. The flags used to set format strings are: logging_context_format_string and logging_default_format_string. You can also specify logging_debug_format_suffix to append extra formatting if the log level is debug. For information about what variables are available for the formatter see: http://docs.python.org/library/logging.html#formatter If available, uses the context value stored in TLS - local.store.context """ def __init__(self, *args, **kwargs): """Initialize ContextFormatter instance Takes additional keyword arguments which can be used in the message format string. :keyword project: project name :type project: string :keyword version: project version :type version: string """ self.project = kwargs.pop('project', 'unknown') self.version = kwargs.pop('version', 'unknown') logging.Formatter.__init__(self, *args, **kwargs) def format(self, record): """Uses contextstring if request_id is set, otherwise default.""" # store project info record.project = self.project record.version = self.version # store request info context = getattr(local.store, 'context', None) if context: d = _dictify_context(context) for k, v in d.items(): setattr(record, k, v) # NOTE(sdague): default the fancier formatting params # to an empty string so we don't throw an exception if # they get used for key in ('instance', 'color'): if key not in record.__dict__: record.__dict__[key] = '' if record.__dict__.get('request_id', None): self._fmt = CONF.logging_context_format_string else: self._fmt = CONF.logging_default_format_string if (record.levelno == logging.DEBUG and CONF.logging_debug_format_suffix): self._fmt += 
" " + CONF.logging_debug_format_suffix # Cache this on the record, Logger will respect our formatted copy if record.exc_info: record.exc_text = self.formatException(record.exc_info, record) return logging.Formatter.format(self, record) def formatException(self, exc_info, record=None): """Format exception output with CONF.logging_exception_prefix.""" if not record: return logging.Formatter.formatException(self, exc_info) stringbuffer = moves.StringIO() traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, stringbuffer) lines = stringbuffer.getvalue().split('\n') stringbuffer.close() if CONF.logging_exception_prefix.find('%(asctime)') != -1: record.asctime = self.formatTime(record, self.datefmt) formatted_lines = [] for line in lines: pl = CONF.logging_exception_prefix % record.__dict__ fl = '%s%s' % (pl, line) formatted_lines.append(fl) return '\n'.join(formatted_lines) class ColorHandler(logging.StreamHandler): LEVEL_COLORS = { logging.DEBUG: '\033[00;32m', # GREEN logging.INFO: '\033[00;36m', # CYAN logging.AUDIT: '\033[01;36m', # BOLD CYAN logging.WARN: '\033[01;33m', # BOLD YELLOW logging.ERROR: '\033[01;31m', # BOLD RED logging.CRITICAL: '\033[01;31m', # BOLD RED } def format(self, record): record.color = self.LEVEL_COLORS[record.levelno] return logging.StreamHandler.format(self, record) class DeprecatedConfig(Exception): message = _("Fatal call to deprecated config: %(msg)s") def __init__(self, msg): super(Exception, self).__init__(self.message % dict(msg=msg)) pycadf-0.4.1/pycadf/openstack/common/excutils.py0000664000175400017540000000717112301705746023072 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # Copyright 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
class save_and_reraise_exception(object):
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be re-raised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is re-raised.

    In some cases the caller may not want to re-raise the exception, and
    for those circumstances this context provides a reraise flag that
    can be used to suppress the exception.  For example::

        except Exception:
            with save_and_reraise_exception() as ctxt:
                decide_if_need_reraise()
                if not should_be_reraised:
                    ctxt.reraise = False
    """
    def __init__(self):
        # Callers may set this to False inside the with-block to suppress
        # re-raising the saved exception.
        self.reraise = True

    def __enter__(self):
        # Capture the in-flight exception immediately, before any handler
        # code can clear the interpreter's exception state.
        self.type_, self.value, self.tb, = sys.exc_info()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # Handler code raised a new exception: log the one we saved and
            # let the new one propagate (returning False does not swallow it).
            logging.error(_('Original exception being dropped: %s'),
                          traceback.format_exception(self.type_,
                                                     self.value,
                                                     self.tb))
            return False
        if self.reraise:
            # Re-raise the originally saved exception with its traceback.
            six.reraise(self.type_, self.value, self.tb)


def forever_retry_uncaught_exceptions(infunc):
    """Decorator: run *infunc* forever, logging (rate-limited) any exception.

    Each uncaught exception is logged at most once a minute unless its
    message changes; after logging, the loop sleeps one second and retries.
    The wrapped function only returns if *infunc* returns normally.
    """
    def inner_func(*args, **kwargs):
        last_log_time = 0
        last_exc_message = None
        exc_count = 0
        while True:
            try:
                return infunc(*args, **kwargs)
            except Exception as exc:
                this_exc_message = six.u(str(exc))
                if this_exc_message == last_exc_message:
                    exc_count += 1
                else:
                    exc_count = 1
                # Do not log any more frequently than once a minute unless
                # the exception message changes
                cur_time = int(time.time())
                if (cur_time - last_log_time > 60 or
                        this_exc_message != last_exc_message):
                    logging.exception(
                        _('Unexpected exception occurred %d time(s)... '
                          'retrying.') % exc_count)
                    last_log_time = cur_time
                    last_exc_message = this_exc_message
                    exc_count = 0
                # This should be a very rare event. In case it isn't, do
                # a sleep.
                time.sleep(1)
    return inner_func
See the # License for the specific language governing permissions and limitations # under the License. """Local storage of variables using weak references""" import threading import weakref class WeakLocal(threading.local): def __getattribute__(self, attr): rval = super(WeakLocal, self).__getattribute__(attr) if rval: # NOTE(mikal): this bit is confusing. What is stored is a weak # reference, not the value itself. We therefore need to lookup # the weak reference and return the inner value here. rval = rval() return rval def __setattr__(self, attr, value): value = weakref.ref(value) return super(WeakLocal, self).__setattr__(attr, value) # NOTE(mikal): the name "store" should be deprecated in the future store = WeakLocal() # A "weak" store uses weak references and allows an object to fall out of scope # when it falls out of scope in the code that uses the thread local storage. A # "strong" store will hold a reference to the object so that it never falls out # of scope. weak_store = WeakLocal() strong_store = threading.local() pycadf-0.4.1/pycadf/openstack/common/fixture/0000775000175400017540000000000012301706030022323 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/openstack/common/fixture/moxstubout.py0000664000175400017540000000233612301705746025147 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import fixtures from six.moves import mox # noqa class MoxStubout(fixtures.Fixture): """Deal with code around mox and stubout as a fixture.""" def setUp(self): super(MoxStubout, self).setUp() # emulate some of the mox stuff, we can't use the metaclass # because it screws with our generators self.mox = mox.Mox() self.stubs = self.mox.stubs self.addCleanup(self.mox.UnsetStubs) self.addCleanup(self.mox.VerifyAll) pycadf-0.4.1/pycadf/openstack/common/fixture/logging.py0000664000175400017540000000213512301705746024341 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures def get_logging_handle_error_fixture(): """returns a fixture to make logging raise formatting exceptions. Usage: self.useFixture(logging.get_logging_handle_error_fixture()) """ return fixtures.MonkeyPatch('logging.Handler.handleError', _handleError) def _handleError(self, record): """Monkey patch for logging.Handler.handleError. The default handleError just logs the error to stderr but we want the option of actually raising an exception. """ raise pycadf-0.4.1/pycadf/openstack/common/fixture/lockutils.py0000664000175400017540000000354112301705746024726 0ustar jenkinsjenkins00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures from pycadf.openstack.common import lockutils class LockFixture(fixtures.Fixture): """External locking fixture. This fixture is basically an alternative to the synchronized decorator with the external flag so that tearDowns and addCleanups will be included in the lock context for locking between tests. The fixture is recommended to be the first line in a test method, like so:: def test_method(self): self.useFixture(LockFixture) ... or the first line in setUp if all the test methods in the class are required to be serialized. Something like:: class TestCase(testtools.testcase): def setUp(self): self.useFixture(LockFixture) super(TestCase, self).setUp() ... This is because addCleanups are put on a LIFO queue that gets run after the test method exits. (either by completing or raising an exception) """ def __init__(self, name, lock_file_prefix=None): self.mgr = lockutils.lock(name, lock_file_prefix, True) def setUp(self): super(LockFixture, self).setUp() self.addCleanup(self.mgr.__exit__, None, None, None) self.mgr.__enter__() pycadf-0.4.1/pycadf/openstack/common/fixture/mockpatch.py0000664000175400017540000000313412301705746024664 0ustar jenkinsjenkins00000000000000# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures import mock class PatchObject(fixtures.Fixture): """Deal with code around mock.""" def __init__(self, obj, attr, new=mock.DEFAULT, **kwargs): self.obj = obj self.attr = attr self.kwargs = kwargs self.new = new def setUp(self): super(PatchObject, self).setUp() _p = mock.patch.object(self.obj, self.attr, self.new, **self.kwargs) self.mock = _p.start() self.addCleanup(_p.stop) class Patch(fixtures.Fixture): """Deal with code around mock.patch.""" def __init__(self, obj, new=mock.DEFAULT, **kwargs): self.obj = obj self.kwargs = kwargs self.new = new def setUp(self): super(Patch, self).setUp() _p = mock.patch(self.obj, self.new, **self.kwargs) self.mock = _p.start() self.addCleanup(_p.stop) pycadf-0.4.1/pycadf/openstack/common/fixture/config.py0000664000175400017540000000576612301705746024175 0ustar jenkinsjenkins00000000000000# # Copyright 2013 Mirantis, Inc. # Copyright 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
class Config(fixtures.Fixture):
    """Allows overriding configuration settings for the test.

    `conf` will be reset on cleanup.
    """

    def __init__(self, conf=cfg.CONF):
        self.conf = conf

    def setUp(self):
        super(Config, self).setUp()
        # NOTE(morganfainberg): cleanups run in reverse registration order
        # and a reset must occur before options can be unregistered, so the
        # unregister cleanup is added first (and therefore runs last).
        self.addCleanup(self._unregister_config_opts)
        self.addCleanup(self.conf.reset)
        self._registered_config_opts = {}

    def config(self, **kw):
        """Override configuration values.

        The keyword arguments are the names of configuration options to
        override and their values.  If a `group` argument is supplied, the
        overrides are applied to that option group; otherwise they apply to
        the ``default`` group.
        """
        group = kw.pop('group', None)
        for opt_name, opt_value in six.iteritems(kw):
            self.conf.set_override(opt_name, opt_value, group)

    def _unregister_config_opts(self):
        # Unregister every option registered through this fixture,
        # group by group.
        for group, opts in self._registered_config_opts.items():
            self.conf.unregister_opts(opts, group=group)

    def register_opt(self, opt, group=None):
        """Register a single option for the test run.

        Options registered in this manner are automatically unregistered
        during cleanup.  If a `group` argument is supplied the option is
        registered to that group, otherwise to the ``default`` group.
        """
        self.conf.register_opt(opt, group=group)
        self._registered_config_opts.setdefault(group, set()).add(opt)

    def register_opts(self, opts, group=None):
        """Register multiple options for the test run.

        Works like register_opt() but takes a list of options as the first
        argument.  All options are registered to the same group.
        """
        for opt in opts:
            self.register_opt(opt, group=group)
to add this action ACTION_LIST = 'read/list' # TODO(mrutkows): Make global using WSGI mechanism ACTION_TAXONOMY = frozenset([ 'backup', 'capture', ACTION_CREATE, 'configure', ACTION_READ, ACTION_LIST, ACTION_UPDATE, ACTION_DELETE, 'monitor', 'start', 'stop', 'deploy', 'undeploy', 'enable', 'disable', 'send', 'receive', 'authenticate', 'authenticate/login', 'revoke', 'renew', 'restore', 'evaluate', 'allow', 'deny', 'notify', UNKNOWN ]) # TODO(mrutkows): validate absolute URIs as well def is_valid_action(value): for type in ACTION_TAXONOMY: if value.startswith(type): return True return False TYPE_URI_OUTCOME = cadftype.CADF_VERSION_1_0_0 + 'outcome' # Valid Event.outcome values OUTCOME_SUCCESS = 'success' OUTCOME_FAILURE = 'failure' OUTCOME_PENDING = 'pending' # TODO(mrutkows): Make global using WSGI mechanism OUTCOME_TAXONOMY = frozenset([ OUTCOME_SUCCESS, OUTCOME_FAILURE, OUTCOME_PENDING, UNKNOWN ]) # TODO(mrutkows): validate absolute URIs as well def is_valid_outcome(value): return value in OUTCOME_TAXONOMY SERVICE_SECURITY = 'service/security' ACCOUNT_USER = 'service/security/account/user' CADF_AUDIT_FILTER = 'service/security/audit/filter' # TODO(mrutkows): Make global using WSGI mechanism RESOURCE_TAXONOMY = frozenset([ 'storage', 'storage/node', 'storage/volume', 'storage/memory', 'storage/container', 'storage/directory', 'storage/database', 'storage/queue', 'compute', 'compute/node', 'compute/cpu', 'compute/machine', 'compute/process', 'compute/thread', 'network', 'network/node', 'network/node/host', 'network/connection', 'network/domain', 'network/cluster', 'service', 'service/oss', 'service/bss', 'service/bss/metering', 'service/composition', 'service/compute', 'service/database', SERVICE_SECURITY, 'service/security/account', ACCOUNT_USER, CADF_AUDIT_FILTER, 'service/storage', 'service/storage/block', 'service/storage/image', 'service/storage/object', 'service/network', 'data', 'data/message', 'data/workload', 'data/workload/app', 'data/workload/service', 
'data/workload/task', 'data/workload/job', 'data/file', 'data/file/catalog', 'data/file/log', 'data/template', 'data/package', 'data/image', 'data/module', 'data/config', 'data/directory', 'data/database', 'data/security', 'data/security/account', 'data/security/credential', 'data/security/group', 'data/security/identity', 'data/security/key', 'data/security/license', 'data/security/policy', 'data/security/profile', 'data/security/role', 'data/security/service', 'data/security/account/user', 'data/security/account/user/privilege', 'data/database/alias', 'data/database/catalog', 'data/database/constraints', 'data/database/index', 'data/database/instance', 'data/database/key', 'data/database/routine', 'data/database/schema', 'data/database/sequence', 'data/database/table', 'data/database/trigger', 'data/database/view', UNKNOWN ]) # TODO(mrutkows): validate absolute URIs as well def is_valid_resource(value): for type in RESOURCE_TAXONOMY: if value.startswith(type): return True return False pycadf-0.4.1/pycadf/reporterstep.py0000664000175400017540000000536612301705746020515 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from pycadf import cadftype from pycadf import identifier from pycadf import resource from pycadf import timestamp REPORTERSTEP_KEYNAME_ROLE = "role" REPORTERSTEP_KEYNAME_REPORTER = "reporter" REPORTERSTEP_KEYNAME_REPORTERID = "reporterId" REPORTERSTEP_KEYNAME_REPORTERTIME = "reporterTime" #REPORTERSTEP_KEYNAME_ATTACHMENTS = "attachments" REPORTERSTEP_KEYNAMES = [REPORTERSTEP_KEYNAME_ROLE, REPORTERSTEP_KEYNAME_REPORTER, REPORTERSTEP_KEYNAME_REPORTERID, REPORTERSTEP_KEYNAME_REPORTERTIME, #REPORTERSTEP_KEYNAME_ATTACHMENTS ] class Reporterstep(cadftype.CADFAbstractType): role = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_ROLE, lambda x: cadftype.is_valid_reporter_role(x)) reporter = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_REPORTER, (lambda x: isinstance(x, resource.Resource) and x.is_valid())) reporterId = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_REPORTERID, lambda x: identifier.is_valid(x)) reporterTime = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_REPORTERTIME, lambda x: timestamp.is_valid(x)) def __init__(self, role=cadftype.REPORTER_ROLE_MODIFIER, reporterTime=None, reporter=None, reporterId=None): # Reporterstep.role setattr(self, REPORTERSTEP_KEYNAME_ROLE, role) # Reporterstep.reportTime if reporterTime is not None: setattr(self, REPORTERSTEP_KEYNAME_REPORTERTIME, reporterTime) # Reporterstep.reporter if reporter is not None: setattr(self, REPORTERSTEP_KEYNAME_REPORTER, reporter) # Reporterstep.reporterId if reporterId is not None: setattr(self, REPORTERSTEP_KEYNAME_REPORTERID, reporterId) # self validate this cadf:Reporterstep type against schema def is_valid(self): return ( self._isset(REPORTERSTEP_KEYNAME_ROLE) and (self._isset(REPORTERSTEP_KEYNAME_REPORTER) ^ self._isset(REPORTERSTEP_KEYNAME_REPORTERID)) ) pycadf-0.4.1/pycadf/timestamp.py0000664000175400017540000000241112301705746017746 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. 
# # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import pytz import six TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" def get_utc_now(timezone=None): utc_datetime = pytz.utc.localize(datetime.datetime.utcnow()) if timezone is not None: try: utc_datetime = utc_datetime.astimezone(pytz.timezone(timezone)) except Exception: utc_datetime.strftime(TIME_FORMAT) return utc_datetime.strftime(TIME_FORMAT) # TODO(mrutkows): validate any cadf:Timestamp (type) record against # CADF schema def is_valid(value): if not isinstance(value, six.string_types): raise ValueError('Timestamp should be a String') return True pycadf-0.4.1/pycadf/identifier.py0000664000175400017540000000276512301705746020101 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # Author: Matt Rutkowski # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six import uuid from oslo.config import cfg CONF = cfg.CONF opts = [ cfg.StrOpt('namespace', default='openstack', help='namespace prefix for generated id'), ] CONF.register_opts(opts, group='audit') # TODO(mrutkows): make the namespace prefix configurable and have it resolve to # a full openstack namespace/domain value via some declaration (e.g. # "openstack:" == "http:\\www.openstack.org\")... def generate_uuid(): return norm_ns(str(uuid.uuid4())) def norm_ns(str_id): prefix = CONF.audit.namespace + ':' if CONF.audit.namespace else '' return prefix + str_id # TODO(mrutkows): validate any cadf:Identifier (type) record against # CADF schema. This would include schema validation as an optional parm. def is_valid(value): if not isinstance(value, six.string_types): raise TypeError return True pycadf-0.4.1/pycadf/audit/0000775000175400017540000000000012301706030016464 5ustar jenkinsjenkins00000000000000pycadf-0.4.1/pycadf/audit/api.py0000664000175400017540000002752312301705747017636 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # # Copyright 2013 IBM Corp. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ast import collections import os import re from oslo.config import cfg from six.moves import configparser from six.moves.urllib import parse as urlparse from pycadf import cadftaxonomy as taxonomy from pycadf import cadftype from pycadf import credential from pycadf import endpoint from pycadf import eventfactory as factory from pycadf import host from pycadf import identifier from pycadf import reason from pycadf import reporterstep from pycadf import resource from pycadf import tag from pycadf import timestamp #NOTE(gordc): remove cfg once we move over to this middleware version CONF = cfg.CONF opts = [cfg.StrOpt('api_audit_map', default='api_audit_map.conf', help='File containing mapping for api paths and ' 'service endpoints')] CONF.register_opts(opts, group='audit') AuditMap = collections.namedtuple('AuditMap', ['path_kw', 'custom_actions', 'service_endpoints', 'default_target_endpoint_type']) def _configure_audit_map(cfg_file): """Configure to recognize and map known api paths.""" path_kw = {} custom_actions = {} service_endpoints = {} default_target_endpoint_type = None if cfg_file: try: map_conf = configparser.SafeConfigParser() map_conf.readfp(open(cfg_file)) try: default_target_endpoint_type = \ map_conf.get('DEFAULT', 'target_endpoint_type') except configparser.NoOptionError: pass try: custom_actions = dict(map_conf.items('custom_actions')) except configparser.Error: pass try: path_kw = dict(map_conf.items('path_keywords')) except configparser.Error: pass try: service_endpoints = dict(map_conf.items('service_endpoints')) except configparser.Error: pass except configparser.ParsingError as err: raise PycadfAuditApiConfigError( 'Error parsing audit map file: %s' % err) return AuditMap(path_kw=path_kw, custom_actions=custom_actions, service_endpoints=service_endpoints, default_target_endpoint_type=default_target_endpoint_type) class ClientResource(resource.Resource): def __init__(self, project_id=None, **kwargs): super(ClientResource, 
class KeystoneCredential(credential.Credential):
    """Credential extended with the keystone identity_status attribute."""

    def __init__(self, identity_status=None, **kwargs):
        super(KeystoneCredential, self).__init__(**kwargs)
        if identity_status is not None:
            self.identity_status = identity_status


class PycadfAuditApiConfigError(Exception):
    """Error raised when pyCADF fails to configure correctly."""


class OpenStackAuditApi(object):
    """Build CADF audit events from WSGI requests/responses."""

    # Target service description assembled from a service-catalog entry.
    Service = collections.namedtuple('Service',
                                     ['id', 'name', 'type', 'admin_endp',
                                      'public_endp', 'private_endp'])

    def __init__(self, map_file=None):
        """Load the audit map.

        :param map_file: explicit path to the map file; when None the
            [audit] api_audit_map option is used, falling back to
            oslo.config's search path when that file does not exist.
        """
        if map_file is None:
            map_file = CONF.audit.api_audit_map
            if not os.path.exists(CONF.audit.api_audit_map):
                map_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
        self._MAP = _configure_audit_map(map_file)

    def _get_action(self, req):
        """Take a given Request, parse url path to calculate action type.

        Depending on req.method:

        if POST:

        - path ends with 'action', read the body and use as action;
        - path ends with known custom_action, take action from config;
        - request ends with known path, assume is create action;
        - request ends with unknown path, assume is update action.

        if GET:

        - request ends with known path, assume is list action;
        - request ends with unknown path, assume is read action.

        if PUT, assume update action.
        if DELETE, assume delete action.
        if HEAD, assume read action.
        """
        # Normalize away a trailing slash, then take the last path segment.
        path = req.path[:-1] if req.path.endswith('/') else req.path
        url_ending = path[path.rfind('/') + 1:]
        method = req.method

        # Custom actions may be keyed by 'segment/method' or plain segment.
        if url_ending + '/' + method.lower() in self._MAP.custom_actions:
            action = self._MAP.custom_actions[url_ending + '/' +
                                              method.lower()]
        elif url_ending in self._MAP.custom_actions:
            action = self._MAP.custom_actions[url_ending]
        elif method == 'POST':
            if url_ending == 'action':
                try:
                    # The first key of the JSON body names the sub-action
                    # (e.g. {'reboot': ...} -> 'update/reboot').
                    if req.json:
                        body_action = list(req.json.keys())[0]
                        action = taxonomy.ACTION_UPDATE + '/' + body_action
                    else:
                        action = taxonomy.ACTION_CREATE
                except ValueError:
                    # Body was not valid JSON.
                    action = taxonomy.ACTION_CREATE
            elif url_ending not in self._MAP.path_kw:
                action = taxonomy.ACTION_UPDATE
            else:
                action = taxonomy.ACTION_CREATE
        elif method == 'GET':
            if url_ending in self._MAP.path_kw:
                action = taxonomy.ACTION_LIST
            else:
                action = taxonomy.ACTION_READ
        elif method == 'PUT':
            action = taxonomy.ACTION_UPDATE
        elif method == 'DELETE':
            action = taxonomy.ACTION_DELETE
        elif method == 'HEAD':
            action = taxonomy.ACTION_READ
        else:
            action = taxonomy.UNKNOWN
        return action

    def _get_service_info(self, endp):
        """Build a Service tuple from one service-catalog entry.

        NOTE(review): only the first element of endp['endpoints'] is
        consulted — assumes a single-region catalog entry; confirm for
        multi-region deployments.
        """
        service = self.Service(
            type=self._MAP.service_endpoints.get(
                endp['type'],
                taxonomy.UNKNOWN),
            name=endp['name'],
            id=identifier.norm_ns(endp['endpoints'][0]['id']),
            admin_endp=endpoint.Endpoint(
                name='admin',
                url=endp['endpoints'][0]['adminURL']),
            private_endp=endpoint.Endpoint(
                name='private',
                url=endp['endpoints'][0]['internalURL']),
            public_endp=endpoint.Endpoint(
                name='public',
                url=endp['endpoints'][0]['publicURL']))

        return service

    def _build_typeURI(self, req, service_type):
        """Derive the target typeURI from the request path.

        Walks the path segments: known keywords are appended literally;
        a segment following a known keyword is replaced by the keyword's
        configured value (e.g. an instance id becomes its resource name).
        """
        type_uri = ''
        prev_key = None
        for key in re.split('/', req.path):
            if key in self._MAP.path_kw:
                type_uri += '/' + key
            elif prev_key in self._MAP.path_kw:
                type_uri += '/' + self._MAP.path_kw[prev_key]
            prev_key = key
        return service_type + type_uri

    def create_event(self, req, correlation_id):
        """Build a CADF activity event for *req*.

        NOTE(review): reads HTTP_X_* keys from req.environ — presumably
        set by the keystone auth middleware; verify in the deployment's
        pipeline.
        """
        action = self._get_action(req)
        initiator_host = host.Host(address=req.client_addr,
                                   agent=req.user_agent)
        # The service catalog header is a python-literal string, not JSON.
        catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
        service_info = self.Service(type=taxonomy.UNKNOWN,
                                    name=taxonomy.UNKNOWN,
                                    id=taxonomy.UNKNOWN, admin_endp=None,
                                    private_endp=None, public_endp=None)
        default_endpoint = None
        for endp in catalog:
            # Match the request host against the entry's admin/public URLs.
            admin_urlparse = urlparse.urlparse(
                endp['endpoints'][0]['adminURL'])
            public_urlparse = urlparse.urlparse(
                endp['endpoints'][0]['publicURL'])
            req_url = urlparse.urlparse(req.host_url)
            if (req_url.netloc == admin_urlparse.netloc
                    or req_url.netloc == public_urlparse.netloc):
                service_info = self._get_service_info(endp)
                break
            elif (self._MAP.default_target_endpoint_type and
                  endp['type'] == self._MAP.default_target_endpoint_type):
                default_endpoint = endp
        else:
            # for/else: no catalog entry matched the request host; fall
            # back to the configured default endpoint type, if any.
            if default_endpoint:
                service_info = self._get_service_info(default_endpoint)

        initiator = ClientResource(
            typeURI=taxonomy.ACCOUNT_USER,
            id=identifier.norm_ns(str(req.environ['HTTP_X_USER_ID'])),
            name=req.environ['HTTP_X_USER_NAME'],
            host=initiator_host,
            credential=KeystoneCredential(
                token=req.environ['HTTP_X_AUTH_TOKEN'],
                identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
            project_id=identifier.norm_ns(req.environ['HTTP_X_PROJECT_ID']))
        # Only expand the typeURI when the service type is known.
        target_typeURI = (self._build_typeURI(req, service_info.type)
                          if service_info.type != taxonomy.UNKNOWN
                          else service_info.type)
        target = resource.Resource(typeURI=target_typeURI,
                                   id=service_info.id,
                                   name=service_info.name)
        if service_info.admin_endp:
            target.add_address(service_info.admin_endp)
        if service_info.private_endp:
            target.add_address(service_info.private_endp)
        if service_info.public_endp:
            target.add_address(service_info.public_endp)
        event = factory.EventFactory().new_event(
            eventType=cadftype.EVENTTYPE_ACTIVITY,
            outcome=taxonomy.OUTCOME_PENDING,
            action=action,
            initiator=initiator,
            target=target,
            observer=resource.Resource(id='target'))
        event.requestPath = req.path_qs
        event.add_tag(tag.generate_name_value_tag('correlation_id',
                                                  correlation_id))
        return event

    def append_audit_event(self, req):
        """Append a CADF event to req.environ['CADF_EVENT']

        Also, stores model in request for future process and includes a
        CADF correlation id.
        """
        correlation_id = identifier.generate_uuid()
        req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
        event = self.create_event(req, correlation_id)
        # Keep the live model on the request so mod_audit_event can update
        # it after the response is known.
        setattr(req, 'cadf_model', event)
        req.environ['CADF_EVENT'] = event.as_dict()

    def mod_audit_event(self, req, response):
        """Modifies CADF event in request based on response.

        If no event exists, a new event is created.
        """
        if response:
            # 2xx/3xx map to success, everything else to failure.
            if response.status_int >= 200 and response.status_int < 400:
                result = taxonomy.OUTCOME_SUCCESS
            else:
                result = taxonomy.OUTCOME_FAILURE
        else:
            result = taxonomy.UNKNOWN
        if hasattr(req, 'cadf_model'):
            # Existing event: record that this observer modified it.
            req.cadf_model.add_reporterstep(
                reporterstep.Reporterstep(
                    role=cadftype.REPORTER_ROLE_MODIFIER,
                    reporter=resource.Resource(id='target'),
                    reporterTime=timestamp.get_utc_now()))
        else:
            self.append_audit_event(req)
        req.cadf_model.outcome = result
        if response:
            req.cadf_model.reason = \
                reason.Reason(reasonType='HTTP',
                              reasonCode=str(response.status_int))
        req.environ['CADF_EVENT'] = req.cadf_model.as_dict()
def generate_name_value_tag(name, value):
    """Compose a CADF tag of the form ``<name>?value=<value>``.

    :param name: tag name; must not be None
    :param value: tag value; must not be None
    :returns: the joined ``name?value=value`` string
    :raises ValueError: if either argument is None
    """
    # TODO(mrutkows): detailed test/concatenation of independent values
    # into a URI
    if name is None or value is None:
        raise ValueError('Invalid name and/or value. Values cannot be None')
    return "?value=".join((name, value))


# TODO(mrutkows): validate any Tag's name?value= format
def is_valid(value):
    """Return True if *value* is a string; raise TypeError otherwise."""
    # Imported lazily: six is only needed for the py2/py3 string-type check,
    # so the rest of this module works without it at import time.
    import six

    if isinstance(value, six.string_types):
        return True
    raise TypeError
import six

from pycadf import cadftype
from pycadf import utils

TYPE_URI_CRED = cadftype.CADF_VERSION_1_0_0 + 'credential'

CRED_KEYNAME_TYPE = "type"
CRED_KEYNAME_TOKEN = "token"

CRED_KEYNAMES = [CRED_KEYNAME_TYPE,
                 CRED_KEYNAME_TOKEN]


class Credential(cadftype.CADFAbstractType):
    """CADF Credential component: a (masked) token plus an optional type."""

    # Validated attributes — both must be strings when assigned.
    type = cadftype.ValidatorDescriptor(
        CRED_KEYNAME_TYPE, lambda x: isinstance(x, six.string_types))
    token = cadftype.ValidatorDescriptor(
        CRED_KEYNAME_TOKEN, lambda x: isinstance(x, six.string_types))

    def __init__(self, token, type=None):
        """Create a Credential.

        :param token: credential token; stored masked via utils.mask_value
            so raw secrets never appear in emitted events
        :param type: optional credential type string
        """
        # Credential.token
        setattr(self, CRED_KEYNAME_TOKEN, utils.mask_value(token))

        # Credential.type
        if type is not None:
            setattr(self, CRED_KEYNAME_TYPE, type)

    # TODO(mrutkows): validate this cadf:Credential type against schema
    def is_valid(self):
        """Return True when the mandatory token attribute has been set."""
        # TODO(mrutkows): validate specific attribute type/format
        return self._isset(CRED_KEYNAME_TOKEN)
import six

from pycadf import cadftype


class Path(cadftype.CADFAbstractType):
    """CADF Path type; currently only offers string validation."""

    def set_path_absolute(self):
        # TODO(mrutkows): validate absolute path format, else Type error
        raise NotImplementedError()

    def set_path_relative(self):
        # TODO(mrutkows); validate relative path format, else Type error
        raise NotImplementedError()

    # TODO(mrutkows): validate any cadf:Path (type) record against CADF schema
    @staticmethod
    def is_valid(value):
        """Return True if *value* is a string; raise TypeError otherwise."""
        if isinstance(value, six.string_types):
            return True
        raise TypeError


from pycadf import cadftype
from pycadf import event

ERROR_UNKNOWN_EVENTTYPE = 'Unknown CADF EventType requested on factory method'


class EventFactory(object):
    """Factory class to create different required attributes for
    the following CADF event types:
    'activity': for tracking any interesting system activities for audit
    'monitor': Events that carry Metrics and Measurements and support
               standards such as NIST
    'control': For audit events that are based upon (security) policies
               and reflect some policy decision.
    """

    def new_event(self, eventType=cadftype.EVENTTYPE_ACTIVITY, **kwargs):
        """Create and return an Event of the requested eventType.

        :param eventType: one of the CADF event type constants
        :param kwargs: forwarded verbatim to the Event constructor
        :raises ValueError: if eventType is not a recognized CADF type
        """
        # A base ('activity') event is constructed first; the requested
        # type is then validated and stamped onto it.
        evt = event.Event(**kwargs)

        if not cadftype.is_valid_eventType(eventType):
            raise ValueError(ERROR_UNKNOWN_EVENTTYPE)
        evt.eventType = eventType

        # NOTE(mrutkows): CADF is only being used for basic 'activity'
        # auditing (on APIs). If support for standard (NIST) 'monitor' or
        # 'control' messages is added, dedicated Event subclasses would be
        # constructed here via an if/elif on eventType.
        return evt