panko-7.0.0/bindep.txt
mongodb [platform:dpkg]
mongodb-server [platform:rpm]
mysql-server
mysql-client [platform:dpkg]
mysql [platform:rpm]
postgresql
postgresql-client [platform:dpkg]
postgresql-devel [platform:rpm]
postgresql-server [platform:rpm]
libpq-dev [platform:dpkg]
python37 [platform:rpm py37]
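# Illustrative usage (an assumption, not part of this file's contract):
# with the bindep tool installed (`pip install bindep`), running
# `bindep -b` in this directory lists the packages above that are missing
# locally for the detected platform profile; extra profiles such as py37
# can be activated as arguments, e.g. `bindep -b py37`.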

panko-7.0.0/PKG-INFO
Metadata-Version: 1.1
Name: panko
Version: 7.0.0
Summary: Event storage publisher and API for Ceilometer
Home-page: https://docs.openstack.org/panko/latest/
Author: OpenStack
Author-email: openstack-dev@lists.openstack.org
License: UNKNOWN
Description: panko
=====
-------------
Documentation
-------------
Documentation for the project can be found at:
https://docs.openstack.org/panko/latest/
Launchpad Projects
------------------
- Server: https://launchpad.net/panko
- Client: https://launchpad.net/python-pankoclient
Code Repository
---------------
- Server: https://github.com/openstack/panko
- Client: https://github.com/openstack/python-pankoclient
Bug Tracking
------------
- Bugs: https://bugs.launchpad.net/panko
IRC
---
IRC Channel: #openstack-telemetry on Freenode.
Release notes
-------------
Release notes: https://docs.openstack.org/releasenotes/panko/
Platform: UNKNOWN
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Information Technology
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Topic :: System :: Monitoring

panko-7.0.0/MAINTAINERS
= Generalist Code Reviewers =
The current members of panko-core are listed here:
https://launchpad.net/~panko-drivers/+members#active
This group can +2 and approve patches in Panko. However, they may
choose to seek feedback from the appropriate specialist maintainer before
approving a patch if it is in any way controversial or risky.
= IRC handles of maintainers =
gordc
jd__
liusheng
pradk
sileht

panko-7.0.0/README.rst
panko
=====
-------------
Documentation
-------------
Documentation for the project can be found at:
https://docs.openstack.org/panko/latest/
Launchpad Projects
------------------
- Server: https://launchpad.net/panko
- Client: https://launchpad.net/python-pankoclient
Code Repository
---------------
- Server: https://github.com/openstack/panko
- Client: https://github.com/openstack/python-pankoclient
Bug Tracking
------------
- Bugs: https://bugs.launchpad.net/panko
IRC
---
IRC Channel: #openstack-telemetry on Freenode.
Release notes
-------------
Release notes: https://docs.openstack.org/releasenotes/panko/

panko-7.0.0/.coveragerc
[run]
branch = True
source = panko
omit = panko/tests/*
[report]
ignore_errors = True

panko-7.0.0/tools/make_test_event_data.py
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command line tool for creating event test data for Panko.
Usage:
Generate test data, e.g. for the default time span:
. .tox/py27/bin/activate
./tools/make_test_event_data.py --event_types 3
"""
import argparse
import datetime
import random
from oslo_utils import timeutils
from oslo_utils import uuidutils
from panko import service
from panko import storage
from panko.storage import models
def make_test_data(conn, start, end, interval, event_types):
# Compute start and end timestamps for the new data.
if isinstance(start, datetime.datetime):
timestamp = start
else:
timestamp = timeutils.parse_strtime(start)
if not isinstance(end, datetime.datetime):
end = timeutils.parse_strtime(end)
increment = datetime.timedelta(minutes=interval)
print('Adding new events')
n = 0
while timestamp <= end:
data = []
for i in range(event_types):
traits = [models.Trait('id1_%d' % i, 1, uuidutils.generate_uuid()),
models.Trait('id2_%d' % i, 2, random.randint(1, 10)),
models.Trait('id3_%d' % i, 3, random.random()),
models.Trait('id4_%d' % i, 4, timestamp)]
data.append(models.Event(uuidutils.generate_uuid(),
'event_type%d' % i,
timestamp,
traits,
{}))
n += 1
conn.record_events(data)
timestamp = timestamp + increment
print('Added %d new events' % n)
def main():
conf = service.prepare_service()
parser = argparse.ArgumentParser(
description='generate event data',
)
parser.add_argument(
'--interval',
default=10,
type=int,
help='The period between events, in minutes.',
)
parser.add_argument(
'--start',
default=31,
type=int,
help='The number of days in the past to start timestamps.',
)
parser.add_argument(
'--end',
default=2,
type=int,
help='The number of days into the future to continue timestamps.',
)
parser.add_argument(
'--event_types',
default=3,
type=int,
help='The number of unique event_types.',
)
args = parser.parse_args()
# Connect to the event database
conn = storage.get_connection_from_config(conf)
# Compute the correct time span
start = datetime.datetime.utcnow() - datetime.timedelta(days=args.start)
end = datetime.datetime.utcnow() + datetime.timedelta(days=args.end)
make_test_data(conn=conn,
start=start,
end=end,
interval=args.interval,
event_types=args.event_types)
if __name__ == '__main__':
main()

panko-7.0.0/tools/__init__.py

panko-7.0.0/test-requirements.txt
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
coverage>=3.6 # Apache-2.0
elasticsearch>=1.3.0 # Apache-2.0
fixtures<2.0,>=1.3.1 # Apache-2.0/BSD
happybase!=0.7,>=0.5,<1.0.0;python_version=='2.7' # MIT
mock>=1.2 # BSD
PyMySQL>=0.6.2 # MIT License
# Docs Requirements
oslotest>=1.10.0 # Apache-2.0
psycopg2>=2.5 # LGPL/ZPL
pymongo!=3.1,>=3.0.2 # Apache-2.0
python-subunit>=0.0.18 # Apache-2.0/BSD
sphinx>=1.6.2 # BSD
openstackdocstheme>=1.11.0 # Apache-2.0
sphinxcontrib-httpdomain # BSD
sphinxcontrib-pecanwsme>=0.8 # Apache-2.0
stestr>=2.0.0 # Apache-2.0
testtools>=1.4.0 # MIT
gabbi>=1.11.0 # Apache-2.0
os-testr>=0.4.1 # Apache-2.0
WebTest>=2.0 # MIT
pifpaf>=0.0.11
reno
sqlalchemy-utils

panko-7.0.0/.stestr.conf
[DEFAULT]
test_path=${OS_TEST_PATH:-./panko/tests}
top_dir=./
group_regex=(gabbi\.(suitemaker|driver)\.test_gabbi_(?:prefix_|)[^_]+)_

panko-7.0.0/doc/requirements.txt
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
sphinx!=1.6.6,!=1.6.7,>=1.6.2,<2.0.0;python_version=='2.7' # BSD
sphinx!=1.6.6,!=1.6.7,!=2.1.0,>=1.6.2;python_version>='3.4' # BSD
openstackdocstheme>=1.20.0 # Apache-2.0
sphinxcontrib-pecanwsme>=0.8.0 # Apache-2.0
sphinxcontrib-httpdomain>=1.6.1 # BSD
reno>=2.7.0 # Apache-2.0

panko-7.0.0/doc/source/contributor/gmr.rst
..
Copyright (c) 2014 OpenStack Foundation
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Guru Meditation Reports
=======================
Panko contains a mechanism whereby developers and system administrators
can generate a report about the state of a running Panko executable. This
report is called a *Guru Meditation Report* (*GMR* for short).
Generating a GMR
----------------
A *GMR* can be generated by sending the *USR1* signal to any Panko process
with support (see below). The *GMR* will then be output to standard error for
that particular process.
For example, suppose that ``panko-api`` has process id ``8675``, and
was run with ``2>/var/log/panko/panko-api.log``. Then,
``kill -USR1 8675`` will trigger the Guru Meditation report to be printed to
``/var/log/panko/panko-api.log``.
Structure of a GMR
------------------
The *GMR* is designed to be extensible; any particular executable may add its
own sections. However, the base *GMR* consists of several sections:
Package
Shows information about the package to which this process belongs, including
version information
Threads
Shows stack traces and thread ids for each of the threads within this process
Green Threads
Shows stack traces for each of the green threads within this process (green
threads don't have thread ids)
Configuration
Lists all the configuration options currently accessible via the CONF object
for the current process
Adding Support for GMRs to New Executables
------------------------------------------
Adding support for a *GMR* to a given executable is fairly easy.
First import the module (provided by the oslo.reports library), as well as the
Panko version module:
.. code-block:: python
from oslo_reports import guru_meditation_report as gmr
from panko import version
Then, register any additional sections (optional):
.. code-block:: python
TextGuruMeditation.register_section('Some Special Section',
some_section_generator)
Finally (under main), before running the "main loop" of the executable (usually
``service.server(server)`` or something similar), register the *GMR* hook:
.. code-block:: python
TextGuruMeditation.setup_autorun(version)
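
A minimal sketch putting these pieces together (the service wiring shown
here is illustrative, not Panko's actual entry point):

.. code-block:: python

    from oslo_reports import guru_meditation_report as gmr

    from panko import version


    def main():
        # Register the USR1 handler before entering the main loop so that
        # `kill -USR1 <pid>` dumps a report to stderr at any point.
        gmr.TextGuruMeditation.setup_autorun(version)
        run_main_loop()  # hypothetical stand-in for service.server(server)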
Extending the GMR
-----------------
As mentioned above, additional sections can be added to the GMR for a
particular executable. For more information, see the inline documentation
about oslo.reports:
`oslo.reports <https://docs.openstack.org/oslo.reports/latest/>`_

panko-7.0.0/doc/source/contributor/contributing.rst
..
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. _contributing:
=====================
Contributing to Panko
=====================
Panko follows the same workflow as other OpenStack projects. To start
contributing to Panko, please follow the workflow found here_.
.. _here: https://wiki.openstack.org/wiki/Gerrit_Workflow
Project Hosting Details
=======================
:Bug tracker: https://bugs.launchpad.net/panko
:Mailing list: http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-dev (prefix subjects with ``[Panko]`` for faster responses)
:Contribution Guide: https://docs.openstack.org/panko/latest/contributor/index.html
:Code Hosting: https://opendev.org/openstack/panko/
:Code Review: https://review.opendev.org/#/q/status:open+project:openstack/panko,n,z

panko-7.0.0/doc/source/contributor/testing.rst
..
Copyright 2012 New Dream Network, LLC (DreamHost)
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
=================
Running the Tests
=================
Panko includes an extensive set of automated unit tests which are
run through tox_.
1. Install ``tox``::
$ sudo pip install tox
2. On Ubuntu install ``mongodb`` and ``libmysqlclient-dev`` packages::
$ sudo apt-get install mongodb
$ sudo apt-get install libmysqlclient-dev
For Fedora 20 there is no ``libmysqlclient-dev`` package, so you'll need
to install ``mariadb-devel.x86_64`` (or ``mariadb-devel.i386``) instead::
$ sudo yum install mongodb
$ sudo yum install mariadb-devel.x86_64
3. Install the test dependencies::
$ sudo pip install -r /opt/stack/panko/test-requirements.txt
4. Run the unit and code-style tests::
$ cd /opt/stack/panko
$ tox -e py27,pep8
As tox is a wrapper around testr, it also accepts the same flags as testr.
See the `testr documentation`_ for details about these additional flags.
.. _testr documentation: https://testrepository.readthedocs.org/en/latest/MANUAL.html
Use a double hyphen to pass options to testr. For example, to run only tests under tests/api/v2::
$ tox -e py27 -- api.v2
To debug tests (i.e. break into the pdb debugger), you can use the ``debug`` tox
environment. Here's an example, passing the name of a test since you'll
normally only want to run the test that hits your breakpoint::
$ tox -e debug panko.tests.test_bin
For reference, the ``debug`` tox environment implements the instructions
here: https://wiki.openstack.org/wiki/Testr#Debugging_.28pdb.29_Tests
5. There is a growing suite of tests which use a tool called `gabbi`_ to
test and validate the behavior of the Panko API. These tests are run
when using the usual ``py27`` tox target but if desired they can be run by
themselves::
$ tox -e gabbi
The YAML files used to drive the gabbi tests can be found in
``panko/tests/functional/gabbi/gabbits``. If you are adding to or adjusting the
API you should consider adding tests here.
.. _gabbi: https://gabbi.readthedocs.org/
.. seealso::
* tox_
.. _tox: http://tox.testrun.org/latest/

panko-7.0.0/doc/source/contributor/index.rst
==================
Contribution Guide
==================
In the Contribution Guide, you will find documented policies for
developing with Panko. This includes the processes we use for
bugs, contributor onboarding, core reviewer memberships, and other
procedural items.
.. toctree::
:maxdepth: 2
contributing
testing
gmr

panko-7.0.0/doc/source/configuration/sample_policy.rst
===================
Panko Sample Policy
===================
The following is a sample panko policy file that has been auto-generated
from default policy values in code. If you're using the default policies, then
the maintenance of this file is not necessary, and it should not be copied into
a deployment. Doing so will result in duplicate policy definitions. It is here
to help explain which policy operations protect specific panko APIs, but
copying it into a deployment is not recommended unless you plan to provide a
different policy for an operation that is not the default.
The sample policy file can also be viewed in
:download:`file form <../_static/panko.policy.yaml.sample>`.
.. literalinclude:: ../_static/panko.policy.yaml.sample

panko-7.0.0/doc/source/webapi/index.rst
=======
Web API
=======
.. toctree::
:maxdepth: 2
v2
You can get the list of API versions by requesting the endpoint root path. For example::
curl -H "X-AUTH-TOKEN: fa2ec18631f94039a5b9a8b4fe8f56ad" http://127.0.0.1:8977
Sample response::
{
"versions": {
"values": [
{
"id": "v2",
"links": [
{
"href": "http://127.0.0.1:8977/v2",
"rel": "self"
},
{
"href": "https://docs.openstack.org/",
"rel": "describedby",
"type": "text/html"
}
],
"media-types": [
{
"base": "application/json",
"type": "application/vnd.openstack.telemetry-v2+json"
},
{
"base": "application/xml",
"type": "application/vnd.openstack.telemetry-v2+xml"
}
],
"status": "stable",
"updated": "2013-02-13T00:00:00Z"
}
]
}
}
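
A minimal Python equivalent of the curl request above (illustrative; it
assumes the requests library and a valid token)::

    import requests

    resp = requests.get('http://127.0.0.1:8977',
                        headers={'X-AUTH-TOKEN': '<token>'})
    for version in resp.json()['versions']['values']:
        print(version['id'], version['status'])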

panko-7.0.0/doc/source/webapi/v2.rst
.. docbookrestapi
==========
V2 Web API
==========
Capabilities
============
The Capabilities API allows you to discover directly which features of the
V2 API, including the selectable aggregate functions, are supported by the
currently configured storage driver. A capabilities query returns a
flattened dictionary of properties with associated boolean values - a
'False' or absent value means that the corresponding feature is not
available in the backend.
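For example, a backend that supports only simple event queries might report
something like this (illustrative output; the exact keys depend on the
configured driver)::

    {
        "api": {"events:query:simple": true},
        "event_storage": {"storage:production_ready": true}
    }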
.. rest-controller:: panko.api.controllers.v2.capabilities:CapabilitiesController
:webprefix: /v2/capabilities
.. autotype:: panko.api.controllers.v2.capabilities.Capabilities
:members:
Events and Traits
=================
.. rest-controller:: panko.api.controllers.v2.events:EventTypesController
:webprefix: /v2/event_types
.. rest-controller:: panko.api.controllers.v2.events:TraitsController
:webprefix: /v2/event_types/(event_type)/traits
.. rest-controller:: panko.api.controllers.v2.events:EventsController
:webprefix: /v2/events
.. autotype:: panko.api.controllers.v2.events.Event
:members:
.. autotype:: panko.api.controllers.v2.events.Trait
:members:
.. autotype:: panko.api.controllers.v2.events.TraitDescription
:members:
Filtering Queries
=================
See :ref:`api-queries` for how to query the API.
.. autotype:: panko.api.controllers.v2.events.EventQuery
:members:

panko-7.0.0/doc/source/conf.py
#
# Panko documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 27 11:38:59 2011.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings.
# They can be extensions coming with Sphinx (named 'sphinx.ext.*')
# or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.pecanwsme.rest',
'sphinxcontrib.httpdomain',
'openstackdocstheme',
'oslo_policy.sphinxpolicygen'
]
policy_generator_config_file = '../../etc/panko/panko-policy-generator.conf'
sample_policy_basename = '_static/panko'
wsme_protocols = ['restjson', 'restxml']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# openstackdocstheme options
repository_name = 'openstack/panko'
bug_project = 'panko'
bug_tag = ''
project = u'Panko'
copyright = u'2012-2015, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['**/#*', '**~', '**/#*#']
# The reST default role (used for this markup: `text`)
# to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
primary_domain = 'py'
nitpicky = False
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme_path = ['.']
# html_theme = '_theme'
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {
# "nosidebar": "false"
#}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%Y-%m-%d %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Pankodoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
'makeindex': '',
'printindex': '',
'preamble': r'\setcounter{tocdepth}{3}',
'maxlistdepth': '10',
}
# Disable usage of xindy https://bugzilla.redhat.com/show_bug.cgi?id=1643664
latex_use_xindy = False
# Disable smartquotes, they don't work in latex
smartquotes_excludes = {'builders': ['latex']}
latex_domain_indices = False
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'doc-panko.tex', u'Panko Documentation',
u'OpenStack Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'panko', u'Panko Documentation',
[u'OpenStack'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Panko', u'Panko Documentation', u'OpenStack',
'Panko', 'One line description of project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output --------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Panko'
epub_author = u'OpenStack'
epub_publisher = u'OpenStack'
epub_copyright = u'2012-2015, OpenStack'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True

panko-7.0.0/doc/source/api/index.rst
=================
Source Code Index
=================
.. toctree::
:maxdepth: 1
autoindex

panko-7.0.0/doc/source/index.rst
..
Copyright 2012 Nicolas Barcet for Canonical
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
=================================
Welcome to Panko's documentation!
=================================
The Panko project is an event storage service that provides the ability to
store and query event data generated by Ceilometer and, potentially, other
sources.
Panko is a component of the Telemetry project.
This documentation offers information on how Panko works and how to
contribute to the project.
Overview
========
.. toctree::
:maxdepth: 2
install/index
contributor/index
webapi/index
Sample Configuration Files
==========================
.. toctree::
:maxdepth: 2
configuration/sample_policy
.. only:: html
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

panko-7.0.0/doc/source/install/manual.rst
..
Copyright 2012 Nicolas Barcet for Canonical
2013 New Dream Network, LLC (DreamHost)
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. _installing_manually:
===================
Installing Manually
===================
Storage Backend Installation
============================
This step is a prerequisite for the collector and API services. You may use
one of the database backends listed below to store Panko data.
MongoDB
-------
Follow the instructions to install the MongoDB_ package for your operating
system, then start the service. The required minimum version of MongoDB is
2.4.x. You will also need to have pymongo_ 2.4 installed.
To use MongoDB as the storage backend, change the 'database' section in
panko.conf as follows::
[database]
connection = mongodb://username:password@host:27017/panko
SQLAlchemy-supported DBs
------------------------
You may alternatively use any SQLAlchemy-supported DB such as
`PostgreSQL` or `MySQL`.
To use MySQL as the storage backend, change the 'database' section in
panko.conf as follows::
[database]
connection = mysql+pymysql://username:password@host/panko?charset=utf8
.. _MongoDB: http://www.mongodb.org/
.. _pymongo: https://pypi.org/project/pymongo/
Installing the API Server
=========================
.. index::
double: installing; API
.. note::
The API server needs to be able to talk to keystone and panko's
database. It is only required if you choose to store data in a legacy
database or if you inject new samples via the REST API.
1. Clone the panko git repository to the server::
$ cd /opt/stack
$ git clone https://opendev.org/openstack/panko.git
2. As a user with ``root`` permissions or ``sudo`` privileges, run the
panko installer::
$ cd panko
$ sudo python setup.py install
3. Create a service for panko in keystone::
$ openstack service create event --name panko \
--description "Panko Service"
4. Create an endpoint in keystone for panko::
$ openstack endpoint create $PANKO_SERVICE \
--region RegionOne \
--publicurl "http://$SERVICE_HOST:8977" \
--adminurl "http://$SERVICE_HOST:8977" \
--internalurl "http://$SERVICE_HOST:8977"
.. note::
PANKO_SERVICE is the id of the service created by the first command
and SERVICE_HOST is the host where the Panko API is running. The
default port for the panko API is 8977. If the port value
has been customized, adjust accordingly.
5. Choose and start the API server.
Panko includes the ``panko-api`` command. This can be
used to run the API server. For smaller or proof-of-concept
installations this is a reasonable choice. For larger installations it
is strongly recommended to install the API server in a WSGI host
such as mod_wsgi (see :doc:`mod_wsgi`). Doing so will provide better
performance and more options for making adjustments specific to the
installation environment.
If you are using the ``panko-api`` command it can be started
as::
$ panko-api
.. note::
The development version of the API server logs to stderr, so you
may want to run this step using a screen session or other tool for
maintaining a long-running program in the background.

panko-7.0.0/doc/source/install/development.rst
..
Copyright 2012 Nicolas Barcet for Canonical
2013 New Dream Network, LLC (DreamHost)
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
==============================
Installing development sandbox
==============================
Configuring devstack
====================
.. index::
double: installing; devstack
1. Download devstack_.
2. Create a ``local.conf`` file as input to devstack.
3. The panko services are not enabled by default, so they must be
enabled in ``local.conf`` before running ``stack.sh``.
This example ``local.conf`` file shows all of the settings required for
panko::
[[local|localrc]]
# Enable the Panko devstack plugin
enable_plugin panko https://opendev.org/openstack/panko.git
.. _devstack: http://www.devstack.org/

panko-7.0.0/doc/source/install/uwsgi.rst
=============================
Installing the API with uwsgi
=============================
Panko comes with a few example files for configuring the API
service to run with ``uwsgi``.
app.wsgi
========
The file ``panko/api/app.wsgi`` sets up the V2 API WSGI
application. The file is installed with the rest of the Panko
application code, and should not need to be modified.
Example of uwsgi configuration file
===================================
Create a ``panko-uwsgi.ini`` file::
[uwsgi]
http = 0.0.0.0:8041
wsgi-file = /panko/api/app.wsgi
plugins = python
# This is running standalone
master = true
# Set die-on-term & exit-on-reload so that uwsgi shuts down
exit-on-reload = true
die-on-term = true
# uwsgi recommends this to prevent thundering herd on accept.
thunder-lock = true
# Override the default 4k header size (mainly for large keystone tokens).
buffer-size = 65535
enable-threads = true
# Set the number of threads, typically to the value reported by nproc
threads = 8
# Make sure the client doesn't try to re-use the connection.
add-header = Connection: close
# Set uid and gid to an appropriate user and group on your server. In many
# installations ``panko`` will be correct.
uid = panko
gid = panko
Then start the uwsgi server::
uwsgi ./panko-uwsgi.ini
Or start in background with::
uwsgi -d ./panko-uwsgi.ini
Configuring with uwsgi-plugin-python on Debian/Ubuntu
=====================================================
Install the Python plugin for uwsgi::
apt-get install uwsgi-plugin-python
Run the server::
uwsgi_python --master --die-on-term --logto /var/log/panko/panko-api.log \
--http-socket :8042 --wsgi-file /usr/share/panko-common/app.wsgi

panko-7.0.0/doc/source/install/index.rst
..
Copyright 2013 New Dream Network, LLC (DreamHost)
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. _install:
================
Installing Panko
================
.. toctree::
:maxdepth: 2
development
manual
mod_wsgi
uwsgi

panko-7.0.0/doc/source/install/mod_wsgi.rst
..
Copyright 2013 New Dream Network, LLC (DreamHost)
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
==================================
Installing the API behind mod_wsgi
==================================
Panko comes with a few example files for configuring the API
service to run behind Apache with ``mod_wsgi``.
app.wsgi
========
The file ``panko/api/app.wsgi`` sets up the V2 API WSGI
application. The file is installed with the rest of the panko
application code, and should not need to be modified.
etc/apache2/panko
=================
The ``etc/apache2/panko`` file contains example settings that
work with a copy of panko installed via devstack.
.. literalinclude:: ../../../etc/apache2/panko
1. On deb-based systems copy or symlink the file to
``/etc/apache2/sites-available``. For rpm-based systems the file will go in
``/etc/httpd/conf.d``.
2. Modify the ``WSGIDaemonProcess`` directive to set the ``user`` and
``group`` values to an appropriate user on your server. In many
installations ``panko`` will be correct.
3. Enable the panko site. On deb-based systems::
$ a2ensite panko
$ service apache2 reload
On rpm-based systems::
$ service httpd reload

panko-7.0.0/doc/Makefile
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " wadl to build a WADL file for api.openstack.org"
clean:
-rm -rf $(BUILDDIR)/*
html: check-dependencies
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: check-dependencies
check-dependencies:
@python -c 'import sphinxcontrib.autohttp.flask' >/dev/null 2>&1 || (echo "ERROR: Missing Sphinx dependencies. Run: pip install sphinxcontrib-httpdomain" && exit 1)
@ld -ltidy >/dev/null 2>&1 || (echo "ERROR: Missing libtidy dependencies. Please install libtidy with your system package manager" && exit 1)
wadl:
$(SPHINXBUILD) -b docbook $(ALLSPHINXOPTS) $(BUILDDIR)/wadl
@echo
@echo "Build finished. The WADL pages are in $(BUILDDIR)/wadl."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Panko.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Panko.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/Panko"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Panko"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."

panko-7.0.0/setup.cfg
[metadata]
name = panko
summary = Event storage publisher and API for Ceilometer
description-file =
README.rst
author = OpenStack
author-email = openstack-dev@lists.openstack.org
home-page = https://docs.openstack.org/panko/latest/
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Topic :: System :: Monitoring
[global]
setup-hooks =
pbr.hooks.setup_hook
[files]
packages =
panko
[entry_points]
panko.storage =
es = panko.storage.impl_elasticsearch:Connection
log = panko.storage.impl_log:Connection
mongodb = panko.storage.impl_mongodb:Connection
mysql = panko.storage.impl_sqlalchemy:Connection
postgresql = panko.storage.impl_sqlalchemy:Connection
sqlite = panko.storage.impl_sqlalchemy:Connection
hbase = panko.storage.impl_hbase:Connection
console_scripts =
panko-dbsync = panko.cmd.storage:dbsync
panko-expirer = panko.cmd.storage:expirer
wsgi_scripts =
panko-api = panko.api.app:build_wsgi_app
ceilometer.event.publisher =
panko = panko.publisher.database:DatabasePublisher
oslo.config.opts =
panko = panko.opts:list_opts
oslo.policy.policies =
panko = panko.policies:list_policies
oslo.config.opts.defaults =
panko = panko.conf.defaults:set_cors_middleware_defaults
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source
[pbr]
warnerrors = true
autodoc_index_modules = true
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = panko/locale/panko.pot
[compile_catalog]
directory = panko/locale
domain = panko
[update_catalog]
domain = panko
output_dir = panko/locale
input_file = panko/locale/panko.pot
[egg_info]
tag_build =
tag_date = 0

panko-7.0.0/requirements.txt
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
debtcollector>=1.2.0 # Apache-2.0
tenacity>=3.1.0 # Apache-2.0
keystonemiddleware!=4.1.0,!=4.19.0,>=4.0.0 # Apache-2.0
lxml>=2.3 # BSD
oslo.db>=4.1.0 # Apache-2.0
oslo.config>=3.9.0 # Apache-2.0
oslo.i18n>=2.1.0 # Apache-2.0
oslo.log>=1.14.0 # Apache-2.0
oslo.policy>=0.5.0 # Apache-2.0
oslo.reports>=0.6.0 # Apache-2.0
Paste
PasteDeploy>=1.5.0 # MIT
pbr>=2.0.0 # Apache-2.0
pecan>=1.0.0 # BSD
oslo.middleware>=3.10.0 # Apache-2.0
oslo.serialization>=2.25.0 # Apache-2.0
oslo.utils>=3.5.0 # Apache-2.0
PyYAML>=3.1.0 # MIT
six>=1.9.0 # MIT
SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
stevedore>=1.9.0 # Apache-2.0
WebOb>=1.2.3 # MIT
WSME>=0.8 # MIT
alembic>=0.7.6,!=0.8.1,!=0.9.0
# NOTE(jd) We do not import it directly, but WSME datetime string parsing
# behaviour changes when this library is installed
python-dateutil>=2.4.2 # BSD
happybase!=0.7,!=1.0.0;python_version=='2.7' # MIT
pymongo!=3.1 # Apache-2.0
elasticsearch<3.0.0 # Apache-2.0

panko-7.0.0/.zuul.yaml
- project:
templates:
- openstack-python-jobs
- openstack-python3-train-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
check:
jobs:
- telemetry-dsvm-integration
- telemetry-dsvm-integration-ipv6-only
# TripleO jobs that deploy Telemetry.
# Note we don't use a project-template here, so it's easier
# to disable voting on one specific job if things go wrong.
# tripleo-ci-centos-7-scenario00(1|2)-multinode-oooq will only
# run on stable/pike while the -container will run in Queens
# and beyond.
# If you need any support to debug these jobs in case of
# failures, please reach us on #tripleo IRC channel.
- tripleo-ci-centos-7-scenario001-multinode-oooq:
voting: false
- tripleo-ci-centos-7-scenario001-standalone:
voting: false
- tripleo-ci-centos-7-scenario002-multinode-oooq:
voting: false
- tripleo-ci-centos-7-scenario002-standalone:
voting: false
gate:
jobs:
- telemetry-dsvm-integration
- telemetry-dsvm-integration-ipv6-only

panko-7.0.0/HACKING.rst
Panko Style Commandments
========================
- Step 1: Read the OpenStack Style Commandments
https://docs.openstack.org/hacking/latest/
- Step 2: Read on
Panko Specific Commandments
---------------------------
- [C301] LOG.warn() is not allowed. Use LOG.warning()
- [C302] Deprecated library function os.popen()
Creating Unit Tests
-------------------
For every new feature, unit tests should be created that both test and
(implicitly) document the usage of said feature. If submitting a patch for a
bug that had no unit test, a new passing unit test should be added. If a
submitted bug fix does have a unit test, be sure to add a new one that fails
without the patch and passes with the patch.
All unittest classes must ultimately inherit from testtools.TestCase.
All setUp and tearDown methods must upcall using the super() method.
tearDown methods should be avoided and addCleanup calls should be preferred.
Never manually create tempfiles. Always use the tempfile fixtures from
the fixture library to ensure that they are cleaned up.

panko-7.0.0/panko/policies/base.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
ROLE_ADMIN = 'role:admin'
UNPROTECTED = ''
rules = [
policy.RuleDefault(
name='context_is_admin',
check_str=ROLE_ADMIN
)
]
def list_rules():
return rules

panko-7.0.0/panko/policies/telemetry.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from panko.policies import base
TELEMETRY_EVENTS = 'telemetry:events:%s'
rules = [
policy.DocumentedRuleDefault(
name=TELEMETRY_EVENTS % 'index',
check_str=base.UNPROTECTED,
description='Return all events matching the query filters.',
operations=[
{
'path': '/v2/events',
'method': 'GET'
}
]
),
policy.DocumentedRuleDefault(
name=TELEMETRY_EVENTS % 'show',
check_str=base.UNPROTECTED,
description='Return a single event with the given message id.',
operations=[
{
'path': '/v2/events/{message_id}',
'method': 'GET'
}
]
)
]
def list_rules():
return rules
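# Illustrative override (not part of the upstream defaults): a deployment
# that wants to restrict event access to admins could set, in policy.yaml:
#
#     "telemetry:events:index": "rule:context_is_admin"
#     "telemetry:events:show": "rule:context_is_admin"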

panko-7.0.0/panko/policies/__init__.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from panko.policies import base
from panko.policies import segregation
from panko.policies import telemetry
def list_policies():
return itertools.chain(
base.list_rules(),
segregation.list_rules(),
telemetry.list_rules()
)
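# Note: setup.cfg registers list_policies() under the oslo.policy.policies
# entry point, which is how oslopolicy-sample-generator (driven by
# etc/panko/panko-policy-generator.conf) builds the sample policy file
# referenced by the docs.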

panko-7.0.0/panko/policies/segregation.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
rules = [
policy.DocumentedRuleDefault(
name='segregation',
check_str='rule:context_is_admin',
description='Return the user and project the request '
'should be limited to.',
operations=[
{
'path': '/v2/events',
'method': 'GET'
},
{
'path': '/v2/events/{message_id}',
'method': 'GET'
}
]
)
]
def list_rules():
return rules

panko-7.0.0/panko/publisher/__init__.py

panko-7.0.0/panko/publisher/database.py
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from panko import service
from panko import storage
class DatabasePublisher(object):
"""Publisher class for recording event data into database.
The publisher class which records each event into a database configured
in Ceilometer configuration file.
To enable this publisher, the following section needs to be present in
panko.conf file
[database]
connection = mysql+pymysql://panko:password@127.0.0.1/panko?charset=utf8
Then, panko:// should be added to Ceilometer's event_pipeline.yaml
"""
def __init__(self, ceilo_conf, parsed_url):
conf = service.prepare_service([], share=True)
self.conn = storage.get_connection_from_config(conf)
def publish_events(self, events):
if not isinstance(events, list):
events = [events]
self.conn.record_events(events)
panko-7.0.0/panko/utils.py 0000664 0001750 0001750 00000007601 13551610116 015475 0 ustar zuul zuul 0000000 0000000 # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import calendar
import copy
import datetime
import decimal
from oslo_utils import timeutils
from oslo_utils import units
import six
def decode_unicode(input):
"""Decode the unicode of the message, and encode it into utf-8."""
if isinstance(input, dict):
temp = {}
# If the input data is a dict, create an equivalent dict with a
# predictable insertion order to avoid inconsistencies in the
# message signature computation for equivalent payloads modulo
# ordering
for key, value in sorted(six.iteritems(input)):
temp[decode_unicode(key)] = decode_unicode(value)
return temp
elif isinstance(input, (tuple, list)):
# When doing a pair of JSON encode/decode operations to the tuple,
# the tuple would become list. So we have to generate the value as
# list here.
return [decode_unicode(element) for element in input]
elif six.PY2 and isinstance(input, six.text_type):
return input.encode('utf-8')
elif six.PY3 and isinstance(input, six.binary_type):
return input.decode('utf-8')
else:
return input
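# Illustrative behaviour on Python 3 (not part of the module): byte strings
# are decoded to text and containers are normalised recursively, e.g.
#   decode_unicode({b'key': [b'a', (b'b',)]}) == {'key': ['a', ['b']]}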
def recursive_keypairs(d, separator=':'):
"""Generator that produces sequence of keypairs for nested dictionaries."""
for name, value in sorted(six.iteritems(d)):
if isinstance(value, dict):
for subname, subvalue in recursive_keypairs(value, separator):
yield ('%s%s%s' % (name, separator, subname), subvalue)
elif isinstance(value, (tuple, list)):
yield name, decode_unicode(value)
else:
yield name, value
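# Illustrative example (not part of the module): nested keys are joined with
# the separator, e.g.
#   list(recursive_keypairs({'a': {'b': 1}, 'c': 2})) == [('a:b', 1), ('c', 2)]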
def dt_to_decimal(utc):
"""Datetime to Decimal.
Some databases don't store microseconds in datetime
so we always store as Decimal unixtime.
"""
if utc is None:
return None
decimal.getcontext().prec = 30
return (decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) +
(decimal.Decimal(str(utc.microsecond)) /
decimal.Decimal("1000000.0")))
def decimal_to_dt(dec):
"""Return a datetime from Decimal unixtime format."""
if dec is None:
return None
integer = int(dec)
micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(units.M)
    dt = datetime.datetime.utcfromtimestamp(integer)
    return dt.replace(microsecond=int(round(micro)))
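# Illustrative round trip (not part of the module): dt_to_decimal and
# decimal_to_dt are inverses up to microsecond precision, e.g.
#   ts = datetime.datetime(2019, 1, 1, 12, 0, 0, 123456)
#   decimal_to_dt(dt_to_decimal(ts)) == ts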
def sanitize_timestamp(timestamp):
"""Return a naive utc datetime object."""
if not timestamp:
return timestamp
if not isinstance(timestamp, datetime.datetime):
timestamp = timeutils.parse_isotime(timestamp)
return timeutils.normalize_time(timestamp)
def update_nested(original_dict, updates):
"""Updates the leaf nodes in a nest dict.
Updates occur without replacing entire sub-dicts.
"""
dict_to_update = copy.deepcopy(original_dict)
for key, value in six.iteritems(updates):
if isinstance(value, dict):
sub_dict = update_nested(dict_to_update.get(key, {}), value)
dict_to_update[key] = sub_dict
else:
dict_to_update[key] = updates[key]
return dict_to_update
panko-7.0.0/panko/i18n.py 0000664 0001750 0001750 00000002042 13551610116 015106 0 ustar zuul zuul 0000000 0000000 # Copyright 2014 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See https://docs.openstack.org/oslo.i18n/latest/user/usage.html
"""
import oslo_i18n
DOMAIN = 'panko'
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
def translate(value, user_locale):
return oslo_i18n.translate(value, user_locale)
def get_available_languages():
return oslo_i18n.get_available_languages(DOMAIN)
panko-7.0.0/panko/locale/ 0000775 0001750 0001750 00000000000 13551610251 015216 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/locale/en_GB/ 0000775 0001750 0001750 00000000000 13551610251 016170 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/locale/en_GB/LC_MESSAGES/ 0000775 0001750 0001750 00000000000 13551610251 017755 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/locale/en_GB/LC_MESSAGES/panko.po 0000664 0001750 0001750 00000005116 13551610116 021430 0 ustar zuul zuul 0000000 0000000 # Andi Chandler , 2017. #zanata
msgid ""
msgstr ""
"Project-Id-Version: panko VERSION\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n"
"POT-Creation-Date: 2018-05-22 10:08+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2017-07-11 05:07+0000\n"
"Last-Translator: Andi Chandler \n"
"Language-Team: English (United Kingdom)\n"
"Language: en_GB\n"
"X-Generator: Zanata 4.3.3\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
#, python-format
msgid "%(entity)s %(id)s Not Found"
msgstr "%(entity)s %(id)s Not Found"
#, python-format
msgid "Cannot create table %(table_name)s it already exists. Ignoring error"
msgstr "Cannot create table %(table_name)s it already exists. Ignoring error"
msgid "Event"
msgstr "Event"
#, python-format
msgid "Not Authorized to access %(aspect)s %(id)s"
msgstr "Not Authorised to access %(aspect)s %(id)s"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `eq' operator is available for "
"field %(field)s"
msgstr ""
"Operator %(operator)s is not supported. Only `eq' operator is available for "
"field %(field)s"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `ge' operator is available for "
"field %(field)s"
msgstr ""
"Operator %(operator)s is not supported. Only `ge' operator is available for "
"field %(field)s"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `le' operator is available for "
"field %(field)s"
msgstr ""
"Operator %(operator)s is not supported. Only `le' operator is available for "
"field %(field)s"
#, python-format
msgid ""
"Operator %(operator)s is not supported. The supported operators are: "
"%(supported)s"
msgstr ""
"Operator %(operator)s is not supported. The supported operators are: "
"%(supported)s"
#, python-format
msgid ""
"The data type %(type)s is not supported. The supported data type list is: "
"%(supported)s"
msgstr ""
"The data type %(type)s is not supported. The supported data type list is: "
"%(supported)s"
#, python-format
msgid "Unable to connect to the database server: %(errmsg)s."
msgstr "Unable to connect to the database server: %(errmsg)s."
#, python-format
msgid ""
"Unable to convert the value %(value)s to the expected data type %(type)s."
msgstr ""
"Unable to convert the value %(value)s to the expected data type %(type)s."
#, python-format
msgid ""
"Unexpected exception converting %(value)s to the expected data type %(type)s."
msgstr ""
"Unexpected exception converting %(value)s to the expected data type %(type)s."
panko-7.0.0/panko/locale/ko_KR/ 0000775 0001750 0001750 00000000000 13551610251 016223 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/locale/ko_KR/LC_MESSAGES/ 0000775 0001750 0001750 00000000000 13551610251 020010 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/locale/ko_KR/LC_MESSAGES/panko.po 0000664 0001750 0001750 00000005314 13551610116 021463 0 ustar zuul zuul 0000000 0000000 # JongSoo Ha , 2018. #zanata
msgid ""
msgstr ""
"Project-Id-Version: panko VERSION\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n"
"POT-Creation-Date: 2019-04-10 01:12+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2018-11-12 03:43+0000\n"
"Last-Translator: JongSoo Ha \n"
"Language-Team: Korean (South Korea)\n"
"Language: ko_KR\n"
"X-Generator: Zanata 4.3.3\n"
"Plural-Forms: nplurals=1; plural=0\n"
#, python-format
msgid "%(entity)s %(id)s Not Found"
msgstr "%(entity)s %(id)s 발견되지 않음"
#, python-format
msgid "Cannot create table %(table_name)s it already exists. Ignoring error"
msgstr " %(table_name)s 이 이미 존재하므로 테이블 추가 불가능. 에러 무시"
msgid "Event"
msgstr "이벤트"
#, python-format
msgid "Not Authorized to access %(aspect)s %(id)s"
msgstr "%(aspect)s %(id)s로의 허가되지 않은 접근"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `eq' operator is available for "
"field %(field)s"
msgstr ""
"연산자 %(operator)s 는 지원되지 않음. 오직 `eq' 연산자만이 필드 %(field)s에"
"서 사용가능"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `ge' operator is available for "
"field %(field)s"
msgstr ""
"연산자 %(operator)s 는 지원되지 않음. 오직 `ge' 연산자만이 필드 %(field)s에"
"서 사용가능"
#, python-format
msgid ""
"Operator %(operator)s is not supported. Only `le' operator is available for "
"field %(field)s"
msgstr ""
"연산자 %(operator)s 는 지원되지 않음. 오직 `le' 연산자만이 필드 %(field)s에"
"서 사용가능"
#, python-format
msgid ""
"Operator %(operator)s is not supported. The supported operators are: "
"%(supported)s"
msgstr "연산자 %(operator)s 는 지원되지 않음. 지원되는 연산자들: %(supported)s"
#, python-format
msgid ""
"The data type %(type)s is not supported. The supported data type list is: "
"%(supported)s"
msgstr ""
"데이터타입 %(type)s 은 지원되지 않음. 지원되는 데이터타입 목록 : "
"%(supported)s"
#, python-format
msgid "Unable to connect to the database server: %(errmsg)s."
msgstr "데이터베이스 서버로 접속 불가 : %(errmsg)s."
#, python-format
msgid ""
"Unable to convert the value %(value)s to the expected data type %(type)s."
msgstr "값 %(value)s 를 희망하는 데이터 타입 %(type)s 으로의 변환 불가"
#, python-format
msgid ""
"Unexpected exception converting %(value)s to the expected data type %(type)s."
msgstr ""
" %(value)s를 예측 데이터타입 %(type)s 으로 변환도중 예측치 못한 예외 발생"
panko-7.0.0/panko/storage/ 0000775 0001750 0001750 00000000000 13551610251 015423 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/impl_mongodb.py 0000664 0001750 0001750 00000010132 13551610116 020440 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""MongoDB storage backend"""
from oslo_log import log
import pymongo
from panko import storage
from panko.storage.mongo import utils as pymongo_utils
from panko.storage import pymongo_base
LOG = log.getLogger(__name__)
class Connection(pymongo_base.Connection):
"""Put the event data into a MongoDB database."""
CONNECTION_POOL = pymongo_utils.ConnectionPool()
def __init__(self, url, conf):
# NOTE(jd) Use our own connection pooling on top of the Pymongo one.
        # Without it we would overflow the MongoDB instance with new
        # connections, since we instantiate a Pymongo client each time someone
        # requires a new storage connection.
self.conn = self.CONNECTION_POOL.connect(
url,
conf.database.max_retries,
conf.database.retry_interval)
# Require MongoDB 2.4 to use $setOnInsert
if self.conn.server_info()['versionArray'] < [2, 4]:
raise storage.StorageBadVersion("Need at least MongoDB 2.4")
connection_options = pymongo.uri_parser.parse_uri(url)
self.db = getattr(self.conn, connection_options['database'])
if connection_options.get('username'):
self.db.authenticate(connection_options['username'],
connection_options['password'])
        # NOTE(jd) Upgrading is just about creating indexes, so let's do this
# on connection to be sure at least the TTL is correctly updated if
# needed.
self.upgrade()
@staticmethod
def update_ttl(ttl, ttl_index_name, index_field, coll):
"""Update or create time_to_live indexes.
:param ttl: time to live in seconds.
:param ttl_index_name: name of the index we want to update or create.
:param index_field: field with the index that we need to update.
:param coll: collection which indexes need to be updated.
"""
indexes = coll.index_information()
if ttl <= 0:
if ttl_index_name in indexes:
coll.drop_index(ttl_index_name)
return
if ttl_index_name in indexes:
return coll.database.command(
'collMod', coll.name,
index={'keyPattern': {index_field: pymongo.ASCENDING},
'expireAfterSeconds': ttl})
coll.create_index([(index_field, pymongo.ASCENDING)],
expireAfterSeconds=ttl,
name=ttl_index_name)
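    # For example (illustrative): update_ttl(7 * 24 * 3600, 'event_ttl',
    # 'timestamp', coll) expires events seven days after their timestamp,
    # while any ttl <= 0 drops the TTL index so events are kept forever.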
def upgrade(self):
# create collection if not present
if 'event' not in self.db.conn.collection_names():
self.db.conn.create_collection('event')
# Establish indexes
        # NOTE(idegtiarov): This index covers get_events, get_event_types, and
# get_trait_types requests based on event_type and timestamp fields.
self.db.event.create_index(
[('event_type', pymongo.ASCENDING),
('timestamp', pymongo.ASCENDING)],
name='event_type_idx'
)
def clear(self):
self.conn.drop_database(self.db.name)
# Connection will be reopened automatically if needed
self.conn.close()
def clear_expired_data(self, ttl):
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
self.update_ttl(ttl, 'event_ttl', 'timestamp', self.db.event)
LOG.info("Clearing expired event data is based on native "
"MongoDB time to live feature and going in background.")
panko-7.0.0/panko/storage/impl_sqlalchemy.py 0000664 0001750 0001750 00000050060 13551610116 021161 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy storage backend."""
from __future__ import absolute_import
import collections
import datetime
from oslo_db import exception as dbexc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as oslo_sql_utils
from oslo_log import log
from oslo_utils import importutils
from oslo_utils import timeutils
import sqlalchemy as sa
from sqlalchemy.engine import url as sqlalchemy_url
from sqlalchemy.orm import aliased
from panko import storage
from panko.storage import base
from panko.storage import models as api_models
from panko.storage.sqlalchemy import models
from panko import utils
LOG = log.getLogger(__name__)
osprofiler_sqlalchemy = importutils.try_import('osprofiler.sqlalchemy')
AVAILABLE_CAPABILITIES = {
'events': {'query': {'simple': True}},
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
}
TRAIT_MAPLIST = [(api_models.Trait.NONE_TYPE, models.TraitText),
(api_models.Trait.TEXT_TYPE, models.TraitText),
(api_models.Trait.INT_TYPE, models.TraitInt),
(api_models.Trait.FLOAT_TYPE, models.TraitFloat),
(api_models.Trait.DATETIME_TYPE, models.TraitDatetime)]
TRAIT_ID_TO_MODEL = dict((x, y) for x, y in TRAIT_MAPLIST)
TRAIT_MODEL_TO_ID = dict((y, x) for x, y in TRAIT_MAPLIST)
trait_models_dict = {'string': models.TraitText,
'integer': models.TraitInt,
'datetime': models.TraitDatetime,
'float': models.TraitFloat}
def _get_model_and_conditions(trait_type, key, value, op='eq'):
trait_model = aliased(trait_models_dict[trait_type])
op_dict = {'eq': (trait_model.value == value),
'lt': (trait_model.value < value),
'le': (trait_model.value <= value),
'gt': (trait_model.value > value),
'ge': (trait_model.value >= value),
'ne': (trait_model.value != value)}
conditions = [trait_model.key == key, op_dict[op]]
return (trait_model, conditions)
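# Illustrative mapping (not part of the module): a traits filter such as
#   {'key': 'resource_id', 'string': 'abc-123', 'op': 'eq'}
# resolves to an aliased models.TraitText and the conditions
#   [TraitText.key == 'resource_id', TraitText.value == 'abc-123'].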
class Connection(base.Connection):
"""Put the event data into a SQLAlchemy database.
Tables::
- EventType
- event definition
- { id: event type id
desc: description of event
}
- Event
- event data
- { id: event id
message_id: message id
generated = timestamp of event
event_type_id = event type -> eventtype.id
}
- TraitInt
- int trait value
- { event_id: event -> event.id
key: trait name
value: integer value
}
- TraitDatetime
- datetime trait value
- { event_id: event -> event.id
key: trait name
value: datetime value
}
- TraitText
- text trait value
- { event_id: event -> event.id
key: trait name
value: text value
}
- TraitFloat
- float trait value
- { event_id: event -> event.id
key: trait name
value: float value
}
"""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
AVAILABLE_CAPABILITIES)
STORAGE_CAPABILITIES = utils.update_nested(
base.Connection.STORAGE_CAPABILITIES,
AVAILABLE_STORAGE_CAPABILITIES,
)
def __init__(self, url, conf):
        # Set max_retries to 0, since oslo.db may otherwise retry the db
        # connection up to max_retries ** 2 times on failure, and db
        # reconnection is already implemented in the
        # storage.__init__.get_connection_from_config function.
options = dict(conf.database.items())
options['max_retries'] = 0
# oslo.db doesn't support options defined by Panko
for opt in storage.OPTS:
options.pop(opt.name, None)
self._engine_facade = db_session.EngineFacade(self.dress_url(url),
**options)
if osprofiler_sqlalchemy:
osprofiler_sqlalchemy.add_tracing(sa,
self._engine_facade.get_engine(),
'db')
@staticmethod
def dress_url(url):
# If no explicit driver has been set, we default to pymysql
if url.startswith("mysql://"):
url = sqlalchemy_url.make_url(url)
url.drivername = "mysql+pymysql"
return str(url)
return url
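    # e.g. dress_url("mysql://panko:secret@db-host/panko")
    #      -> "mysql+pymysql://panko:secret@db-host/panko"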
def upgrade(self):
engine = self._engine_facade.get_engine()
models.Base.metadata.create_all(engine)
def clear(self):
engine = self._engine_facade.get_engine()
for table in reversed(models.Base.metadata.sorted_tables):
engine.execute(table.delete())
engine.dispose()
def _get_or_create_event_type(self, event_type, session):
"""Check if an event type with the supplied name is already exists.
If not, we create it and return the record. This may result in a flush.
"""
try:
with session.begin(nested=True):
et = session.query(models.EventType).filter(
models.EventType.desc == event_type).first()
if not et:
et = models.EventType(event_type)
session.add(et)
except dbexc.DBDuplicateEntry:
et = self._get_or_create_event_type(event_type, session)
return et
def record_events(self, event_models):
"""Write the events to SQL database via sqlalchemy.
:param event_models: a list of model.Event objects.
"""
session = self._engine_facade.get_session()
error = None
for event_model in event_models:
event = None
try:
with session.begin():
event_type = self._get_or_create_event_type(
event_model.event_type, session=session)
event = models.Event(event_model.message_id, event_type,
event_model.generated,
event_model.raw)
session.add(event)
session.flush()
if event_model.traits:
trait_map = {}
for trait in event_model.traits:
if trait_map.get(trait.dtype) is None:
trait_map[trait.dtype] = []
trait_map[trait.dtype].append(
{'event_id': event.id,
'key': trait.name,
'value': trait.value})
for dtype in trait_map.keys():
model = TRAIT_ID_TO_MODEL[dtype]
session.execute(model.__table__.insert(),
trait_map[dtype])
except dbexc.DBDuplicateEntry as e:
LOG.debug("Duplicate event detected, skipping it: %s", e)
except KeyError as e:
LOG.exception('Failed to record event: %s', e)
except Exception as e:
LOG.exception('Failed to record event: %s', e)
error = e
if error:
raise error
def _get_pagination_query(self, query, pagination, api_model, model):
limit = pagination.get('limit')
marker = None
if pagination.get('marker'):
marker_filter = storage.EventFilter(
message_id=pagination.get('marker'))
markers = list(self.get_events(marker_filter))
if markers:
marker = markers[0]
else:
raise storage.InvalidMarker(
'Marker %s not found.' % pagination['marker'])
if not pagination.get('sort'):
pagination['sort'] = api_model.DEFAULT_SORT
sort_keys = [s[0] for s in pagination['sort']]
sort_dirs = [s[1] for s in pagination['sort']]
return oslo_sql_utils.paginate_query(
query, model, limit, sort_keys, sort_dirs=sort_dirs, marker=marker)
def get_events(self, event_filter, pagination=None):
"""Return an iterable of model.Event objects.
:param event_filter: EventFilter instance
:param pagination: Pagination parameters.
"""
pagination = pagination or {}
session = self._engine_facade.get_session()
with session.begin():
# Build up the join conditions
event_join_conditions = [models.EventType.id ==
models.Event.event_type_id]
if event_filter.event_type:
event_join_conditions.append(models.EventType.desc ==
event_filter.event_type)
# Build up the where conditions
event_filter_conditions = []
if event_filter.message_id:
event_filter_conditions.append(
models.Event.message_id == event_filter.message_id)
if event_filter.start_timestamp:
event_filter_conditions.append(
models.Event.generated >= event_filter.start_timestamp)
if event_filter.end_timestamp:
event_filter_conditions.append(
models.Event.generated <= event_filter.end_timestamp)
trait_subq = None
# Build trait filter
if event_filter.traits_filter:
filters = list(event_filter.traits_filter)
trait_filter = filters.pop()
key = trait_filter.pop('key')
op = trait_filter.pop('op', 'eq')
trait_type, value = list(trait_filter.items())[0]
trait_model, conditions = _get_model_and_conditions(
trait_type, key, value, op)
trait_subq = (session
.query(trait_model.event_id.label('ev_id'))
.filter(*conditions))
first_model = trait_model
for label_num, trait_filter in enumerate(filters):
key = trait_filter.pop('key')
op = trait_filter.pop('op', 'eq')
trait_type, value = list(trait_filter.items())[0]
trait_model, conditions = _get_model_and_conditions(
trait_type, key, value, op)
trait_subq = (
trait_subq
.add_columns(
trait_model.event_id.label('l%d' % label_num))
.filter(
first_model.event_id == trait_model.event_id,
*conditions))
trait_subq = trait_subq.subquery()
query = (session.query(models.Event.id)
.join(models.EventType,
sa.and_(*event_join_conditions)))
if trait_subq is not None:
query = query.join(trait_subq,
trait_subq.c.ev_id == models.Event.id)
if event_filter.admin_proj:
no_proj_q = session.query(models.TraitText.event_id).filter(
models.TraitText.key == 'project_id')
admin_q = (session.query(models.TraitText.event_id).filter(
~sa.exists().where(models.TraitText.event_id ==
no_proj_q.subquery().c.event_id)).union(
session.query(models.TraitText.event_id).filter(sa.and_(
models.TraitText.key == 'project_id',
models.TraitText.value == event_filter.admin_proj,
models.Event.id == models.TraitText.event_id))))
query = query.filter(sa.exists().where(
models.Event.id ==
admin_q.subquery().c.trait_text_event_id))
if event_filter_conditions:
query = query.filter(sa.and_(*event_filter_conditions))
query = self._get_pagination_query(
query, pagination, api_models.Event, models.Event)
event_list = collections.OrderedDict()
# get a list of all events that match filters
for (id_, generated, message_id,
desc, raw) in query.add_columns(
models.Event.generated, models.Event.message_id,
models.EventType.desc, models.Event.raw).all():
event_list[id_] = api_models.Event(
message_id, desc, generated, [], raw)
# Query all traits related to events.
# NOTE (gordc): cast is done because pgsql defaults to TEXT when
# handling unknown values such as null.
trait_q = (
session.query(
models.TraitDatetime.event_id,
models.TraitDatetime.key, models.TraitDatetime.value,
sa.cast(sa.null(), sa.Integer),
sa.cast(sa.null(), sa.Float(53)),
sa.cast(sa.null(), sa.String(255)))
.filter(sa.exists().where(
models.TraitDatetime.event_id == query.subquery().c.id))
).union_all(
session.query(
models.TraitInt.event_id,
models.TraitInt.key, sa.null(),
models.TraitInt.value, sa.null(), sa.null())
.filter(sa.exists().where(
models.TraitInt.event_id == query.subquery().c.id)),
session.query(
models.TraitFloat.event_id,
models.TraitFloat.key, sa.null(), sa.null(),
models.TraitFloat.value, sa.null())
.filter(sa.exists().where(
models.TraitFloat.event_id == query.subquery().c.id)),
session.query(
models.TraitText.event_id,
models.TraitText.key, sa.null(), sa.null(), sa.null(),
models.TraitText.value)
.filter(sa.exists().where(
models.TraitText.event_id == query.subquery().c.id)))
for id_, key, t_date, t_int, t_float, t_text in (
trait_q.order_by(models.TraitDatetime.key)).all():
if t_int is not None:
dtype = api_models.Trait.INT_TYPE
val = t_int
elif t_float is not None:
dtype = api_models.Trait.FLOAT_TYPE
val = t_float
elif t_date is not None:
dtype = api_models.Trait.DATETIME_TYPE
val = t_date
else:
dtype = api_models.Trait.TEXT_TYPE
val = t_text
try:
trait_model = api_models.Trait(key, dtype, val)
event_list[id_].append_trait(trait_model)
except KeyError:
# NOTE(gordc): this is expected as we do not set REPEATABLE
# READ (bug 1506717). if query is run while recording new
# event data, trait query may return more data than event
# query. they can be safely discarded.
pass
return event_list.values()
def get_event_types(self):
"""Return all event types as an iterable of strings."""
session = self._engine_facade.get_session()
with session.begin():
query = (session.query(models.EventType.desc).
order_by(models.EventType.desc))
for name in query.all():
# The query returns a tuple with one element.
yield name[0]
def get_trait_types(self, event_type):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event
"""
session = self._engine_facade.get_session()
with session.begin():
for trait_model in [models.TraitText, models.TraitInt,
models.TraitFloat, models.TraitDatetime]:
query = (session.query(trait_model.key)
.join(models.Event,
models.Event.id == trait_model.event_id)
.join(models.EventType,
sa.and_(models.EventType.id ==
models.Event.event_type_id,
models.EventType.desc == event_type))
.distinct())
dtype = TRAIT_MODEL_TO_ID.get(trait_model)
for row in query.all():
yield {'name': row[0], 'data_type': dtype}
def get_traits(self, event_type, trait_type=None):
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""
session = self._engine_facade.get_session()
with session.begin():
for trait_model in [models.TraitText, models.TraitInt,
models.TraitFloat, models.TraitDatetime]:
query = (session.query(trait_model.key, trait_model.value)
.join(models.Event,
models.Event.id == trait_model.event_id)
.join(models.EventType,
sa.and_(models.EventType.id ==
models.Event.event_type_id,
models.EventType.desc == event_type))
.order_by(trait_model.key))
if trait_type:
query = query.filter(trait_model.key == trait_type)
dtype = TRAIT_MODEL_TO_ID.get(trait_model)
for k, v in query.all():
yield api_models.Trait(name=k,
dtype=dtype,
value=v)
def clear_expired_data(self, ttl):
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
session = self._engine_facade.get_session()
with session.begin():
end = timeutils.utcnow() - datetime.timedelta(seconds=ttl)
event_q = (session.query(models.Event.id)
.filter(models.Event.generated < end))
event_subq = event_q.subquery()
for trait_model in [models.TraitText, models.TraitInt,
models.TraitFloat, models.TraitDatetime]:
(session.query(trait_model)
.filter(trait_model.event_id.in_(event_subq))
.delete(synchronize_session="fetch"))
event_rows = event_q.delete()
# remove EventType and TraitType with no corresponding
# matching events and traits
(session.query(models.EventType)
.filter(~models.EventType.events.any())
.delete(synchronize_session="fetch"))
LOG.info("%d events are removed from database", event_rows)
panko-7.0.0/panko/storage/mongo/ 0000775 0001750 0001750 00000000000 13551610251 016542 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/mongo/utils.py 0000664 0001750 0001750 00000020717 13551610116 020263 0 ustar zuul zuul 0000000 0000000 #
# Copyright Ericsson AB 2013. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common functions for MongoDB backend
"""
import weakref
from oslo_log import log
from oslo_utils import netutils
import pymongo
import pymongo.errors
import six
import tenacity
from panko.i18n import _
ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS = 86
LOG = log.getLogger(__name__)
EVENT_TRAIT_TYPES = {'none': 0, 'string': 1, 'integer': 2, 'float': 3,
'datetime': 4}
OP_SIGN = {'lt': '$lt', 'le': '$lte', 'ne': '$ne', 'gt': '$gt', 'ge': '$gte'}
MINIMUM_COMPATIBLE_MONGODB_VERSION = [2, 4]
COMPLETE_AGGREGATE_COMPATIBLE_VERSION = [2, 6]
def make_timestamp_range(start, end,
start_timestamp_op=None, end_timestamp_op=None):
"""Create the query document to find timestamps within that range.
    Given two optional datetimes and their comparison operators, build the
    corresponding range document. By default, $gte is used for the lower
    bound and $lt for the upper bound.
"""
ts_range = {}
if start:
if start_timestamp_op == 'gt':
start_timestamp_op = '$gt'
else:
start_timestamp_op = '$gte'
ts_range[start_timestamp_op] = start
if end:
if end_timestamp_op == 'le':
end_timestamp_op = '$lte'
else:
end_timestamp_op = '$lt'
ts_range[end_timestamp_op] = end
return ts_range
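# Illustrative example (not part of the module): with default operators,
#   make_timestamp_range(t_start, t_end) == {'$gte': t_start, '$lt': t_end}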
def make_events_query_from_filter(event_filter):
"""Return start and stop row for filtering and a query.
Query is based on the selected parameter.
:param event_filter: storage.EventFilter object.
"""
query = {}
q_list = []
ts_range = make_timestamp_range(event_filter.start_timestamp,
event_filter.end_timestamp)
if ts_range:
q_list.append({'timestamp': ts_range})
if event_filter.event_type:
q_list.append({'event_type': event_filter.event_type})
if event_filter.message_id:
q_list.append({'_id': event_filter.message_id})
if event_filter.traits_filter:
for trait_filter in event_filter.traits_filter:
op = trait_filter.pop('op', 'eq')
dict_query = {}
for k, v in six.iteritems(trait_filter):
if v is not None:
# All parameters in EventFilter['traits'] are optional, so
                    # we need to check whether they are present in the
                    # query or not.
if k == 'key':
dict_query.setdefault('trait_name', v)
elif k in ['string', 'integer', 'datetime', 'float']:
dict_query.setdefault('trait_type',
EVENT_TRAIT_TYPES[k])
dict_query.setdefault('trait_value',
v if op == 'eq'
else {OP_SIGN[op]: v})
dict_query = {'$elemMatch': dict_query}
q_list.append({'traits': dict_query})
if event_filter.admin_proj:
q_list.append({'$or': [
{'traits': {'$not': {'$elemMatch': {'trait_name': 'project_id'}}}},
{'traits': {
'$elemMatch': {'trait_name': 'project_id',
'trait_value': event_filter.admin_proj}}}]})
if q_list:
query = {'$and': q_list}
return query
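# Illustrative example (not part of the module): a filter carrying only an
# event_type produces
#   {'$and': [{'event_type': 'compute.instance.create.end'}]}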
class ConnectionPool(object):
def __init__(self):
self._pool = {}
def connect(self, url, max_retries, retry_interval):
connection_options = pymongo.uri_parser.parse_uri(url)
del connection_options['database']
del connection_options['username']
del connection_options['password']
del connection_options['collection']
pool_key = tuple(connection_options)
if pool_key in self._pool:
client = self._pool.get(pool_key)()
if client:
return client
splitted_url = netutils.urlsplit(url)
log_data = {'db': splitted_url.scheme,
'nodelist': connection_options['nodelist']}
        LOG.info('Connecting to %(db)s on %(nodelist)s', log_data)
try:
client = MongoProxy(pymongo.MongoClient(url),
max_retries, retry_interval)
except pymongo.errors.ConnectionFailure as e:
LOG.warning(_('Unable to connect to the database server: '
'%(errmsg)s.') % {'errmsg': e})
raise
self._pool[pool_key] = weakref.ref(client)
return client
def _safe_mongo_call(max_retries, retry_interval):
return tenacity.retry(
retry=tenacity.retry_if_exception_type(
pymongo.errors.AutoReconnect),
wait=tenacity.wait_fixed(retry_interval),
stop=(tenacity.stop_after_attempt(max_retries) if max_retries >= 0
else tenacity.stop_never)
)
MONGO_METHODS = set([typ for typ in dir(pymongo.collection.Collection)
if not typ.startswith('_')])
MONGO_METHODS.update(set([typ for typ in dir(pymongo.MongoClient)
if not typ.startswith('_')]))
MONGO_METHODS.update(set([typ for typ in dir(pymongo)
if not typ.startswith('_')]))
class MongoProxy(object):
def __init__(self, conn, max_retries, retry_interval):
self.conn = conn
self.max_retries = max_retries
self.retry_interval = retry_interval
self._recreate_index = _safe_mongo_call(
self.max_retries, self.retry_interval)(self._recreate_index)
def __getitem__(self, item):
"""Create and return proxy around the method in the connection.
:param item: name of the connection
"""
return MongoProxy(self.conn[item])
def find(self, *args, **kwargs):
# We need this modifying method to return a CursorProxy object so that
# we can handle the Cursor next function to catch the AutoReconnect
# exception.
return CursorProxy(self.conn.find(*args, **kwargs),
self.max_retries,
self.retry_interval)
def create_index(self, keys, name=None, *args, **kwargs):
try:
self.conn.create_index(keys, name=name, *args, **kwargs)
except pymongo.errors.OperationFailure as e:
            if e.code == ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS:
                LOG.info("Index %s will be recreated.", name)
self._recreate_index(keys, name, *args, **kwargs)
def _recreate_index(self, keys, name, *args, **kwargs):
self.conn.drop_index(name)
self.conn.create_index(keys, name=name, *args, **kwargs)
def __getattr__(self, item):
"""Wrap MongoDB connection.
If item is the name of an executable method, for example find or
insert, wrap this method in the MongoConn.
Else wrap getting attribute with MongoProxy.
"""
if item in ('name', 'database'):
return getattr(self.conn, item)
if item in MONGO_METHODS:
return _safe_mongo_call(
self.max_retries, self.retry_interval
)(getattr(self.conn, item))
return MongoProxy(getattr(self.conn, item),
self.max_retries, self.retry_interval)
def __call__(self, *args, **kwargs):
return self.conn(*args, **kwargs)
class CursorProxy(pymongo.cursor.Cursor):
def __init__(self, cursor, max_retry, retry_interval):
self.cursor = cursor
self.next = _safe_mongo_call(max_retry, retry_interval)(self._next)
def __getitem__(self, item):
return self.cursor[item]
def _next(self):
"""Wrap Cursor next method.
        This method is executed in place of each Cursor next() call so that
        AutoReconnect errors can be retried from a saved cursor clone.
"""
try:
save_cursor = self.cursor.clone()
return self.cursor.next()
except pymongo.errors.AutoReconnect:
self.cursor = save_cursor
raise
def __getattr__(self, item):
return getattr(self.cursor, item)
panko-7.0.0/panko/storage/mongo/__init__.py 0000664 0001750 0001750 00000000000 13551610116 020641 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/impl_log.py 0000664 0001750 0001750 00000001747 13551610116 017610 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from panko.storage import base
LOG = log.getLogger(__name__)
class Connection(base.Connection):
"""Log event data."""
@staticmethod
def clear_expired_data(ttl):
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
LOG.info("Dropping event data with TTL %d", ttl)
panko-7.0.0/panko/storage/pymongo_base.py 0000664 0001750 0001750 00000013705 13551610116 020465 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common functions for MongoDB backend
"""
from oslo_log import log
import pymongo
from panko.storage import base
from panko.storage import models
from panko.storage.mongo import utils as pymongo_utils
from panko import utils
LOG = log.getLogger(__name__)
COMMON_AVAILABLE_CAPABILITIES = {
'events': {'query': {'simple': True}},
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
}
class Connection(base.Connection):
"""Base event Connection class for MongoDB driver."""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
COMMON_AVAILABLE_CAPABILITIES)
STORAGE_CAPABILITIES = utils.update_nested(
base.Connection.STORAGE_CAPABILITIES,
AVAILABLE_STORAGE_CAPABILITIES,
)
def record_events(self, event_models):
"""Write the events to database.
:param event_models: a list of models.Event objects.
"""
error = None
for event_model in event_models:
traits = []
if event_model.traits:
for trait in event_model.traits:
traits.append({'trait_name': trait.name,
'trait_type': trait.dtype,
'trait_value': trait.value})
try:
self.db.event.insert_one(
{'_id': event_model.message_id,
'event_type': event_model.event_type,
'timestamp': event_model.generated,
'traits': traits, 'raw': event_model.raw})
except pymongo.errors.DuplicateKeyError as ex:
LOG.debug("Duplicate event detected, skipping it: %s", ex)
except Exception as ex:
LOG.exception("Failed to record event: %s", ex)
error = ex
if error:
raise error
def get_events(self, event_filter, pagination=None):
"""Return an iter of models.Event objects.
:param event_filter: storage.EventFilter object, consists of filters
for events that are stored in database.
:param pagination: Pagination parameters.
"""
limit = None
if pagination:
if pagination.get('sort'):
LOG.warning('Driver does not support sort functionality')
limit = pagination.get('limit')
if limit == 0:
return
q = pymongo_utils.make_events_query_from_filter(event_filter)
if limit is not None:
results = self.db.event.find(q, limit=limit)
else:
results = self.db.event.find(q)
for event in results:
traits = []
for trait in event['traits']:
traits.append(models.Trait(name=trait['trait_name'],
dtype=int(trait['trait_type']),
value=trait['trait_value']))
yield models.Event(message_id=event['_id'],
event_type=event['event_type'],
generated=event['timestamp'],
traits=traits, raw=event.get('raw'))
def get_event_types(self):
"""Return all event types as an iter of strings."""
return self.db.event.distinct('event_type')
def get_trait_types(self, event_type):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event.
"""
trait_names = set()
events = self.db.event.find({'event_type': event_type})
for event in events:
for trait in event['traits']:
trait_name = trait['trait_name']
if trait_name not in trait_names:
                    # Ensure that only unique trait types are yielded. It is
                    # assumed that a given trait name has exactly one trait
                    # type, so each trait name is reported only once.
trait_names.add(trait_name)
yield {'name': trait_name,
'data_type': trait['trait_type']}
def get_traits(self, event_type, trait_name=None):
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_name: the name of the Trait to filter by
"""
if not trait_name:
events = self.db.event.find({'event_type': event_type})
else:
            # Select events that have both the given event_type and the given
            # trait_name, and project only the matching traits.
events = self.db.event.find({'$and': [{'event_type': event_type},
{'traits.trait_name': trait_name}]},
{'traits': {'$elemMatch':
{'trait_name': trait_name}}
})
for event in events:
for trait in event['traits']:
yield models.Trait(name=trait['trait_name'],
dtype=trait['trait_type'],
value=trait['trait_value'])
panko-7.0.0/panko/storage/impl_hbase.py 0000664 0001750 0001750 00000021325 13551610116 020103 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
from oslo_log import log
from panko.storage import base
from panko.storage.hbase import base as hbase_base
from panko.storage.hbase import utils as hbase_utils
from panko.storage import models
from panko import utils
LOG = log.getLogger(__name__)
AVAILABLE_CAPABILITIES = {
'events': {'query': {'simple': True}},
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
}
class Connection(hbase_base.Connection, base.Connection):
"""Put the event data into a HBase database
Collections:
- events:
- row_key: timestamp of event's generation + uuid of event
in format: "%s:%s" % (ts, Event.message_id)
- Column Families:
f: contains the following qualifiers:
- event_type: description of event's type
- timestamp: time stamp of event generation
- all traits for this event in format:
.. code-block:: python
"%s:%s" % (trait_name, trait_type)
"""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
AVAILABLE_CAPABILITIES)
STORAGE_CAPABILITIES = utils.update_nested(
base.Connection.STORAGE_CAPABILITIES,
AVAILABLE_STORAGE_CAPABILITIES,
)
_memory_instance = None
EVENT_TABLE = "event"
def upgrade(self):
tables = [self.EVENT_TABLE]
column_families = {'f': dict(max_versions=1)}
with self.conn_pool.connection() as conn:
hbase_utils.create_tables(conn, tables, column_families)
def clear(self):
LOG.debug('Dropping HBase schema...')
with self.conn_pool.connection() as conn:
for table in [self.EVENT_TABLE]:
try:
conn.disable_table(table)
except Exception:
LOG.debug('Cannot disable table but ignoring error')
try:
conn.delete_table(table)
except Exception:
LOG.debug('Cannot delete table but ignoring error')
def record_events(self, event_models):
"""Write the events to Hbase.
:param event_models: a list of models.Event objects.
"""
error = None
with self.conn_pool.connection() as conn:
events_table = conn.table(self.EVENT_TABLE)
for event_model in event_models:
                # The row key consists of the timestamp and the message_id
                # from models.Event, so that events are stored sorted by
                # timestamp in the database.
ts = event_model.generated
row = hbase_utils.prepare_key(
hbase_utils.timestamp(ts, reverse=False),
event_model.message_id)
event_type = event_model.event_type
traits = {}
if event_model.traits:
for trait in event_model.traits:
key = hbase_utils.prepare_key(trait.name, trait.dtype)
traits[key] = trait.value
record = hbase_utils.serialize_entry(traits,
event_type=event_type,
timestamp=ts,
raw=event_model.raw)
try:
events_table.put(row, record)
except Exception as ex:
LOG.exception("Failed to record event: %s", ex)
error = ex
if error:
raise error
def get_events(self, event_filter, pagination=None):
"""Return an iter of models.Event objects.
:param event_filter: storage.EventFilter object, consists of filters
for events that are stored in database.
:param pagination: Pagination parameters.
"""
limit = None
if pagination:
if pagination.get('sort'):
LOG.warning('Driver does not support sort functionality')
limit = pagination.get('limit')
if limit == 0:
return
q, start, stop = hbase_utils.make_events_query_from_filter(
event_filter)
with self.conn_pool.connection() as conn:
events_table = conn.table(self.EVENT_TABLE)
gen = events_table.scan(filter=q, row_start=start, row_stop=stop,
limit=limit)
for event_id, data in gen:
traits = []
events_dict = hbase_utils.deserialize_entry(data)[0]
for key, value in events_dict.items():
if isinstance(key, tuple):
trait_name, trait_dtype = key
traits.append(models.Trait(name=trait_name,
dtype=int(trait_dtype),
value=value))
ts, mess = event_id.split(':')
yield models.Event(
message_id=hbase_utils.unquote(mess),
event_type=events_dict['event_type'],
generated=events_dict['timestamp'],
traits=sorted(traits,
key=operator.attrgetter('dtype')),
raw=events_dict['raw']
)
def get_event_types(self):
"""Return all event types as an iterable of strings."""
with self.conn_pool.connection() as conn:
events_table = conn.table(self.EVENT_TABLE)
gen = events_table.scan()
event_types = set()
for event_id, data in gen:
events_dict = hbase_utils.deserialize_entry(data)[0]
for key, value in events_dict.items():
if not isinstance(key, tuple) and key.startswith('event_type'):
if value not in event_types:
event_types.add(value)
yield value
def get_trait_types(self, event_type):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event
"""
q = hbase_utils.make_query(event_type=event_type)
trait_names = set()
with self.conn_pool.connection() as conn:
events_table = conn.table(self.EVENT_TABLE)
gen = events_table.scan(filter=q)
for event_id, data in gen:
events_dict = hbase_utils.deserialize_entry(data)[0]
for key, value in events_dict.items():
if isinstance(key, tuple):
trait_name, trait_type = key
if trait_name not in trait_names:
                            # Ensure that only unique trait types are
                            # yielded: if the same trait name appears in
                            # several events of the same event_type, it is
                            # reported only once. It is assumed that a given
                            # trait name has exactly one trait type.
trait_names.add(trait_name)
data_type = models.Trait.type_names[int(trait_type)]
yield {'name': trait_name, 'data_type': data_type}
def get_traits(self, event_type, trait_type=None):
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""
q = hbase_utils.make_query(event_type=event_type,
trait_type=trait_type)
with self.conn_pool.connection() as conn:
events_table = conn.table(self.EVENT_TABLE)
gen = events_table.scan(filter=q)
for event_id, data in gen:
events_dict = hbase_utils.deserialize_entry(data)[0]
for key, value in events_dict.items():
if isinstance(key, tuple):
trait_name, trait_type = key
yield models.Trait(name=trait_name,
dtype=int(trait_type), value=value)
panko-7.0.0/panko/storage/base.py 0000664 0001750 0001750 00000007534 13551610116 016720 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for storage engines
"""
import six
import panko
class Model(object):
"""Base class for storage API models."""
def __init__(self, **kwds):
self.fields = list(kwds)
for k, v in six.iteritems(kwds):
setattr(self, k, v)
def as_dict(self):
d = {}
for f in self.fields:
v = getattr(self, f)
if isinstance(v, Model):
v = v.as_dict()
elif isinstance(v, list) and v and isinstance(v[0], Model):
v = [sub.as_dict() for sub in v]
d[f] = v
return d
def __eq__(self, other):
return self.as_dict() == other.as_dict()
def __ne__(self, other):
return not self.__eq__(other)
class Connection(object):
"""Base class for event storage system connections."""
# A dictionary representing the capabilities of this driver.
CAPABILITIES = {
'events': {'query': {'simple': False}},
}
STORAGE_CAPABILITIES = {
'storage': {'production_ready': False},
}
@staticmethod
def __init__(url, conf):
pass
@staticmethod
def upgrade():
"""Migrate the database to `version` or the most recent version."""
@staticmethod
def clear():
"""Clear database."""
@staticmethod
def record_events(events):
"""Write the events to the backend storage system.
:param events: a list of model.Event objects.
"""
raise panko.NotImplementedError('Events not implemented.')
@staticmethod
def get_events(event_filter, pagination=None):
"""Return an iterable of model.Event objects."""
@staticmethod
def get_event_types():
"""Return all event types as an iterable of strings."""
raise panko.NotImplementedError('Events not implemented.')
@staticmethod
def get_trait_types(event_type):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are
returned.
:param event_type: the type of the Event
"""
raise panko.NotImplementedError('Events not implemented.')
@staticmethod
def get_traits(event_type, trait_type=None):
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""
raise panko.NotImplementedError('Events not implemented.')
@classmethod
def get_capabilities(cls):
"""Return an dictionary with the capabilities of each driver."""
return cls.CAPABILITIES
@classmethod
def get_storage_capabilities(cls):
"""Return a dictionary representing the performance capabilities.
This is needed to evaluate the performance of each driver.
"""
return cls.STORAGE_CAPABILITIES
@staticmethod
def clear_expired_data(ttl):
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
raise panko.NotImplementedError('Clearing events not implemented')
panko-7.0.0/panko/storage/models.py 0000664 0001750 0001750 00000010471 13551610116 017263 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Model classes for use in the events storage API.
"""
from oslo_utils import timeutils
import six
from panko.storage import base
def serialize_dt(value):
"""Serializes parameter if it is datetime."""
return value.isoformat() if hasattr(value, 'isoformat') else value
class Event(base.Model):
"""A raw event from the source system. Events have Traits.
Metrics will be derived from one or more Events.
"""
DUPLICATE = 1
UNKNOWN_PROBLEM = 2
INCOMPATIBLE_TRAIT = 3
SUPPORT_DIRS = ('asc', 'desc')
SUPPORT_SORT_KEYS = ('message_id', 'generated')
DEFAULT_DIR = 'asc'
DEFAULT_SORT = [('generated', 'asc'), ('message_id', 'asc')]
PRIMARY_KEY = 'message_id'
def __init__(self, message_id, event_type, generated, traits, raw):
"""Create a new event.
:param message_id: Unique ID for the message this event
stemmed from. This is different than
the Event ID, which comes from the
underlying storage system.
:param event_type: The type of the event.
:param generated: UTC time for when the event occurred.
:param traits: list of Traits on this Event.
:param raw: Unindexed raw notification details.
"""
base.Model.__init__(self, message_id=message_id, event_type=event_type,
generated=generated, traits=traits, raw=raw)
def append_trait(self, trait_model):
self.traits.append(trait_model)
def __repr__(self):
trait_list = []
if self.traits:
trait_list = [six.text_type(trait) for trait in self.traits]
return ("" %
(self.message_id, self.event_type, self.generated,
" ".join(trait_list)))
def serialize(self):
return {'message_id': self.message_id,
'event_type': self.event_type,
'generated': serialize_dt(self.generated),
'traits': [trait.serialize() for trait in self.traits],
'raw': self.raw}
class Trait(base.Model):
"""A Trait is a key/value pair of data on an Event.
    The value is a variant record of basic data types (int, date, float, etc).
"""
NONE_TYPE = 0
TEXT_TYPE = 1
INT_TYPE = 2
FLOAT_TYPE = 3
DATETIME_TYPE = 4
type_names = {
NONE_TYPE: "none",
TEXT_TYPE: "string",
INT_TYPE: "integer",
FLOAT_TYPE: "float",
DATETIME_TYPE: "datetime"
}
def __init__(self, name, dtype, value):
if not dtype:
dtype = Trait.NONE_TYPE
base.Model.__init__(self, name=name, dtype=dtype, value=value)
def __repr__(self):
return "" % (self.name, self.dtype, self.value)
def serialize(self):
return self.name, self.dtype, serialize_dt(self.value)
def get_type_name(self):
return self.get_name_by_type(self.dtype)
@classmethod
def get_type_by_name(cls, type_name):
return getattr(cls, '%s_TYPE' % type_name.upper(), None)
@classmethod
def get_type_names(cls):
return cls.type_names.values()
@classmethod
def get_name_by_type(cls, type_id):
return cls.type_names.get(type_id, "none")
@classmethod
def convert_value(cls, trait_type, value):
if trait_type is cls.INT_TYPE:
return int(value)
if trait_type is cls.FLOAT_TYPE:
return float(value)
if trait_type is cls.DATETIME_TYPE:
return timeutils.normalize_time(timeutils.parse_isotime(value))
# Cropping the text value to match the TraitText value size
if isinstance(value, six.binary_type):
return value.decode('utf-8')[:255]
return six.text_type(value)[:255]
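# Illustrative conversions (not part of the module):
#   Trait.convert_value(Trait.INT_TYPE, '42')        -> 42
#   Trait.convert_value(Trait.FLOAT_TYPE, '1.5')     -> 1.5
#   Trait.convert_value(Trait.TEXT_TYPE, b'x' * 300) -> 'xx...x' (255 chars)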
panko-7.0.0/panko/storage/__init__.py 0000664 0001750 0001750 00000010420 13551610116 017531 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Storage backend management
"""
from oslo_config import cfg
from oslo_log import log
import six
import six.moves.urllib.parse as urlparse
from stevedore import driver
import tenacity
from panko import utils
LOG = log.getLogger(__name__)
OPTS = [
cfg.IntOpt('event_time_to_live',
default=-1,
help=("Number of seconds that events are kept "
"in the database for (<= 0 means forever).")),
cfg.StrOpt('event_connection',
secret=True,
deprecated_for_removal=True,
help='The connection string used to connect '
'to the event database - rather use ${database.connection}'),
]
class StorageUnknownWriteError(Exception):
"""Error raised when an unknown error occurs while recording."""
class StorageBadVersion(Exception):
"""Error raised when the storage backend version is not good enough."""
class StorageBadAggregate(Exception):
"""Error raised when an aggregate is unacceptable to storage backend."""
code = 400
class InvalidMarker(Exception):
"""Invalid pagination marker parameters"""
def get_connection_from_config(conf):
retries = conf.database.max_retries
@tenacity.retry(
reraise=True,
wait=tenacity.wait_fixed(conf.database.retry_interval),
stop=(tenacity.stop_after_attempt(retries) if retries >= 0
else tenacity.stop_never)
)
def _inner():
url = (conf.database.connection or
getattr(conf.database, 'event_connection', None))
return get_connection(url, conf)
return _inner()
def get_connection(url, conf):
"""Return an open connection to the database."""
connection_scheme = urlparse.urlparse(url).scheme
# SqlAlchemy connection URLs may specify a 'dialect' or
# 'dialect+driver'. Handle the case where a driver is specified.
engine_name = connection_scheme.split('+')[0]
# NOTE: translation not applied bug #1446983
LOG.debug('looking for %(name)r driver in panko.storage',
{'name': engine_name})
mgr = driver.DriverManager('panko.storage', engine_name)
return mgr.driver(url, conf)
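# Illustrative example (the URL is an assumption, not from this tree): for
# url = "mysql+pymysql://panko:secret@localhost/panko" the parsed scheme is
# "mysql+pymysql", engine_name becomes "mysql", and stevedore loads the
# matching driver from the "panko.storage" entry-point namespace.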
class EventFilter(object):
"""Properties for building an Event query.
:param start_timestamp: UTC start datetime (mandatory)
:param end_timestamp: UTC end datetime (mandatory)
:param event_type: the name of the event. None for all.
:param message_id: the message_id of the event. None for all.
:param admin_proj: the project_id of admin role. None if non-admin user.
:param traits_filter: the trait filter dicts, all of which are optional.
This parameter is a list of dictionaries that specify trait values:
.. code-block:: python
{'key': <key>,
'string': <value>,
'integer': <value>,
'datetime': <value>,
'float': <value>,
'op': <eq, lt, le, gt, ge, ne>}
"""
def __init__(self, start_timestamp=None, end_timestamp=None,
event_type=None, message_id=None, traits_filter=None,
admin_proj=None):
self.start_timestamp = utils.sanitize_timestamp(start_timestamp)
self.end_timestamp = utils.sanitize_timestamp(end_timestamp)
self.message_id = message_id
self.event_type = event_type
self.traits_filter = traits_filter or []
self.admin_proj = admin_proj
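# Illustrative example (field values are assumptions): a filter matching
# events of one type whose 'state' trait equals 'active':
#
#   EventFilter(event_type='compute.instance.update',
#               traits_filter=[{'key': 'state',
#                               'string': 'active',
#                               'op': 'eq'}])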
def __repr__(self):
return ("" %
(self.start_timestamp,
self.end_timestamp,
self.event_type,
six.text_type(self.traits_filter)))
panko-7.0.0/panko/storage/hbase/ 0000775 0001750 0001750 00000000000 13551610251 016505 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/hbase/utils.py 0000664 0001750 0001750 00000021771 13551610116 020227 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Various HBase helpers"""
import copy
import datetime
import bson.json_util
try:
from happybase.hbase.ttypes import AlreadyExists
except ImportError:
# import happybase to enable Hbase_thrift module
import happybase # noqa
from Hbase_thrift import AlreadyExists
from oslo_log import log
from oslo_serialization import jsonutils
import six
from panko.i18n import _
LOG = log.getLogger(__name__)
EVENT_TRAIT_TYPES = {'none': 0, 'string': 1, 'integer': 2, 'float': 3,
'datetime': 4}
OP_SIGN = {'eq': '=', 'lt': '<', 'le': '<=', 'ne': '!=', 'gt': '>', 'ge': '>='}
# We need this additional dictionary because we use reversed timestamps
# in the row keys of stored metrics.
OP_SIGN_REV = {'eq': '=', 'lt': '>', 'le': '>=', 'ne': '!=', 'gt': '<',
'ge': '<='}
def timestamp(dt, reverse=True):
"""Timestamp is count of milliseconds since start of epoch.
If reverse=True then the timestamp is reversed. This technique is used
in HBase row-key design when range queries are required: because rows
are sorted lexicographically, reversing the timestamp puts the newest
entries at the top of the table instead of the oldest ones.
:param dt: datetime which is translated to timestamp
:param reverse: a boolean parameter for reverse or straight count of
timestamp in milliseconds
:return: count or reversed count of milliseconds since start of epoch
"""
epoch = datetime.datetime(1970, 1, 1)
td = dt - epoch
ts = td.microseconds + td.seconds * 1000000 + td.days * 86400000000
return 0x7fffffffffffffff - ts if reverse else ts
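# Illustrative example: with reverse=True, later datetimes map to smaller
# values, so the newest rows sort first lexicographically:
#
#   >>> import datetime
#   >>> timestamp(datetime.datetime(1970, 1, 1), reverse=False)
#   0
#   >>> timestamp(datetime.datetime(1970, 1, 1))
#   9223372036854775807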
def make_events_query_from_filter(event_filter):
"""Return start and stop row for filtering and a query.
Query is based on the selected parameter.
:param event_filter: storage.EventFilter object.
"""
start = "%s" % (timestamp(event_filter.start_timestamp, reverse=False)
if event_filter.start_timestamp else "")
stop = "%s" % (timestamp(event_filter.end_timestamp, reverse=False)
if event_filter.end_timestamp else "")
kwargs = {'event_type': event_filter.event_type,
'event_id': event_filter.message_id}
res_q = make_query(**kwargs)
if event_filter.traits_filter:
for trait_filter in event_filter.traits_filter:
q_trait = make_query(trait_query=True, **trait_filter)
if q_trait:
if res_q:
res_q += " AND " + q_trait
else:
res_q = q_trait
return res_q, start, stop
def make_timestamp_query(func, start=None, start_op=None, end=None,
end_op=None, bounds_only=False, **kwargs):
"""Return a filter start and stop row for filtering and a query.
Query is based on the fact that CF-name is 'rts'.
:param start: Optional start timestamp
:param start_op: Optional start timestamp operator, like gt, ge
:param end: Optional end timestamp
:param end_op: Optional end timestamp operator, like lt, le
:param bounds_only: if True then no query is returned, only the start and end rows
:param func: a function that provide a format of row
:param kwargs: kwargs for :param func
"""
# We don't need to dump here because get_start_end_rts returns strings
rts_start, rts_end = get_start_end_rts(start, end)
start_row, end_row = func(rts_start, rts_end, **kwargs)
if bounds_only:
return start_row, end_row
q = []
start_op = start_op or 'ge'
end_op = end_op or 'lt'
if rts_start:
q.append("SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')" %
(OP_SIGN_REV[start_op], rts_start))
if rts_end:
q.append("SingleColumnValueFilter ('f', 'rts', %s, 'binary:%s')" %
(OP_SIGN_REV[end_op], rts_end))
res_q = None
if len(q):
res_q = " AND ".join(q)
return start_row, end_row, res_q
def get_start_end_rts(start, end):
rts_start = str(timestamp(start)) if start else ""
rts_end = str(timestamp(end)) if end else ""
return rts_start, rts_end
def make_query(trait_query=None, **kwargs):
"""Return a filter query string based on the selected parameters.
:param trait_query: optional boolean, for trait_query from kwargs
:param kwargs: key-value pairs to filter on. Key should be a real
column name in db
"""
q = []
res_q = None
# Queries for traits differ from the others: they are constructed with
# SingleColumnValueFilter and allow choosing the comparison operator.
if trait_query:
trait_name = kwargs.pop('key')
op = kwargs.pop('op', 'eq')
for k, v in kwargs.items():
if v is not None:
res_q = ("SingleColumnValueFilter "
"('f', '%s', %s, 'binary:%s', true, true)" %
(prepare_key(trait_name, EVENT_TRAIT_TYPES[k]),
OP_SIGN[op], dump(v)))
return res_q
# Note: we use the extended constructor for SingleColumnValueFilter here.
# It explicitly specifies that an entry should not be returned if the
# column family is not found in the table.
for key, value in sorted(kwargs.items()):
if value is not None:
if key == 'trait_type':
q.append("ColumnPrefixFilter('%s')" % value)
elif key == 'event_id':
q.append("RowFilter ( = , 'regexstring:\d*:%s')" % value)
else:
q.append("SingleColumnValueFilter "
"('f', '%s', =, 'binary:%s', true, true)" %
(quote(key), dump(value)))
res_q = None
if len(q):
res_q = " AND ".join(q)
return res_q
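# Illustrative example (the event type is an assumption): a simple
# equality query over a real column:
#
#   make_query(event_type='compute.instance.exists')
#   # -> "SingleColumnValueFilter ('f', 'event_type', =,
#   #     'binary:\"compute.instance.exists\"', true, true)"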
def prepare_key(*args):
"""Prepares names for rows and columns with correct separator.
:param args: strings or numbers to construct the key from
:return: key with quoted args that are separated with character ":"
"""
key_quote = []
for key in args:
if isinstance(key, six.integer_types):
key = str(key)
key_quote.append(quote(key))
return ":".join(key_quote)
def deserialize_entry(entry):
"""Return a list of flatten_result
Flatten_result contains a dict of simple structures such as 'resource_id':1
:param entry: entry from HBase, without row name and timestamp
"""
flatten_result = {}
for k, v in entry.items():
if ':' in k[2:]:
key = tuple([unquote(i) for i in k[2:].split(':')])
else:
key = unquote(k[2:])
flatten_result[key] = load(v)
return flatten_result
def serialize_entry(data=None, **kwargs):
"""Return a dict that is ready to be stored to HBase
:param data: dict to be serialized
:param kwargs: additional args
"""
data = data or {}
entry_dict = copy.copy(data)
entry_dict.update(**kwargs)
return {'f:' + quote(k, ':'): dump(v) for k, v in entry_dict.items()}
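# Illustrative example: values are JSON-dumped and keys are prefixed with
# the 'f' column family (":" is kept unquoted inside keys):
#
#   >>> serialize_entry({'event_type': 'x'}, rts='123')
#   {'f:event_type': '"x"', 'f:rts': '"123"'}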
def dump(data):
return jsonutils.dumps(data, default=bson.json_util.default)
def load(data):
return jsonutils.loads(data, object_hook=object_hook)
# We don't want tzinfo in the decoded json. This object_hook overrides
# bson.json_util.object_hook for $date values.
def object_hook(dct):
if "$date" in dct:
dt = bson.json_util.object_hook(dct)
return dt.replace(tzinfo=None)
return bson.json_util.object_hook(dct)
def create_tables(conn, tables, column_families):
for table in tables:
try:
conn.create_table(table, column_families)
except AlreadyExists:
if conn.table_prefix:
table = ("%(table_prefix)s"
"%(separator)s"
"%(table_name)s" %
dict(table_prefix=conn.table_prefix,
separator=conn.table_prefix_separator,
table_name=table))
LOG.warning(_("Cannot create table %(table_name)s "
"it already exists. Ignoring error")
% {'table_name': table})
def quote(s, *args):
"""Return quoted string even if it is unicode one.
:param s: string that should be quoted
:param args: any symbol we want to stay unquoted
"""
s_en = s.encode('utf8')
return six.moves.urllib.parse.quote(s_en, *args)
def unquote(s):
"""Return unquoted and decoded string.
:param s: string that should be unquoted
"""
s_de = six.moves.urllib.parse.unquote(s)
return s_de.decode('utf8')
panko-7.0.0/panko/storage/hbase/base.py 0000664 0001750 0001750 00000006474 13551610116 020004 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import warnings
import happybase
from oslo_log import log
from oslo_utils import netutils
from six.moves.urllib import parse as urlparse
from panko.storage.hbase import inmemory as hbase_inmemory
LOG = log.getLogger(__name__)
class Connection(object):
"""Base connection class for HBase."""
_memory_instance = None
def __init__(self, url):
"""Hbase Connection Initialization."""
warnings.warn("Panko's HBase driver is now deprecated. Please use "
"another driver.")
opts = self._parse_connection_url(url)
if opts['host'] == '__test__':
url = os.environ.get('PANKO_TEST_HBASE_URL')
if url:
# Reparse URL, but from the env variable now
opts = self._parse_connection_url(url)
self.conn_pool = self._get_connection_pool(opts)
else:
# This is a in-memory usage for unit tests
if Connection._memory_instance is None:
LOG.debug('Creating a new in-memory HBase '
'Connection object')
Connection._memory_instance = (hbase_inmemory.
MConnectionPool())
self.conn_pool = Connection._memory_instance
else:
self.conn_pool = self._get_connection_pool(opts)
@staticmethod
def _get_connection_pool(conf):
"""Return a connection pool to the database.
.. note::
The tests use a subclass to override this and return an
in-memory connection pool.
"""
LOG.debug('connecting to HBase on %(host)s:%(port)s',
{'host': conf['host'], 'port': conf['port']})
return happybase.ConnectionPool(
size=100, host=conf['host'], port=conf['port'],
table_prefix=conf['table_prefix'],
table_prefix_separator=conf['table_prefix_separator'])
@staticmethod
def _parse_connection_url(url):
"""Parse connection parameters from a database url.
.. note::
HBase Thrift does not support authentication and there is no
database name, so we are not looking for these in the url.
"""
opts = {}
result = netutils.urlsplit(url)
opts['table_prefix'] = urlparse.parse_qs(
result.query).get('table_prefix', [None])[0]
opts['table_prefix_separator'] = urlparse.parse_qs(
result.query).get('table_prefix_separator', ['_'])[0]
opts['dbtype'] = result.scheme
if ':' in result.netloc:
opts['host'], port = result.netloc.split(':')
else:
opts['host'] = result.netloc
port = 9090
opts['port'] = port and int(port) or 9090
return opts
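# Illustrative example (host and prefix are assumptions):
#
#   Connection._parse_connection_url(
#       'hbase://hbase-host:9090?table_prefix=panko')
#   # -> {'table_prefix': 'panko', 'table_prefix_separator': '_',
#   #     'dbtype': 'hbase', 'host': 'hbase-host', 'port': 9090}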
panko-7.0.0/panko/storage/hbase/__init__.py 0000664 0001750 0001750 00000000000 13551610116 020604 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/hbase/inmemory.py 0000664 0001750 0001750 00000022475 13551610116 020730 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""This is a very crude version of "in-memory HBase", which implements just
enough functionality of HappyBase API to support testing of our driver.
"""
import copy
import re
from oslo_log import log
import six
import panko
LOG = log.getLogger(__name__)
class MTable(object):
"""HappyBase.Table mock."""
def __init__(self, name, families):
self.name = name
self.families = families
self._rows_with_ts = {}
def row(self, key, columns=None):
if key not in self._rows_with_ts:
return {}
res = copy.copy(sorted(six.iteritems(
self._rows_with_ts.get(key)))[-1][1])
if columns:
keys = res.keys()
for key in keys:
if key not in columns:
res.pop(key)
return res
def rows(self, keys):
return ((k, self.row(k)) for k in keys)
def put(self, key, data, ts=None):
# Note: 'timestamped' puts are currently used for only one Resource
# table, which is why ts defaults to "0" when it is None. If a table
# ever needs both kinds of put, ts=0 can no longer be used.
if ts is None:
ts = "0"
if key not in self._rows_with_ts:
self._rows_with_ts[key] = {ts: data}
else:
if ts in self._rows_with_ts[key]:
self._rows_with_ts[key][ts].update(data)
else:
self._rows_with_ts[key].update({ts: data})
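# Illustrative example: two put() calls at different timestamps keep both
# versions, and row() returns the newest one:
#
#   t = MTable('test', {'f': {}})
#   t.put('row1', {'f:a': '1'}, ts='1')
#   t.put('row1', {'f:a': '2'}, ts='2')
#   t.row('row1')  # -> {'f:a': '2'}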
def delete(self, key):
del self._rows_with_ts[key]
def _get_latest_dict(self, row):
# The idea here is to return the latest versions of columns.
# In _rows_with_ts we store {row: {ts_1: {data}, ts_2: {data}}}.
# Iterating over the timestamps in ascending order and updating a
# single dict lets newer values overwrite older ones, which matches
# what HBase returns.
data = {}
for i in sorted(six.iteritems(self._rows_with_ts[row])):
data.update(i[1])
return data
def scan(self, filter=None, columns=None, row_start=None, row_stop=None,
limit=None):
columns = columns or []
sorted_keys = sorted(self._rows_with_ts)
# copy data between row_start and row_stop into a dict
rows = {}
for row in sorted_keys:
if row_start and row < row_start:
continue
if row_stop and row > row_stop:
break
rows[row] = self._get_latest_dict(row)
if columns:
ret = {}
for row, data in six.iteritems(rows):
for key in data:
if key in columns:
ret[row] = data
rows = ret
if filter:
# TODO(jdanjou): we should really parse this properly,
# but at the moment we are only going to support AND here
filters = filter.split('AND')
for f in filters:
# Extract filter name and its arguments
g = re.search("(.*)\((.*),?\)", f)
fname = g.group(1).strip()
fargs = [s.strip().replace('\'', '')
for s in g.group(2).split(',')]
m = getattr(self, fname)
if callable(m):
# overwrite rows for filtering to take effect
# in case of multiple filters
rows = m(fargs, rows)
else:
raise panko.NotImplementedError(
"%s filter is not implemented, "
"you may want to add it!" % fname)
for k in sorted(rows)[:limit]:
yield k, rows[k]
@staticmethod
def SingleColumnValueFilter(args, rows):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'SingleColumnValueFilter'
is found in the 'filter' argument.
"""
op = args[2]
column = "%s:%s" % (args[0], args[1])
value = args[3]
if value.startswith('binary:'):
value = value[7:]
r = {}
for row in rows:
data = rows[row]
if op == '=':
if column in data and data[column] == value:
r[row] = data
elif op == '<':
if column in data and data[column] < value:
r[row] = data
elif op == '<=':
if column in data and data[column] <= value:
r[row] = data
elif op == '>':
if column in data and data[column] > value:
r[row] = data
elif op == '>=':
if column in data and data[column] >= value:
r[row] = data
elif op == '!=':
if column in data and data[column] != value:
r[row] = data
return r
@staticmethod
def ColumnPrefixFilter(args, rows):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'ColumnPrefixFilter' is found
in the 'filter' argument.
:param args: a list of filter arguments, contain prefix of column
:param rows: a dict of row prefixes for filtering
"""
value = args[0]
column = 'f:' + value
r = {}
for row, data in rows.items():
column_dict = {}
for key in data:
if key.startswith(column):
column_dict[key] = data[key]
r[row] = column_dict
return r
@staticmethod
def RowFilter(args, rows):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'RowFilter' is found in the
'filter' argument.
:param args: a list of filter arguments, it contains operator and
sought string
:param rows: a dict of rows which are filtered
"""
op = args[0]
value = args[1]
if value.startswith('regexstring:'):
value = value[len('regexstring:'):]
r = {}
for row, data in rows.items():
try:
g = re.search(value, row).group()
if op == '=':
if g == row:
r[row] = data
else:
raise panko.NotImplementedError(
"In-memory "
"RowFilter doesn't support "
"the %s operation yet" % op)
except AttributeError:
pass
return r
@staticmethod
def QualifierFilter(args, rows):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'QualifierFilter' is found in
the 'filter' argument
"""
op = args[0]
value = args[1]
is_regex = False
if value.startswith('binaryprefix:'):
value = value[len('binaryprefix:'):]
if value.startswith('regexstring:'):
value = value[len('regexstring:'):]
is_regex = True
column = 'f:' + value
r = {}
# Validate the operator up front; otherwise any key that simply does
# not match the filter would raise instead of being filtered out.
if op not in ('=', '>=', '<=', '>', '<') and not is_regex:
raise panko.NotImplementedError(
"In-memory QualifierFilter "
"doesn't support the %s "
"operation yet" % op)
for row in rows:
data = rows[row]
r_data = {}
for key in data:
if ((op == '=' and key.startswith(column)) or
(op == '>=' and key >= column) or
(op == '<=' and key <= column) or
(op == '>' and key > column) or
(op == '<' and key < column) or
(is_regex and re.search(value, key))):
r_data[key] = data[key]
if r_data:
r[row] = r_data
return r
class MConnectionPool(object):
def __init__(self):
self.conn = MConnection()
def connection(self):
return self.conn
class MConnection(object):
"""HappyBase.Connection mock."""
def __init__(self):
self.tables = {}
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
pass
@staticmethod
def open():
LOG.debug("Opening in-memory HBase connection")
def create_table(self, n, families=None):
families = families or {}
if n in self.tables:
return self.tables[n]
t = MTable(n, families)
self.tables[n] = t
return t
def delete_table(self, name, use_prefix=True):
del self.tables[name]
def table(self, name):
return self.create_table(name)
panko-7.0.0/panko/storage/sqlalchemy/ 0000775 0001750 0001750 00000000000 13551610251 017565 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/sqlalchemy/alembic/ 0000775 0001750 0001750 00000000000 13551610251 021161 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/sqlalchemy/alembic/script.py.mako 0000664 0001750 0001750 00000000756 13551610116 023775 0 ustar zuul zuul 0000000 0000000 """${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
panko-7.0.0/panko/storage/sqlalchemy/alembic/versions/ 0000775 0001750 0001750 00000000000 13551610251 023031 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/sqlalchemy/alembic/versions/c3955547bff2_support_big_integer_traits.py 0000664 0001750 0001750 00000001623 13551610116 032675 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""support big integer traits
Revision ID: c3955547bff2
Revises:
Create Date: 2017-07-18 22:03:44.996571
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c3955547bff2'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('trait_int', "value", type_=sa.BigInteger)
panko-7.0.0/panko/storage/sqlalchemy/alembic/alembic.ini 0000664 0001750 0001750 00000003210 13551610116 023252 0 ustar zuul zuul 0000000 0000000 # A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = panko.storage.sqlalchemy:alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
panko-7.0.0/panko/storage/sqlalchemy/alembic/env.py 0000664 0001750 0001750 00000005216 13551610116 022327 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import with_statement
import os
from alembic import config as alembic_config
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = alembic_config.Config(os.path.join(os.path.dirname(__file__),
'alembic.ini'))
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
panko-7.0.0/panko/storage/sqlalchemy/alembic/README 0000664 0001750 0001750 00000000046 13551610116 022041 0 ustar zuul zuul 0000000 0000000 Generic single-database configuration. panko-7.0.0/panko/storage/sqlalchemy/models.py 0000664 0001750 0001750 00000012766 13551610116 021436 0 ustar zuul zuul 0000000 0000000 # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for Panko data.
"""
from oslo_serialization import jsonutils
import six
import sqlalchemy
from sqlalchemy import Column, Integer, String, ForeignKey, Index
from sqlalchemy import BigInteger, Float, DateTime
from sqlalchemy.dialects.mysql import DECIMAL
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship
from sqlalchemy.types import TypeDecorator
from panko import utils
class JSONEncodedDict(TypeDecorator):
"""Represents an immutable structure as a json-encoded string."""
impl = sqlalchemy.Text
@staticmethod
def process_bind_param(value, dialect):
if value is not None:
value = jsonutils.dumps(value)
return value
@staticmethod
def process_result_value(value, dialect):
if value is not None:
value = jsonutils.loads(value)
return value
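# Illustrative example: the two static methods round-trip a dict through
# its JSON text representation:
#
#   >>> JSONEncodedDict.process_bind_param({'a': 1}, None)
#   '{"a": 1}'
#   >>> JSONEncodedDict.process_result_value('{"a": 1}', None)
#   {'a': 1}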
class PreciseTimestamp(TypeDecorator):
"""Represents a timestamp precise to the microsecond."""
impl = DateTime
def load_dialect_impl(self, dialect):
if dialect.name == 'mysql':
return dialect.type_descriptor(DECIMAL(precision=20,
scale=6,
asdecimal=True))
return self.impl
@staticmethod
def process_bind_param(value, dialect):
if value is None:
return value
elif dialect.name == 'mysql':
return utils.dt_to_decimal(value)
return value
@staticmethod
def process_result_value(value, dialect):
if value is None:
return value
elif dialect.name == 'mysql':
return utils.decimal_to_dt(value)
return value
class PankoBase(object):
"""Base class for Panko Models."""
__table_args__ = {'mysql_charset': "utf8",
'mysql_engine': "InnoDB"}
__table_initialized__ = False
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def update(self, values):
"""Make the model object behave like a dict."""
for k, v in six.iteritems(values):
setattr(self, k, v)
Base = declarative_base(cls=PankoBase)
class EventType(Base):
"""Types of event records."""
__tablename__ = 'event_type'
id = Column(Integer, primary_key=True)
desc = Column(String(255), unique=True)
def __init__(self, event_type):
self.desc = event_type
def __repr__(self):
return "" % self.desc
class Event(Base):
__tablename__ = 'event'
__table_args__ = (
Index('ix_event_message_id', 'message_id'),
Index('ix_event_type_id', 'event_type_id'),
Index('ix_event_generated', 'generated')
)
id = Column(Integer, primary_key=True)
message_id = Column(String(50), unique=True)
generated = Column(PreciseTimestamp())
raw = deferred(Column(JSONEncodedDict()))
event_type_id = Column(Integer, ForeignKey('event_type.id'))
event_type = relationship("EventType", backref='events')
def __init__(self, message_id, event_type, generated, raw):
self.message_id = message_id
self.event_type = event_type
self.generated = generated
self.raw = raw
def __repr__(self):
return "" % (self.id,
self.message_id,
self.event_type,
self.generated)
class TraitText(Base):
"""Event text traits."""
__tablename__ = 'trait_text'
__table_args__ = (
Index('ix_trait_text_event_id_key', 'event_id', 'key'),
)
event_id = Column(Integer, ForeignKey('event.id'), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(String(255))
class TraitInt(Base):
"""Event integer traits."""
__tablename__ = 'trait_int'
__table_args__ = (
Index('ix_trait_int_event_id_key', 'event_id', 'key'),
)
event_id = Column(Integer, ForeignKey('event.id'), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(BigInteger)
class TraitFloat(Base):
"""Event float traits."""
__tablename__ = 'trait_float'
__table_args__ = (
Index('ix_trait_float_event_id_key', 'event_id', 'key'),
)
event_id = Column(Integer, ForeignKey('event.id'), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(Float(53))
class TraitDatetime(Base):
"""Event datetime traits."""
__tablename__ = 'trait_datetime'
__table_args__ = (
Index('ix_trait_datetime_event_id_key', 'event_id', 'key'),
)
event_id = Column(Integer, ForeignKey('event.id'), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(PreciseTimestamp())
panko-7.0.0/panko/storage/sqlalchemy/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021664 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/storage/impl_elasticsearch.py 0000664 0001750 0001750 00000030430 13551610116 021630 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import operator
import elasticsearch as es
from elasticsearch import helpers
from oslo_log import log
from oslo_utils import netutils
from oslo_utils import timeutils
import six
from panko import storage
from panko.storage import base
from panko.storage import models
from panko import utils
LOG = log.getLogger(__name__)
AVAILABLE_CAPABILITIES = {
'events': {'query': {'simple': True}},
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
}
class Connection(base.Connection):
"""Put the event data into an ElasticSearch db.
Events in ElasticSearch are indexed by day and stored by event_type.
An example document::
{"_index":"events_2014-10-21",
"_type":"event_type0",
"_id":"dc90e464-65ab-4a5d-bf66-ecb956b5d779",
"_score":1.0,
"_source":{"timestamp": "2014-10-21T20:02:09.274797"
"traits": {"id4_0": "2014-10-21T20:02:09.274797",
"id3_0": 0.7510790937279408,
"id2_0": 5,
"id1_0": "18c97ba1-3b74-441a-b948-a702a30cbce2"}
}
}
"""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
AVAILABLE_CAPABILITIES)
STORAGE_CAPABILITIES = utils.update_nested(
base.Connection.STORAGE_CAPABILITIES,
AVAILABLE_STORAGE_CAPABILITIES,
)
# NOTE(gordc): mainly for testing, data is not searchable after write,
# it is only searchable after periodic refreshes.
_refresh_on_write = False
def __init__(self, url, conf):
url_split = netutils.urlsplit(url)
use_ssl = conf.database.es_ssl_enabled
self.index_name = conf.database.es_index_name
self.conn = es.Elasticsearch(hosts=url_split.netloc + url_split.path,
use_ssl=use_ssl)
def upgrade(self):
iclient = es.client.IndicesClient(self.conn)
ts_template = {
'template': '*',
'mappings': {'_default_':
{'properties': {'traits': {'type': 'nested'}}}}}
iclient.put_template(name='enable_timestamp', body=ts_template)
def record_events(self, events):
def _build_bulk_index(event_list):
for ev in event_list:
traits = {t.name: t.value for t in ev.traits}
yield {'_op_type': 'create',
'_index': '%s_%s' % (self.index_name,
ev.generated.date().isoformat()),
'_type': ev.event_type,
'_id': ev.message_id,
'_source': {'timestamp': ev.generated.isoformat(),
'traits': traits,
'raw': ev.raw}}
error = None
for ok, result in helpers.streaming_bulk(
self.conn, _build_bulk_index(events)):
if not ok:
__, result = result.popitem()
if result['status'] == 409:
LOG.info('Duplicate event detected, skipping it: %s',
result)
else:
LOG.exception('Failed to record event: %s', result)
error = storage.StorageUnknownWriteError(result)
if self._refresh_on_write:
self.conn.indices.refresh(index='%s_*' % self.index_name)
while self.conn.cluster.pending_tasks(local=True)['tasks']:
pass
if error:
raise error
def _make_dsl_from_filter(self, indices, ev_filter):
q_args = {}
filters = []
if ev_filter.start_timestamp:
filters.append({'range': {'timestamp':
{'gte': ev_filter.start_timestamp.isoformat()}}})
while indices[0] < (
'%s_%s' % (self.index_name,
ev_filter.start_timestamp.date().isoformat())):
del indices[0]
if ev_filter.end_timestamp:
filters.append({'range': {'timestamp':
{'lte': ev_filter.end_timestamp.isoformat()}}})
while indices[-1] > (
'%s_%s' % (self.index_name,
ev_filter.end_timestamp.date().isoformat())):
del indices[-1]
q_args['index'] = indices
if ev_filter.event_type:
q_args['doc_type'] = ev_filter.event_type
if ev_filter.message_id:
filters.append({'term': {'_id': ev_filter.message_id}})
if ev_filter.traits_filter or ev_filter.admin_proj:
or_cond = []
trait_filters = []
for t_filter in ev_filter.traits_filter or []:
value = None
for val_type in ['integer', 'string', 'float', 'datetime']:
if t_filter.get(val_type):
value = t_filter.get(val_type)
if isinstance(value, six.string_types):
value = value.lower()
elif isinstance(value, datetime.datetime):
value = value.isoformat()
break
if t_filter.get('op') in ['gt', 'ge', 'lt', 'le']:
op = (t_filter.get('op').replace('ge', 'gte')
.replace('le', 'lte'))
trait_filters.append(
{'range': {
"traits.%s" % t_filter['key']: {op: value}}})
else:
tf = {"query": {"query_string": {
"query": "traits.%s: \"%s\"" % (t_filter['key'], value)
}}}
if t_filter.get('op') == 'ne':
tf = {"not": tf}
trait_filters.append(tf)
if ev_filter.admin_proj:
or_cond = [{'missing': {'field': 'traits.project_id'}},
{'term': {
'traits.project_id': ev_filter.admin_proj}}]
filters.append(
{'nested': {'path': 'traits', 'query': {'filtered': {
'filter': {'bool': {'must': trait_filters,
'should': or_cond}}}}}})
q_args['body'] = {'query': {'filtered':
{'filter': {'bool': {'must': filters}}}}}
return q_args
def get_events(self, event_filter, pagination=None):
limit = None
if pagination:
if pagination.get('sort'):
LOG.warning('Driver does not support sort functionality')
limit = pagination.get('limit')
if limit == 0:
return
iclient = es.client.IndicesClient(self.conn)
# sort the day-suffixed index names so the boundary trimming in
# _make_dsl_from_filter works (keys() is not an indexable list on py3)
indices = sorted(iclient.get_mapping('%s_*' % self.index_name).keys())
if indices:
filter_args = self._make_dsl_from_filter(indices, event_filter)
if limit is not None:
filter_args['size'] = limit
results = self.conn.search(fields=['_id', 'timestamp',
'_type', '_source'],
sort='timestamp:asc',
**filter_args)
trait_mappings = {}
for record in results['hits']['hits']:
trait_list = []
if record['_type'] not in trait_mappings:
trait_mappings[record['_type']] = list(
self.get_trait_types(record['_type']))
for key in record['_source']['traits'].keys():
value = record['_source']['traits'][key]
for t_map in trait_mappings[record['_type']]:
if t_map['name'] == key:
dtype = t_map['data_type']
break
else:
dtype = models.Trait.TEXT_TYPE
trait_list.append(models.Trait(
name=key, dtype=dtype,
value=models.Trait.convert_value(dtype, value)))
gen_ts = timeutils.normalize_time(timeutils.parse_isotime(
record['_source']['timestamp']))
yield models.Event(message_id=record['_id'],
event_type=record['_type'],
generated=gen_ts,
traits=sorted(
trait_list,
key=operator.attrgetter('dtype')),
raw=record['_source']['raw'])
def get_event_types(self):
iclient = es.client.IndicesClient(self.conn)
es_mappings = iclient.get_mapping('%s_*' % self.index_name)
seen_types = set()
for index in es_mappings.keys():
for ev_type in es_mappings[index]['mappings'].keys():
seen_types.add(ev_type)
# TODO(gordc): tests assume sorted ordering but backends are not
# explicitly ordered.
# NOTE: _default_ is a type that appears in all mappings but is not
# real 'type'
seen_types.discard('_default_')
return sorted(list(seen_types))
@staticmethod
def _remap_es_types(d_type):
if d_type == 'string':
d_type = 'text'
elif d_type == 'long':
d_type = 'int'
elif d_type == 'double':
d_type = 'float'
elif d_type == 'date' or d_type == 'date_time':
d_type = 'datetime'
return d_type
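# Illustrative example: Elasticsearch mapping types are folded back onto
# the trait type names understood by models.Trait.get_type_by_name():
#
#   >>> Connection._remap_es_types('long')
#   'int'
#   >>> Connection._remap_es_types('date_time')
#   'datetime'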
def get_trait_types(self, event_type):
iclient = es.client.IndicesClient(self.conn)
es_mappings = iclient.get_mapping('%s_*' % self.index_name)
seen_types = []
for index in es_mappings.keys():
# if event_type exists in index and has traits
if (es_mappings[index]['mappings'].get(event_type) and
es_mappings[index]['mappings'][event_type]['properties']
['traits'].get('properties')):
for t_type in (es_mappings[index]['mappings'][event_type]
['properties']['traits']['properties'].keys()):
d_type = (es_mappings[index]['mappings'][event_type]
['properties']['traits']['properties']
[t_type]['type'])
d_type = models.Trait.get_type_by_name(
self._remap_es_types(d_type))
if (t_type, d_type) not in seen_types:
yield {'name': t_type, 'data_type': d_type}
seen_types.append((t_type, d_type))
def get_traits(self, event_type, trait_type=None):
t_types = dict((res['name'], res['data_type'])
for res in self.get_trait_types(event_type))
if not t_types or (trait_type and trait_type not in t_types.keys()):
return
result = self.conn.search('%s_*' % self.index_name, event_type)
for ev in result['hits']['hits']:
if trait_type and ev['_source']['traits'].get(trait_type):
yield models.Trait(
name=trait_type,
dtype=t_types[trait_type],
value=models.Trait.convert_value(
t_types[trait_type],
ev['_source']['traits'][trait_type]))
else:
for trait in ev['_source']['traits'].keys():
yield models.Trait(
name=trait,
dtype=t_types[trait],
value=models.Trait.convert_value(
t_types[trait],
ev['_source']['traits'][trait]))
panko-7.0.0/panko/version.py 0000664 0001750 0001750 00000001204 13551610116 016013 0 ustar zuul zuul 0000000 0000000 #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('panko')
panko-7.0.0/panko/cmd/ 0000775 0001750 0001750 00000000000 13551610251 014522 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/cmd/storage.py 0000664 0001750 0001750 00000002273 13551610116 016544 0 ustar zuul zuul 0000000 0000000 # -*- encoding: utf-8 -*-
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from panko import service
from panko import storage
LOG = log.getLogger(__name__)
def dbsync():
conf = service.prepare_service()
storage.get_connection_from_config(conf).upgrade()
def expirer():
conf = service.prepare_service()
if conf.database.event_time_to_live > 0:
LOG.debug("Clearing expired event data")
conn = storage.get_connection_from_config(conf)
conn.clear_expired_data(conf.database.event_time_to_live)
else:
LOG.info("Nothing to clean, database event time to live "
"is disabled")
panko-7.0.0/panko/cmd/__init__.py 0000664 0001750 0001750 00000000000 13551610116 016621 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/profiler.py 0000664 0001750 0001750 00000004345 13551610116 016161 0 ustar zuul zuul 0000000 0000000 # Copyright 2017 Fujitsu Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
from oslo_log import log
from oslo_utils import importutils
import webob.dec
profiler = importutils.try_import('osprofiler.profiler')
profiler_initializer = importutils.try_import('osprofiler.initializer')
profiler_web = importutils.try_import('osprofiler.web')
LOG = log.getLogger(__name__)
class WsgiMiddleware(object):
def __init__(self, application, **kwargs):
self.application = application
@classmethod
def factory(cls, global_conf, **local_conf):
if profiler_web:
return profiler_web.WsgiMiddleware.factory(global_conf)
def filter_(app):
return cls(app)
return filter_
@webob.dec.wsgify
def __call__(self, request):
return request.get_response(self.application)
def setup(conf):
if hasattr(conf, 'profiler') and conf.profiler.enabled:
profiler_initializer.init_from_conf(
conf=conf,
context={},
project=conf.project,
service=conf.prog,
host=socket.gethostbyname(socket.gethostname()))
LOG.info('OSprofiler is enabled.')
def trace_cls(name, **kwargs):
"""Wrap the OSprofiler trace_cls.
Wrap the OSprofiler trace_cls decorator so that it will not try to
patch the class unless OSprofiler is present.
:param name: The name of action. For example, wsgi, rpc, db, ...
:param kwargs: Any other keyword args used by profiler.trace_cls
"""
def decorator(cls):
if profiler:
trace_decorator = profiler.trace_cls(name, **kwargs)
return trace_decorator(cls)
return cls
return decorator
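# Illustrative example (the class name is hypothetical): trace_cls degrades
# to a no-op decorator when osprofiler is not installed:
#
#   @trace_cls('db')
#   class EventRepository(object):
#       pass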
panko-7.0.0/panko/conf/ 0000775 0001750 0001750 00000000000 13551610251 014704 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/conf/defaults.py 0000664 0001750 0001750 00000002574 13551610116 017075 0 ustar zuul zuul 0000000 0000000 # Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import cors
def set_cors_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
cors.set_defaults(
allow_headers=['X-Auth-Token',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id',
'X-Openstack-Request-Id'],
expose_headers=['X-Auth-Token',
'X-Subject-Token',
'X-Service-Token',
'X-Openstack-Request-Id'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
panko-7.0.0/panko/conf/__init__.py 0000664 0001750 0001750 00000000000 13551610116 017003 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/hacking/ 0000775 0001750 0001750 00000000000 13551610251 015363 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/hacking/__init__.py 0000664 0001750 0001750 00000000000 13551610116 017462 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/hacking/checks.py 0000664 0001750 0001750 00000003277 13551610116 017206 0 ustar zuul zuul 0000000 0000000 # Copyright (c) 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Guidelines for writing new hacking checks
- Use only for Panko specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range C3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the C3xx value.
- List the new rule in the top level HACKING.rst file
"""
def no_log_warn(logical_line):
"""Disallow 'LOG.warn('
https://bugs.launchpad.net/tempest/+bug/1508442
C301
"""
if logical_line.startswith('LOG.warn('):
yield(0, 'C301 Use LOG.warning() rather than LOG.warn()')
def no_os_popen(logical_line):
"""Disallow 'os.popen('
Deprecated library function os.popen() Replace it using subprocess
https://bugs.launchpad.net/tempest/+bug/1529836
C302
"""
if 'os.popen(' in logical_line:
yield(0, 'C302 Deprecated library function os.popen(). '
'Replace it using subprocess module. ')
def factory(register):
register(no_log_warn)
register(no_os_popen)
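# Illustrative examples of lines the checks above would flag:
#
#   LOG.warn('message')       # C301: use LOG.warning() instead
#   out = os.popen('uptime')  # C302: use the subprocess module instead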
panko-7.0.0/panko/opts.py 0000664 0001750 0001750 00000003765 13551610116 015331 0 ustar zuul zuul 0000000 0000000 # Copyright 2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import panko.storage
import panko.utils
STORAGE_OPTS = [
cfg.IntOpt('max_retries',
default=10,
deprecated_group='database',
help='Maximum number of connection retries during startup. '
'Set to -1 to specify an infinite retry count.'),
cfg.IntOpt('retry_interval',
default=10,
deprecated_group='database',
help='Interval (in seconds) between retries of connection.'),
cfg.BoolOpt('es_ssl_enabled',
default=False,
help="Enable HTTPS connection in the Elasticsearch "
"connection"),
cfg.StrOpt('es_index_name',
default='events',
help='The name of the index in Elasticsearch'),
]
def list_opts():
return [
('DEFAULT',
[
# FIXME(jd) Move to [api]
cfg.StrOpt('api_paste_config',
default="api_paste.ini",
help="Configuration file for WSGI definition of API."),
]),
('api',
[
cfg.IntOpt('default_api_return_limit',
min=1,
default=100,
help='Default maximum number of '
'items returned by API request.'),
]),
('database', panko.storage.OPTS),
('storage', STORAGE_OPTS),
]
panko-7.0.0/panko/service.py 0000664 0001750 0001750 00000003722 13551610116 015775 0 ustar zuul zuul 0000000 0000000 # Copyright 2012-2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_db import options as db_options
import oslo_i18n
from oslo_log import log
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from panko.conf import defaults
from panko import opts
from panko import profiler
from panko import version
profiler_opts = importutils.try_import('osprofiler.opts')
def prepare_service(argv=None, config_files=None, share=False):
conf = cfg.ConfigOpts()
for group, options in opts.list_opts():
conf.register_opts(list(options),
group=None if group == "DEFAULT" else group)
db_options.set_defaults(conf)
if profiler_opts:
profiler_opts.set_defaults(conf)
if not share:
defaults.set_cors_middleware_defaults()
oslo_i18n.enable_lazy()
log.register_options(conf)
if argv is None:
argv = sys.argv
conf(argv[1:], project='panko', validate_default_values=True,
version=version.version_info.version_string(),
default_config_files=config_files)
if not share:
log.setup(conf, 'panko')
profiler.setup(conf)
# NOTE(liusheng): the guru meditation report cannot run when the
# service runs under an Apache daemon; when panko-api runs under
# mod_wsgi, argv is [], so we don't start it in that case.
if argv:
gmr.TextGuruMeditation.setup_autorun(version)
return conf
panko-7.0.0/panko/__init__.py 0000664 0001750 0001750 00000001461 13551610116 016072 0 ustar zuul zuul 0000000 0000000 # Copyright 2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
# FIXME(jd) This is used by WSME to return a correct HTTP code. We should
# not expose it here but wrap our methods in the API to convert it to a
# proper HTTP error.
code = 501
panko-7.0.0/panko/api/ 0000775 0001750 0001750 00000000000 13551610251 014530 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/app.py 0000664 0001750 0001750 00000004617 13551610116 015672 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
from oslo_config import cfg
from oslo_log import log
from paste import deploy
import pecan
from panko.api import hooks
from panko.api import middleware
from panko import service
LOG = log.getLogger(__name__)
def setup_app(root, conf):
app_hooks = [hooks.ConfigHook(conf),
hooks.DBHook(conf),
hooks.TranslationHook()]
return pecan.make_app(
root,
hooks=app_hooks,
wrap_app=middleware.ParsableErrorMiddleware,
guess_content_type_from_ext=False
)
# NOTE(sileht): pastedeploy uses ConfigParser to handle
# global_conf. Since Python 3, ConfigParser only allows strings as
# config values, not objects. To pass an object created before paste
# loads the app, we therefore store it in a global var, and each loaded
# app stores its configuration under a unique key to be concurrency
# safe.
global APPCONFIGS
APPCONFIGS = {}
def load_app(conf, appname='panko+keystone'):
global APPCONFIGS
# Build the WSGI app
cfg_path = conf.api_paste_config
if not os.path.isabs(cfg_path):
cfg_path = conf.find_file(cfg_path)
if cfg_path is None or not os.path.exists(cfg_path):
raise cfg.ConfigFilesNotFoundError([conf.api_paste_config])
config = dict(conf=conf)
configkey = str(uuid.uuid4())
APPCONFIGS[configkey] = config
LOG.info("Full WSGI config used: %s" % cfg_path)
return deploy.loadapp("config:" + cfg_path, name=appname,
global_conf={'configkey': configkey})
def build_wsgi_app(argv=None):
return load_app(service.prepare_service(argv=argv))
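# Illustrative usage (deployment detail assumed): a WSGI server such as
# uwsgi or gunicorn can point at build_wsgi_app, e.g.
#
#   application = build_wsgi_app(argv=[])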
def app_factory(global_config, **local_conf):
global APPCONFIGS
conf = APPCONFIGS.get(global_config.get('configkey'))
return setup_app(root=local_conf.get('root'), **conf)
panko-7.0.0/panko/api/middleware.py 0000664 0001750 0001750 00000012316 13551610116 017222 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 IBM Corp.
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Middleware to replace the plain text message body of an error
response with one formatted so the client can parse it.
Based on pecan.middleware.errordocument
"""
from lxml import etree
from oslo_log import log
from oslo_serialization import jsonutils
import six
import webob
from panko import i18n
LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse."""
@staticmethod
def best_match_language(accept_language):
"""Determines best available locale from the Accept-Language header.
:returns: the best language match or None if the 'Accept-Language'
header was not available in the request.
"""
if not accept_language:
return None
all_languages = i18n.get_available_languages()
return accept_language.best_match(all_languages)
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
# Request for this state, modified by replace_start_response()
# and used when an error is being reported.
state = {}
def replacement_start_response(status, headers, exc_info=None):
"""Overrides the default response to make errors parsable."""
try:
status_code = int(status.split(' ')[0])
state['status_code'] = status_code
except (ValueError, TypeError): # pragma: nocover
raise Exception((
'ErrorDocumentMiddleware received an invalid '
'status %s' % status
))
else:
if (state['status_code'] // 100) not in (2, 3):
# Remove some headers so we can replace them later
# when we have the full error message and can
# compute the length.
headers = [(h, v)
for (h, v) in headers
if h not in ('Content-Length', 'Content-Type')
]
# Save the headers in case we need to modify them.
state['headers'] = headers
return start_response(status, headers, exc_info)
app_iter = self.app(environ, replacement_start_response)
if (state['status_code'] // 100) not in (2, 3):
req = webob.Request(environ)
error = environ.get('translatable_error')
user_locale = self.best_match_language(req.accept_language)
if (req.accept.best_match(['application/json', 'application/xml'])
== 'application/xml'):
content_type = 'application/xml'
try:
# simple check that the xml is valid
fault = etree.fromstring(b'\n'.join(app_iter))
# Add the translated error to the xml data
if error is not None:
for fault_string in fault.findall('faultstring'):
fault_string.text = i18n.translate(error,
user_locale)
error_message = etree.tostring(fault)
body = b''.join((b'<error_message>',
error_message,
b'</error_message>'))
except etree.XMLSyntaxError as err:
LOG.error('Error parsing HTTP response: %s', err)
error_message = state['status_code']
body = '<error_message>%s</error_message>' % error_message
if six.PY3:
body = body.encode('utf-8')
else:
content_type = 'application/json'
app_data = b'\n'.join(app_iter)
if six.PY3:
app_data = app_data.decode('utf-8')
try:
fault = jsonutils.loads(app_data)
if error is not None and 'faultstring' in fault:
fault['faultstring'] = i18n.translate(error,
user_locale)
except ValueError:
fault = app_data
body = jsonutils.dumps({'error_message': fault})
if six.PY3:
body = body.encode('utf-8')
state['headers'].append(('Content-Length', str(len(body))))
state['headers'].append(('Content-Type', content_type))
body = [body]
else:
body = app_iter
return body
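# A minimal usage sketch (illustrative only): wrapping any WSGI app makes its
# error bodies parsable. A JSON client then receives a document of the form
# {"error_message": ...} instead of a plain-text fault:
#
#   wrapped = ParsableErrorMiddleware(wsgi_app)
#   # a 4xx/5xx from wsgi_app now yields a JSON (or XML) error document,
#   # translated when 'translatable_error' is present in the environ.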
panko-7.0.0/panko/api/hooks.py 0000664 0001750 0001750 00000003203 13551610116 016223 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pecan import hooks
from panko import storage
class ConfigHook(hooks.PecanHook):
"""Attach the configuration object to the request.
That allows controllers to get it.
"""
def __init__(self, conf):
super(ConfigHook, self).__init__()
self.conf = conf
def before(self, state):
state.request.cfg = self.conf
class DBHook(hooks.PecanHook):
def __init__(self, conf):
self.connection = storage.get_connection_from_config(
conf)
def before(self, state):
state.request.conn = self.connection
class TranslationHook(hooks.PecanHook):
def after(self, state):
# After a request has been done, we need to see if
# ClientSideError has added an error onto the response.
# If it has, we need to get it into the thread-safe WSGI
# environ to be used by the ParsableErrorMiddleware.
if hasattr(state.response, 'translatable_error'):
state.request.environ['translatable_error'] = (
state.response.translatable_error)
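# Illustrative sketch (not part of the original module): with these hooks
# installed by setup_app(), a pecan controller can reach the configuration
# and the storage connection directly from the request, e.g.:
#
#   def get_all(self):
#       conf = pecan.request.cfg    # attached by ConfigHook.before()
#       conn = pecan.request.conn   # attached by DBHook.before()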
panko-7.0.0/panko/api/app.wsgi 0000664 0001750 0001750 00000001357 13551610116 016211 0 ustar zuul zuul 0000000 0000000 # -*- mode: python -*-
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Use this file for deploying the API under mod_wsgi."""
from panko.api import app
application = app.build_wsgi_app(argv=[])
panko-7.0.0/panko/api/__init__.py 0000664 0001750 0001750 00000000000 13551610116 016627 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/controllers/ 0000775 0001750 0001750 00000000000 13551610251 017076 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/controllers/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021175 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/controllers/v2/ 0000775 0001750 0001750 00000000000 13551610251 017425 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/controllers/v2/events.py 0000664 0001750 0001750 00000027711 13551610116 021313 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_log import log
from oslo_utils import strutils
import pecan
from pecan import rest
import six
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from panko.api.controllers.v2 import base
from panko.api.controllers.v2 import utils as v2_utils
from panko.api import rbac
from panko.i18n import _
from panko import profiler
from panko import storage
from panko.storage import models as event_models
LOG = log.getLogger(__name__)
class TraitDescription(base.Base):
"""A description of a trait, with no associated value."""
type = wtypes.text
"the data type, defaults to string"
name = wtypes.text
"the name of the trait"
@classmethod
def sample(cls):
return cls(name='service',
type='string'
)
class EventQuery(base.Query):
"""Query arguments for Event Queries."""
_supported_types = ['integer', 'float', 'string', 'datetime']
type = wsme.wsattr(wtypes.text, default='string')
"the type of the trait filter, defaults to string"
field = wsme.wsattr(wtypes.text)
'''
Name of the field to filter on. Can be either a trait name or field of an
event.
1) Use start_timestamp/end_timestamp to filter on `generated` field.
2) Specify the 'all_tenants=True' query parameter to get all events for all
projects; this is only allowed for admin users.
'''
def __repr__(self):
# for logging calls
return '<EventQuery %r %s %r %s>' % (self.field,
self.op,
self._get_value_as_type(),
self.type)
@classmethod
def sample(cls):
return cls(field="event_type",
type="string",
op="eq",
value="compute.instance.create.start")
class Trait(base.Base):
"""A Trait associated with an event."""
name = wtypes.text
"The name of the trait"
value = wtypes.text
"the value of the trait"
type = wtypes.text
"the type of the trait (string, integer, float or datetime)"
@staticmethod
def _convert_storage_trait(trait):
"""Helper method to convert a storage model into an API trait instance.
If an API trait instance is passed in, just return it.
"""
if isinstance(trait, Trait):
return trait
value = (six.text_type(trait.value)
if not trait.dtype == event_models.Trait.DATETIME_TYPE
else trait.value.isoformat())
trait_type = event_models.Trait.get_name_by_type(trait.dtype)
return Trait(name=trait.name, type=trait_type, value=value)
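# Hedged example (illustrative only; the trait name is hypothetical): a
# storage-model trait whose dtype is Trait.DATETIME_TYPE is rendered with
# its ISO timestamp and symbolic type name, e.g.
# Trait(name='launched_at', type='datetime', value='2015-01-01T12:00:00').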
@classmethod
def sample(cls):
return cls(name='service',
type='string',
value='compute.hostname'
)
class Event(base.Base):
"""A System event."""
message_id = wtypes.text
"The message ID for the notification"
event_type = wtypes.text
"The type of the event"
_traits = None
def get_traits(self):
return self._traits
def set_traits(self, traits):
self._traits = map(Trait._convert_storage_trait, traits)
traits = wsme.wsproperty(wtypes.ArrayType(Trait),
get_traits,
set_traits)
"Event specific properties"
generated = datetime.datetime
"The time the event occurred"
raw = base.JsonType()
"The raw copy of notification"
@classmethod
def sample(cls):
return cls(
event_type='compute.instance.update',
generated=datetime.datetime(2015, 1, 1, 12, 0, 0, 0),
message_id='94834db1-8f1b-404d-b2ec-c35901f1b7f0',
traits={
Trait(name='request_id',
value='req-4e2d67b8-31a4-48af-bb2f-9df72a353a72'),
Trait(name='service',
value='conductor.tem-devstack-01'),
Trait(name='tenant_id',
value='7f13f2b17917463b9ee21aa92c4b36d6')
},
raw={'status': {'nested': 'started'}}
)
def _build_rbac_query_filters():
filters = {'t_filter': [], 'admin_proj': None}
# Returns user_id, proj_id for non-admins
user_id, proj_id = rbac.get_limited_to(pecan.request.headers)
# If non-admin, filter events by user and project
if user_id and proj_id:
filters['t_filter'].append({"key": "project_id", "string": proj_id,
"op": "eq"})
filters['t_filter'].append({"key": "user_id", "string": user_id,
"op": "eq"})
elif not user_id and not proj_id:
filters['admin_proj'] = pecan.request.headers.get('X-Project-Id')
return filters
def _event_query_to_event_filter(q):
evt_model_filter = {
'event_type': None,
'message_id': None,
'start_timestamp': None,
'end_timestamp': None
}
filters = _build_rbac_query_filters()
traits_filter = filters['t_filter']
admin_proj = filters['admin_proj']
for i in q:
if not i.op:
i.op = 'eq'
elif i.op not in base.operation_kind:
error = (_('Operator %(operator)s is not supported. The supported'
' operators are: %(supported)s') %
{'operator': i.op, 'supported': base.operation_kind})
raise base.ClientSideError(error)
if i.field in evt_model_filter:
if i.op != 'eq' and i.field in ('event_type', 'message_id'):
error = (_('Operator %(operator)s is not supported. Only'
' `eq\' operator is available for field'
' %(field)s') %
{'operator': i.op, 'field': i.field})
raise base.ClientSideError(error)
if i.op != 'ge' and i.field == 'start_timestamp':
error = (_('Operator %(operator)s is not supported. Only'
' `ge\' operator is available for field'
' %(field)s') %
{'operator': i.op, 'field': i.field})
raise base.ClientSideError(error)
if i.op != 'le' and i.field == 'end_timestamp':
error = (_('Operator %(operator)s is not supported. Only'
' `le\' operator is available for field'
' %(field)s') %
{'operator': i.op, 'field': i.field})
raise base.ClientSideError(error)
evt_model_filter[i.field] = i.value
elif i.field == 'all_tenants' and admin_proj:
all_tenants = strutils.bool_from_string(i.value)
if all_tenants:
admin_proj = None
else:
trait_type = i.type or 'string'
traits_filter.append({"key": i.field,
trait_type: i._get_value_as_type(),
"op": i.op})
return storage.EventFilter(traits_filter=traits_filter,
admin_proj=admin_proj, **evt_model_filter)
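# Hedged example (illustrative only) of the query-to-filter conversion above:
# a single EventQuery on 'event_type' with the default 'eq' operator becomes
# an EventFilter keyed on that model field, while unknown fields fall through
# to traits_filter:
#
#   q = [EventQuery(field='event_type', op='eq',
#                   value='compute.instance.create.start', type='string')]
#   f = _event_query_to_event_filter(q)
#   # f.event_type == 'compute.instance.create.start'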
@profiler.trace_cls('api')
class TraitsController(rest.RestController):
"""Works on Event Traits."""
@v2_utils.requires_admin
@wsme_pecan.wsexpose([Trait], wtypes.text, wtypes.text)
def get_one(self, event_type, trait_name):
"""Return all instances of a trait for an event type.
:param event_type: Event type to filter traits by
:param trait_name: Trait to return values for
"""
LOG.debug("Getting traits for %s", event_type)
return [Trait._convert_storage_trait(t)
for t in pecan.request.conn.get_traits(event_type, trait_name)]
@v2_utils.requires_admin
@wsme_pecan.wsexpose([TraitDescription], wtypes.text)
def get_all(self, event_type):
"""Return all trait names for an event type.
:param event_type: Event type to filter traits by
"""
get_trait_name = event_models.Trait.get_name_by_type
return [TraitDescription(name=t['name'],
type=get_trait_name(t['data_type']))
for t in pecan.request.conn.get_trait_types(event_type)]
@profiler.trace_cls('api')
class EventTypesController(rest.RestController):
"""Works on Event Types in the system."""
traits = TraitsController()
@v2_utils.requires_admin
@wsme_pecan.wsexpose(None, wtypes.text)
def get_one(self, event_type):
"""Unused API, will always return 404.
:param event_type: An event type
"""
pecan.abort(404)
@v2_utils.requires_admin
@wsme_pecan.wsexpose([six.text_type])
def get_all(self):
"""Get all event types."""
return list(pecan.request.conn.get_event_types())
@profiler.trace_cls('api')
class EventsController(rest.RestController):
"""Works on Events."""
@v2_utils.requires_context
@wsme_pecan.wsexpose([Event], [EventQuery], int, [str], str)
def get_all(self, q=None, limit=None, sort=None, marker=None):
"""Return all events matching the query filters.
:param q: Filter arguments for which Events to return
:param limit: Maximum number of events to be returned.
:param sort: A pair of sort key and sort direction combined with ":"
:param marker: The pagination query marker, message id of the last
item viewed
"""
rbac.enforce("events:index", pecan.request)
q = q or []
event_filter = _event_query_to_event_filter(q)
pagination = v2_utils.set_pagination_options(
sort, limit, marker, event_models.Event)
return [Event(message_id=event.message_id,
event_type=event.event_type,
generated=event.generated,
traits=event.traits,
raw=event.raw)
for event in
pecan.request.conn.get_events(event_filter, pagination)]
@v2_utils.requires_context
@wsme_pecan.wsexpose(Event, wtypes.text)
def get_one(self, message_id):
"""Return a single event with the given message id.
:param message_id: Message ID of the Event to be returned
"""
rbac.enforce("events:show", pecan.request)
filters = _build_rbac_query_filters()
t_filter = filters['t_filter']
admin_proj = filters['admin_proj']
event_filter = storage.EventFilter(traits_filter=t_filter,
admin_proj=admin_proj,
message_id=message_id)
events = [event for event
in pecan.request.conn.get_events(event_filter)]
if not events:
raise base.EntityNotFound(_("Event"), message_id)
if len(events) > 1:
LOG.error(("More than one event with "
"id %s returned from storage driver"), message_id)
event = events[0]
return Event(message_id=event.message_id,
event_type=event.event_type,
generated=event.generated,
traits=event.traits,
raw=event.raw)
panko-7.0.0/panko/api/controllers/v2/utils.py 0000664 0001750 0001750 00000012311 13551610116 021135 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import pecan
import wsme
from panko.api.controllers.v2 import base
from panko.api import rbac
def get_auth_project(on_behalf_of=None):
auth_project = rbac.get_limited_to_project(pecan.request.headers)
created_by = pecan.request.headers.get('X-Project-Id')
is_admin = auth_project is None
if is_admin and on_behalf_of != created_by:
auth_project = on_behalf_of
return auth_project
# TODO(fabiog): this decorator should disappear and have a more unified
# way of controlling access and scope. Before messing with this, though
# I feel this file should be re-factored in smaller chunks one for each
# controller (e.g. meters and so on ...). Right now its size is
# overwhelming.
def requires_admin(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
usr_limit, proj_limit = rbac.get_limited_to(pecan.request.headers)
# If User and Project are None, you have full access.
if usr_limit and proj_limit:
# Since this decorator gets called outside of the wsme context,
# raising an exception results in an internal error, so call abort
# to handle the error instead.
ex = base.ProjectNotAuthorized(proj_limit)
pecan.core.abort(status_code=ex.code, detail=ex.msg)
return func(*args, **kwargs)
return wrapped
def requires_context(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
req_usr = pecan.request.headers.get('X-User-Id')
proj_usr = pecan.request.headers.get('X-Project-Id')
if ((not req_usr) or (not proj_usr)):
pecan.core.abort(status_code=403,
detail='RBAC Authorization Failed')
return func(*args, **kwargs)
return wrapped
def set_pagination_options(sort, limit, marker, api_model):
"""Sets the options for pagination specifying query options
Arguments:
sort -- List of sorting criteria. Each sorting option has the format
<sort key>:<sort direction>
Valid sort keys: message_id, generated
(SUPPORT_SORT_KEYS in panko/event/storage/models.py)
Valid sort directions: asc (ascending), desc (descending)
(SUPPORT_DIRS in panko/event/storage/models.py)
This defaults to asc if unspecified
(DEFAULT_DIR in panko/event/storage/models.py)
impl_sqlalchemy.py:
(see _get_pagination_query)
If sort list is empty, this defaults to
['generated:asc', 'message_id:asc']
(DEFAULT_SORT in panko/event/storage/models.py)
limit -- Integer specifying maximum number of values to return
If unspecified, this defaults to
pecan.request.cfg.api.default_api_return_limit
marker -- If specified, assumed to be an integer and assumed to be the
message id of the last object on the previous page of the results
api_model -- Specifies the class implementing the api model to use for
this pagination. The class is expected to provide the
following members:
SUPPORT_DIRS
SUPPORT_SORT_KEYS
DEFAULT_DIR
DEFAULT_SORT
PRIMARY_KEY
"""
if limit and limit <= 0:
raise wsme.exc.InvalidInput('limit', limit,
'the limit should be a positive integer.')
if not limit:
limit = pecan.request.cfg.api.default_api_return_limit
sorts = list()
for s in sort or []:
sort_key, __, sort_dir = s.partition(':')
if sort_key not in api_model.SUPPORT_SORT_KEYS:
raise wsme.exc.InvalidInput(
'sort', s, "the sort parameter should be a pair of sort "
"key and sort dir combined with ':', or only"
" sort key specified and sort dir will be default "
"'%s', the supported sort keys are: %s" %
(str(api_model.DEFAULT_DIR),
str(api_model.SUPPORT_SORT_KEYS)))
if sort_dir and sort_dir not in api_model.SUPPORT_DIRS:
raise wsme.exc.InvalidInput(
'sort direction', s,
"the sort parameter should be a pair of sort "
"key and sort dir combined with ':', or only"
" sort key specified and sort dir will be default "
"'%s', the supported sort directions are: %s" %
(str(api_model.DEFAULT_DIR),
str(api_model.SUPPORT_DIRS)))
sorts.append((sort_key, sort_dir or api_model.DEFAULT_DIR))
return {'limit': limit,
'marker': marker,
'sort': sorts}
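# Hedged example (illustrative only) of the pagination option parsing above,
# assuming the Event api_model with its documented defaults and the
# event_models import from panko.storage:
#
#   set_pagination_options(['generated:desc'], 100, None, event_models.Event)
#   # -> {'limit': 100, 'marker': None, 'sort': [('generated', 'desc')]}
#   set_pagination_options(['message_id'], 10, None, event_models.Event)
#   # -> the direction defaults: 'sort' is [('message_id', DEFAULT_DIR)]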
panko-7.0.0/panko/api/controllers/v2/base.py 0000664 0001750 0001750 00000020053 13551610116 020711 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import datetime
import functools
import inspect
from oslo_serialization import jsonutils
from oslo_utils import strutils
from oslo_utils import timeutils
import pecan
import six
import wsme
from wsme import types as wtypes
from panko.i18n import _
operation_kind = ('lt', 'le', 'eq', 'ne', 'ge', 'gt')
operation_kind_enum = wtypes.Enum(str, *operation_kind)
class ClientSideError(wsme.exc.ClientSideError):
def __init__(self, error, status_code=400):
pecan.response.translatable_error = error
super(ClientSideError, self).__init__(error, status_code)
class EntityNotFound(ClientSideError):
def __init__(self, entity, id):
super(EntityNotFound, self).__init__(
_("%(entity)s %(id)s Not Found") % {'entity': entity,
'id': id},
status_code=404)
class ProjectNotAuthorized(ClientSideError):
def __init__(self, id, aspect='project'):
params = dict(aspect=aspect, id=id)
super(ProjectNotAuthorized, self).__init__(
_("Not Authorized to access %(aspect)s %(id)s") % params,
status_code=401)
class AdvEnum(wtypes.wsproperty):
"""Handle default and mandatory for wtypes.Enum."""
def __init__(self, name, *args, **kwargs):
self._name = '_advenum_%s' % name
self._default = kwargs.pop('default', None)
mandatory = kwargs.pop('mandatory', False)
enum = wtypes.Enum(*args, **kwargs)
super(AdvEnum, self).__init__(datatype=enum, fget=self._get,
fset=self._set, mandatory=mandatory)
def _get(self, parent):
if hasattr(parent, self._name):
value = getattr(parent, self._name)
return value or self._default
return self._default
def _set(self, parent, value):
try:
if self.datatype.validate(value):
setattr(parent, self._name, value)
except ValueError as e:
raise wsme.exc.InvalidInput(self._name.replace('_advenum_', '', 1),
value, e)
class Base(wtypes.DynamicBase):
@classmethod
def from_db_model(cls, m):
return cls(**(m.as_dict()))
@classmethod
def from_db_and_links(cls, m, links):
return cls(links=links, **(m.as_dict()))
def as_dict(self, db_model):
valid_keys = inspect.getargspec(db_model.__init__)[0]
if 'self' in valid_keys:
valid_keys.remove('self')
return self.as_dict_from_keys(valid_keys)
def as_dict_from_keys(self, keys):
return dict((k, getattr(self, k))
for k in keys
if hasattr(self, k) and
getattr(self, k) != wsme.Unset)
class Query(Base):
"""Query filter."""
# The data types supported by the query.
_supported_types = ['integer', 'float', 'string', 'boolean', 'datetime']
# Functions to convert the data field to the correct type.
_type_converters = {'integer': int,
'float': float,
'boolean': functools.partial(
strutils.bool_from_string, strict=True),
'string': six.text_type,
'datetime': timeutils.parse_isotime}
_op = None # provide a default
def get_op(self):
return self._op or 'eq'
def set_op(self, value):
self._op = value
field = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the field to test"
# op = wsme.wsattr(operation_kind, default='eq')
# this ^ doesn't seem to work.
op = wsme.wsproperty(operation_kind_enum, get_op, set_op)
"The comparison operator. Defaults to 'eq'."
value = wsme.wsattr(wtypes.text, mandatory=True)
"The value to compare against the stored data"
type = wtypes.text
"The data type of value to compare against the stored data"
def __repr__(self):
# for logging calls
return '<Query %r %s %r %s>' % (self.field,
self.op,
self.value,
self.type)
@classmethod
def sample(cls):
return cls(field='resource_id',
op='eq',
value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36',
type='string'
)
def as_dict(self):
return self.as_dict_from_keys(['field', 'op', 'type', 'value'])
def _get_value_as_type(self, forced_type=None):
"""Convert metadata value to the specified data type.
This method is called during a metadata query to help convert the
queried metadata to the data type specified by the user. If no
data type is given, the metadata will be parsed by ast.literal_eval
to attempt a smart conversion.
NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised
from wsmeext/sphinxext.py. It's OK to call it outside the Query class.
Because the "public" side of that class is actually the outside of the
API, and the "private" side is the API implementation. The method is
only used in the API implementation, so it's OK.
:returns: metadata value converted with the specified data type.
"""
type = forced_type or self.type
try:
converted_value = self.value
if not type:
try:
converted_value = ast.literal_eval(self.value)
except (ValueError, SyntaxError):
# Unable to convert the metadata value automatically
# let it default to self.value
pass
else:
if type not in self._supported_types:
# Types must be explicitly declared so the
# correct type converter may be used. Subclasses
# of Query may define _supported_types and
# _type_converters to define their own types.
raise TypeError()
converted_value = self._type_converters[type](self.value)
if isinstance(converted_value, datetime.datetime):
converted_value = timeutils.normalize_time(converted_value)
except ValueError:
msg = (_('Unable to convert the value %(value)s'
' to the expected data type %(type)s.') %
{'value': self.value, 'type': type})
raise ClientSideError(msg)
except TypeError:
msg = (_('The data type %(type)s is not supported. The supported'
' data type list is: %(supported)s') %
{'type': type, 'supported': self._supported_types})
raise ClientSideError(msg)
except Exception:
msg = (_('Unexpected exception converting %(value)s to'
' the expected data type %(type)s.') %
{'value': self.value, 'type': type})
raise ClientSideError(msg)
return converted_value
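# Hedged examples (illustrative only) of the conversion rules above, mirroring
# the behaviour exercised in panko/tests/unit/api/v2/test_query.py:
#
#   Query(field='f', op='eq', value='123')._get_value_as_type()
#   # -> 123 (ast.literal_eval guesses the type when none is given)
#   Query(field='f', op='eq', value='123', type='string')._get_value_as_type()
#   # -> u'123' (an explicit type forces the matching converter)
#   Query(field='f', op='eq', value='fake', type='integer')._get_value_as_type()
#   # -> raises ClientSideError when the conversion fails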
class JsonType(wtypes.UserType):
"""A simple JSON type."""
basetype = wtypes.text
name = 'json'
@staticmethod
def validate(value):
# check that value can be serialised
jsonutils.dumps(value)
return value
panko-7.0.0/panko/api/controllers/v2/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021524 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/api/controllers/v2/root.py 0000664 0001750 0001750 00000002150 13551610116 020760 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from panko.api.controllers.v2 import capabilities
from panko.api.controllers.v2 import events
class V2Controller(object):
"""Version 2 API controller root."""
event_types = events.EventTypesController()
events = events.EventsController()
capabilities = capabilities.CapabilitiesController()
panko-7.0.0/panko/api/controllers/v2/capabilities.py 0000664 0001750 0001750 00000005031 13551610116 022427 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 IBM Corp.
# Copyright 2013 eNovance
# Copyright Ericsson AB 2013. All rights reserved
# Copyright 2014 Hewlett-Packard Company
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from panko.api.controllers.v2 import base
from panko import profiler
from panko import utils
def _flatten_capabilities(capabilities):
return dict((k, v) for k, v in utils.recursive_keypairs(capabilities))
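# Illustrative example (not part of the original module), based on the ':'
# separator that utils.recursive_keypairs uses by default:
#
#   _flatten_capabilities({'events': {'query': {'simple': True}}})
#   # -> {'events:query:simple': True}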
@profiler.trace_cls('api')
class Capabilities(base.Base):
"""A representation of the API and storage capabilities.
Usually constrained by restrictions imposed by the storage driver.
"""
api = {wtypes.text: bool}
"A flattened dictionary of API capabilities"
event_storage = {wtypes.text: bool}
"A flattened dictionary of event storage capabilities"
@classmethod
def sample(cls):
return cls(
api=_flatten_capabilities({
'events': {'query': {'simple': True}},
}),
event_storage=_flatten_capabilities(
{'storage': {'production_ready': True}}),
)
@profiler.trace_cls('api')
class CapabilitiesController(rest.RestController):
"""Manages capabilities queries."""
@wsme_pecan.wsexpose(Capabilities)
def get(self):
"""Returns a flattened dictionary of API capabilities.
Capabilities supported by the currently configured storage driver.
"""
# variation in API capabilities is effectively determined by
# the lack of strict feature parity across storage drivers
conn = pecan.request.conn
driver_capabilities = {'events': conn.get_capabilities()['events']}
driver_perf = conn.get_storage_capabilities()
return Capabilities(api=_flatten_capabilities(driver_capabilities),
event_storage=_flatten_capabilities(driver_perf))
panko-7.0.0/panko/api/controllers/root.py 0000664 0001750 0001750 00000003373 13551610116 020441 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
MEDIA_TYPE_JSON = 'application/vnd.openstack.telemetry-%s+json'
MEDIA_TYPE_XML = 'application/vnd.openstack.telemetry-%s+xml'
class VersionsController(object):
@pecan.expose('json')
def index(self):
base_url = pecan.request.application_url
available = [{'tag': 'v2', 'date': '2013-02-13T00:00:00Z', }]
collected = [version_descriptor(base_url, v['tag'], v['date'])
for v in available]
versions = {'versions': {'values': collected}}
return versions
def version_descriptor(base_url, version, released_on):
url = version_url(base_url, version)
return {
'id': version,
'links': [
{'href': url, 'rel': 'self', },
{'href': 'https://docs.openstack.org/',
'rel': 'describedby', 'type': 'text/html', }],
'media-types': [
{'base': 'application/json', 'type': MEDIA_TYPE_JSON % version, },
{'base': 'application/xml', 'type': MEDIA_TYPE_XML % version, }],
'status': 'stable',
'updated': released_on,
}
def version_url(base_url, version_number):
return '%s/%s' % (base_url, version_number)
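# Illustrative example (not part of the original module; the base URL is
# hypothetical):
#
#   version_url('http://localhost:8977', 'v2')
#   # -> 'http://localhost:8977/v2'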
panko-7.0.0/panko/api/rbac.py 0000664 0001750 0001750 00000006356 13551610116 016023 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2014 Hewlett-Packard Company
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Access Control Lists (ACL's) control access the API server."""
from oslo_policy import policy
import pecan
from panko import policies
_ENFORCER = None
def init():
global _ENFORCER
if not _ENFORCER:
_ENFORCER = policy.Enforcer(pecan.request.cfg)
_ENFORCER.load_rules()
_ENFORCER.register_defaults(policies.list_policies())
def reset():
global _ENFORCER
if _ENFORCER:
_ENFORCER.clear()
_ENFORCER = None
def _has_rule(name):
return name in _ENFORCER.rules
def enforce(policy_name, request):
"""Return the user and project the request should be limited to.
:param request: HTTP request
:param policy_name: the policy name to validate authz against.
"""
init()
rule_method = "telemetry:" + policy_name
headers = request.headers
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['user_id'] = (headers.get('X-User-Id'))
policy_dict['project_id'] = (headers.get('X-Project-Id'))
# maintain backward compat with Juno and previous by allowing the action if
# there is no rule defined for it
if ((_has_rule('default') or _has_rule(rule_method)) and
not _ENFORCER.enforce(rule_method, {}, policy_dict)):
pecan.core.abort(status_code=403, detail='RBAC Authorization Failed')
# TODO(fabiog): these methods are still used because the scoping part is really
# convoluted and difficult to separate out.
def get_limited_to(headers):
"""Return the user and project the request should be limited to.
:param headers: HTTP headers dictionary
:return: A tuple of (user, project), set to None if there's no limit on
one of these.
"""
init()
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['user_id'] = (headers.get('X-User-Id'))
policy_dict['project_id'] = (headers.get('X-Project-Id'))
# maintain backward compat with Juno and previous by using context_is_admin
# rule if the segregation rule (added in Kilo) is not defined
rule_name = 'segregation' if _has_rule(
'segregation') else 'context_is_admin'
if not _ENFORCER.enforce(rule_name,
{},
policy_dict):
return headers.get('X-User-Id'), headers.get('X-Project-Id')
return None, None
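# Hedged sketch (illustrative only) of the scoping behaviour above, assuming
# the default policy where the admin role satisfies 'context_is_admin':
#
#   get_limited_to({'X-Roles': 'admin'})
#   # -> (None, None): no limit for admins
#   get_limited_to({'X-Roles': 'member',
#                   'X-User-Id': 'u1', 'X-Project-Id': 'p1'})
#   # -> ('u1', 'p1'): non-admins are limited to their own scope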
def get_limited_to_project(headers):
"""Return the project the request should be limited to.
:param headers: HTTP headers dictionary
:return: A project, or None if there's no limit on it.
"""
return get_limited_to(headers)[1]
panko-7.0.0/panko/tests/ 0000775 0001750 0001750 00000000000 13551610251 015121 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/db.py 0000664 0001750 0001750 00000016655 13551610116 016075 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests."""
import os
import warnings
import fixtures
import mock
from oslo_utils import uuidutils
import six
from six.moves.urllib import parse as urlparse
import sqlalchemy
from testtools import testcase
from panko import service
from panko import storage
from panko.tests import base as test_base
try:
from panko.tests import mocks
except ImportError:
mocks = None # happybase module is not Python 3 compatible yet
class MongoDbManager(fixtures.Fixture):
def __init__(self, url, conf):
self._url = url
self.conf = conf
def setUp(self):
super(MongoDbManager, self).setUp()
with warnings.catch_warnings():
warnings.filterwarnings(
action='ignore',
message='.*you must provide a username and password.*')
try:
self.connection = storage.get_connection(self.url, self.conf)
except storage.StorageBadVersion as e:
raise testcase.TestSkipped(six.text_type(e))
@property
def url(self):
return '%(url)s_%(db)s' % {
'url': self._url,
'db': uuidutils.generate_uuid(dashed=False)
}
class SQLManager(fixtures.Fixture):
def __init__(self, url, conf):
db_name = 'panko_%s' % uuidutils.generate_uuid(dashed=False)
engine = sqlalchemy.create_engine(url)
conn = engine.connect()
self._create_database(conn, db_name)
conn.close()
engine.dispose()
parsed = list(urlparse.urlparse(url))
parsed[2] = '/' + db_name
self.url = urlparse.urlunparse(parsed)
self.conf = conf
def setUp(self):
super(SQLManager, self).setUp()
self.connection = storage.get_connection(self.url, self.conf)
class PgSQLManager(SQLManager):
@staticmethod
def _create_database(conn, db_name):
conn.connection.set_isolation_level(0)
conn.execute('CREATE DATABASE %s WITH TEMPLATE template0;' % db_name)
conn.connection.set_isolation_level(1)
class MySQLManager(SQLManager):
@staticmethod
def _create_database(conn, db_name):
conn.execute('CREATE DATABASE %s;' % db_name)
class ElasticSearchManager(fixtures.Fixture):
def __init__(self, url, conf):
self.url = url
self.conf = conf
def setUp(self):
super(ElasticSearchManager, self).setUp()
self.connection = storage.get_connection(
self.url, self.conf)
# prefix each test with unique index name
inx_uuid = uuidutils.generate_uuid(dashed=False)
self.connection.index_name = 'events_%s' % inx_uuid
# force index on write so data is queryable right away
self.connection._refresh_on_write = True
class HBaseManager(fixtures.Fixture):
def __init__(self, url, conf):
self._url = url
self.conf = conf
def setUp(self):
super(HBaseManager, self).setUp()
self.connection = storage.get_connection(
self.url, self.conf)
# Use a unique prefix for each test so its data can be distinguished,
# because all test data is stored in one table.
data_prefix = uuidutils.generate_uuid(dashed=False)
def table(conn, name):
return mocks.MockHBaseTable(name, conn, data_prefix)
# Mock only real HBase connection, MConnection "table" method
# stays origin.
mock.patch('happybase.Connection.table', new=table).start()
# We shouldn't delete data and tables after each test,
# because that takes too long.
# All test tables will be deleted in setup-test-env.sh.
mock.patch("happybase.Connection.disable_table",
new=mock.MagicMock()).start()
mock.patch("happybase.Connection.delete_table",
new=mock.MagicMock()).start()
mock.patch("happybase.Connection.create_table",
new=mock.MagicMock()).start()
@property
def url(self):
return '%s?table_prefix=%s&table_prefix_separator=%s' % (
self._url,
os.getenv("PANKO_TEST_HBASE_TABLE_PREFIX", "test"),
os.getenv("PANKO_TEST_HBASE_TABLE_PREFIX_SEPARATOR", "_")
)
class SQLiteManager(fixtures.Fixture):
def __init__(self, url, conf):
self.url = url
self.conf = conf
def setUp(self):
super(SQLiteManager, self).setUp()
self.connection = storage.get_connection(
self.url, self.conf)
@six.add_metaclass(test_base.SkipNotImplementedMeta)
class TestBase(test_base.BaseTestCase):
DRIVER_MANAGERS = {
'mongodb': MongoDbManager,
'mysql': MySQLManager,
'postgresql': PgSQLManager,
'sqlite': SQLiteManager,
'es': ElasticSearchManager,
}
if mocks is not None:
DRIVER_MANAGERS['hbase'] = HBaseManager
def setUp(self):
super(TestBase, self).setUp()
db_url = os.environ.get('PIFPAF_URL', "sqlite://").replace(
"mysql://", "mysql+pymysql://")
engine = urlparse.urlparse(db_url).scheme
# in case some drivers have additional specification, for example:
# PyMySQL will have scheme mysql+pymysql
engine = engine.split('+')[0]
# NOTE(Alexei_987) Shortcut to skip expensive db setUp
test_method = self._get_test_method()
if (hasattr(test_method, '_run_with')
and engine not in test_method._run_with):
raise testcase.TestSkipped(
'Test is not applicable for %s' % engine)
self.CONF = service.prepare_service([], [])
manager = self.DRIVER_MANAGERS.get(engine)
if not manager:
self.skipTest("missing driver manager: %s" % engine)
self.db_manager = manager(db_url, self.CONF)
self.useFixture(self.db_manager)
self.conn = self.db_manager.connection
self.conn.upgrade()
self.useFixture(fixtures.MockPatch('panko.storage.get_connection',
side_effect=self._get_connection))
def tearDown(self):
self.conn.clear()
self.conn = None
super(TestBase, self).tearDown()
def _get_connection(self, url, conf):
return self.conn
def run_with(*drivers):
"""Used to mark tests that are only applicable for certain db driver.
Skips test if driver is not available.
"""
def decorator(test):
if isinstance(test, type) and issubclass(test, TestBase):
# Decorate all test methods
for attr in dir(test):
value = getattr(test, attr)
if callable(value) and attr.startswith('test_'):
if six.PY3:
value._run_with = drivers
else:
value.__func__._run_with = drivers
else:
test._run_with = drivers
return test
return decorator
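# Illustrative usage (not part of the original module; the class name is
# hypothetical): restrict a whole test class, or a single test method, to
# the given backends; TestBase.setUp() skips them when PIFPAF_URL points at
# a different engine:
#
#   @run_with('mysql', 'postgresql')
#   class EventsOnSQLTest(TestBase):
#       def test_something(self):
#           ...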
panko-7.0.0/panko/tests/unit/ 0000775 0001750 0001750 00000000000 13551610251 016100 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/meter/ 0000775 0001750 0001750 00000000000 13551610251 017214 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/meter/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021313 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/storage/ 0000775 0001750 0001750 00000000000 13551610251 017544 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/storage/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021643 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/storage/sqlalchemy/ 0000775 0001750 0001750 00000000000 13551610251 021706 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/storage/sqlalchemy/__init__.py 0000664 0001750 0001750 00000000000 13551610116 024005 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/storage/sqlalchemy/test_models.py 0000664 0001750 0001750 00000007302 13551610116 024604 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslotest import base
import sqlalchemy
from sqlalchemy.dialects.mysql import DECIMAL
from sqlalchemy.types import NUMERIC
from panko.storage.sqlalchemy import models
from panko import utils
class PreciseTimestampTest(base.BaseTestCase):
@staticmethod
def fake_dialect(name):
def _type_descriptor_mock(desc):
if type(desc) == DECIMAL:
return NUMERIC(precision=desc.precision, scale=desc.scale)
dialect = mock.MagicMock()
dialect.name = name
dialect.type_descriptor = _type_descriptor_mock
return dialect
def setUp(self):
super(PreciseTimestampTest, self).setUp()
self._mysql_dialect = self.fake_dialect('mysql')
self._postgres_dialect = self.fake_dialect('postgres')
self._type = models.PreciseTimestamp()
self._date = datetime.datetime(2012, 7, 2, 10, 44)
def test_load_dialect_impl_mysql(self):
result = self._type.load_dialect_impl(self._mysql_dialect)
self.assertEqual(NUMERIC, type(result))
self.assertEqual(20, result.precision)
self.assertEqual(6, result.scale)
self.assertTrue(result.asdecimal)
def test_load_dialect_impl_postgres(self):
result = self._type.load_dialect_impl(self._postgres_dialect)
self.assertEqual(sqlalchemy.DateTime, type(result))
def test_process_bind_param_store_decimal_mysql(self):
expected = utils.dt_to_decimal(self._date)
result = self._type.process_bind_param(self._date, self._mysql_dialect)
self.assertEqual(expected, result)
def test_process_bind_param_store_datetime_postgres(self):
result = self._type.process_bind_param(self._date,
self._postgres_dialect)
self.assertEqual(self._date, result)
def test_process_bind_param_store_none_mysql(self):
result = self._type.process_bind_param(None, self._mysql_dialect)
self.assertIsNone(result)
def test_process_bind_param_store_none_postgres(self):
result = self._type.process_bind_param(None,
self._postgres_dialect)
self.assertIsNone(result)
def test_process_result_value_datetime_mysql(self):
dec_value = utils.dt_to_decimal(self._date)
result = self._type.process_result_value(dec_value,
self._mysql_dialect)
self.assertEqual(self._date, result)
def test_process_result_value_datetime_postgres(self):
result = self._type.process_result_value(self._date,
self._postgres_dialect)
self.assertEqual(self._date, result)
def test_process_result_value_none_mysql(self):
result = self._type.process_result_value(None,
self._mysql_dialect)
self.assertIsNone(result)
def test_process_result_value_none_postgres(self):
result = self._type.process_result_value(None,
self._postgres_dialect)
self.assertIsNone(result)
panko-7.0.0/panko/tests/unit/storage/test_get_connection.py 0000664 0001750 0001750 00000006053 13551610116 024157 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for panko/storage/
"""
import mock
from oslotest import base
import six
from panko import service
from panko import storage
from panko.storage import impl_log
from panko.storage import impl_sqlalchemy
class EngineTest(base.BaseTestCase):
def test_get_connection(self):
engine = storage.get_connection('log://localhost', None)
self.assertIsInstance(engine, impl_log.Connection)
def test_get_connection_no_such_engine(self):
try:
storage.get_connection('no-such-engine://localhost', None)
except RuntimeError as err:
self.assertIn('no-such-engine', six.text_type(err))
class ConnectionRetryTest(base.BaseTestCase):
def setUp(self):
super(ConnectionRetryTest, self).setUp()
self.CONF = service.prepare_service([], config_files=[])
def test_retries(self):
# stevedore gives warning log instead of any exception
with mock.patch.object(storage, 'get_connection',
side_effect=Exception) as retries:
try:
self.CONF.set_override("retry_interval", 1,
group="database")
self.CONF.set_override("max_retries", 3,
group="database")
storage.get_connection_from_config(self.CONF)
except Exception:
self.assertEqual(3, retries.call_count)
else:
self.fail()
class ConnectionConfigTest(base.BaseTestCase):
def setUp(self):
super(ConnectionConfigTest, self).setUp()
self.CONF = service.prepare_service([], config_files=[])
def test_only_default_url(self):
self.CONF.set_override("connection", "log://", group="database")
conn = storage.get_connection_from_config(self.CONF)
self.assertIsInstance(conn, impl_log.Connection)
def test_two_urls(self):
self.CONF.set_override("connection", "log://", group="database")
self.CONF.set_override("event_connection", "sqlite://",
group="database")
conn = storage.get_connection_from_config(self.CONF)
self.assertIsInstance(conn, impl_log.Connection)
def test_sqlalchemy_driver(self):
self.CONF.set_override("connection", "sqlite+pysqlite://",
group="database")
conn = storage.get_connection_from_config(self.CONF)
self.assertIsInstance(conn, impl_sqlalchemy.Connection)
panko-7.0.0/panko/tests/unit/test_utils.py 0000664 0001750 0001750 00000006363 13551610116 020661 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for panko/utils.py
"""
import datetime
import decimal
from oslotest import base
from panko import utils
class TestUtils(base.BaseTestCase):
def test_datetime_to_decimal(self):
expected = 1356093296.12
utc_datetime = datetime.datetime.utcfromtimestamp(expected)
actual = utils.dt_to_decimal(utc_datetime)
self.assertAlmostEqual(expected, float(actual), places=5)
def test_decimal_to_datetime(self):
expected = 1356093296.12
dexpected = decimal.Decimal(str(expected)) # Python 2.6 wants str()
expected_datetime = datetime.datetime.utcfromtimestamp(expected)
actual_datetime = utils.decimal_to_dt(dexpected)
# Python 3 has a rounding issue on this, so use float
self.assertAlmostEqual(utils.dt_to_decimal(expected_datetime),
utils.dt_to_decimal(actual_datetime),
places=5)
def test_recursive_keypairs(self):
data = {'a': 'A', 'b': 'B',
'nested': {'a': 'A', 'b': 'B'}}
pairs = list(utils.recursive_keypairs(data))
self.assertEqual([('a', 'A'), ('b', 'B'),
('nested:a', 'A'), ('nested:b', 'B')],
pairs)
def test_recursive_keypairs_with_separator(self):
data = {'a': 'A',
'b': 'B',
'nested': {'a': 'A',
'b': 'B',
},
}
separator = '.'
pairs = list(utils.recursive_keypairs(data, separator))
self.assertEqual([('a', 'A'),
('b', 'B'),
('nested.a', 'A'),
('nested.b', 'B')],
pairs)
def test_recursive_keypairs_with_list_of_dict(self):
small = 1
big = 1 << 64
expected = [('a', 'A'),
('b', 'B'),
('nested:list', [{small: 99, big: 42}])]
data = {'a': 'A',
'b': 'B',
'nested': {'list': [{small: 99, big: 42}]}}
pairs = list(utils.recursive_keypairs(data))
self.assertEqual(len(expected), len(pairs))
for k, v in pairs:
# the keys 1 and 1<<64 cause a hash collision on 64bit platforms
if k == 'nested:list':
self.assertIn(v,
[[{small: 99, big: 42}],
[{big: 42, small: 99}]])
else:
self.assertIn((k, v), expected)
def test_decimal_to_dt_with_none_parameter(self):
self.assertIsNone(utils.decimal_to_dt(None))
panko-7.0.0/panko/tests/unit/event/ 0000775 0001750 0001750 00000000000 13551610251 017221 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/event/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021320 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/__init__.py 0000664 0001750 0001750 00000000000 13551610116 020177 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/api/ 0000775 0001750 0001750 00000000000 13551610251 016651 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/api/test_app.py 0000664 0001750 0001750 00000002310 13551610116 021036 0 ustar zuul zuul 0000000 0000000 # Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from panko.api import app
from panko import service
from panko.tests import base
class TestApp(base.BaseTestCase):
def setUp(self):
super(TestApp, self).setUp()
self.CONF = service.prepare_service([], [])
def test_api_paste_file_not_exist(self):
self.CONF.set_override('api_paste_config', 'non-existent-file')
with mock.patch.object(self.CONF, 'find_file') as ff:
ff.return_value = None
self.assertRaises(cfg.ConfigFilesNotFoundError, app.load_app,
self.CONF)
panko-7.0.0/panko/tests/unit/api/test_versions.py 0000664 0001750 0001750 00000003075 13551610116 022137 0 ustar zuul zuul 0000000 0000000 # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from panko.tests.functional import api
V2_MEDIA_TYPES = [
{
'base': 'application/json',
'type': 'application/vnd.openstack.telemetry-v2+json'
}, {
'base': 'application/xml',
'type': 'application/vnd.openstack.telemetry-v2+xml'
}
]
V2_HTML_DESCRIPTION = {
'href': 'https://docs.openstack.org/',
'rel': 'describedby',
'type': 'text/html',
}
V2_EXPECTED_RESPONSE = {
'id': 'v2',
'links': [
{
'rel': 'self',
'href': 'http://localhost/v2',
},
V2_HTML_DESCRIPTION
],
'media-types': V2_MEDIA_TYPES,
'status': 'stable',
'updated': '2013-02-13T00:00:00Z',
}
V2_VERSION_RESPONSE = {
"version": V2_EXPECTED_RESPONSE
}
VERSIONS_RESPONSE = {
"versions": {
"values": [
V2_EXPECTED_RESPONSE
]
}
}
class TestVersions(api.FunctionalTest):
def test_versions(self):
data = self.get_json('/')
self.assertEqual(VERSIONS_RESPONSE, data)
panko-7.0.0/panko/tests/unit/api/__init__.py 0000664 0001750 0001750 00000000000 13551610116 020750 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/api/v2/ 0000775 0001750 0001750 00000000000 13551610251 017200 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/unit/api/v2/test_wsme_custom_type.py 0000664 0001750 0001750 00000002137 13551610116 024222 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base
import wsme
from panko.api.controllers.v2 import base as v2_base
class TestWsmeCustomType(base.BaseTestCase):
def test_advenum_default(self):
class dummybase(wsme.types.Base):
ae = v2_base.AdvEnum("name", str, "one", "other", default="other")
obj = dummybase()
self.assertEqual("other", obj.ae)
obj = dummybase(ae="one")
self.assertEqual("one", obj.ae)
self.assertRaises(wsme.exc.InvalidInput, dummybase, ae="not exists")
panko-7.0.0/panko/tests/unit/api/v2/test_query.py 0000664 0001750 0001750 00000015004 13551610116 021756 0 ustar zuul zuul 0000000 0000000 # Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test the methods related to query."""
import datetime
import fixtures
import mock
from oslotest import base
import wsme
from panko.api.controllers.v2 import base as v2_base
from panko.api.controllers.v2 import events
class TestQuery(base.BaseTestCase):
def setUp(self):
super(TestQuery, self).setUp()
self.useFixture(fixtures.MonkeyPatch(
'pecan.response', mock.MagicMock()))
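        # _build_rbac_query_filters is stubbed out so the tests below can
        # focus on Query value coercion rather than RBAC filtering.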
self.useFixture(fixtures.MockPatch('panko.api.controllers.v2.events'
'._build_rbac_query_filters',
return_value={'t_filter': [],
'admin_proj': None}))
def test_get_value_as_type_with_integer(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='123',
type='integer')
expected = 123
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_float(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='123.456',
type='float')
expected = 123.456
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_boolean(self):
query = v2_base.Query(field='metadata.is_public',
op='eq',
value='True',
type='boolean')
expected = True
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_string(self):
query = v2_base.Query(field='metadata.name',
op='eq',
value='linux',
type='string')
expected = 'linux'
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_datetime(self):
query = v2_base.Query(field='metadata.date',
op='eq',
value='2014-01-01T05:00:00',
type='datetime')
self.assertIsInstance(query._get_value_as_type(), datetime.datetime)
self.assertIsNone(query._get_value_as_type().tzinfo)
def test_get_value_as_type_with_integer_without_type(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='123')
expected = 123
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_float_without_type(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='123.456')
expected = 123.456
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_boolean_without_type(self):
query = v2_base.Query(field='metadata.is_public',
op='eq',
value='True')
expected = True
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_string_without_type(self):
query = v2_base.Query(field='metadata.name',
op='eq',
value='linux')
expected = 'linux'
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_bad_type(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='123.456',
type='blob')
self.assertRaises(wsme.exc.ClientSideError, query._get_value_as_type)
def test_get_value_as_type_with_bad_value(self):
query = v2_base.Query(field='metadata.size',
op='eq',
value='fake',
type='integer')
self.assertRaises(wsme.exc.ClientSideError, query._get_value_as_type)
def test_get_value_as_type_integer_expression_without_type(self):
# bug 1221736
query = v2_base.Query(field='should_be_a_string',
op='eq',
value='WWW-Layer-4a80714f')
expected = 'WWW-Layer-4a80714f'
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_boolean_expression_without_type(self):
# bug 1221736
query = v2_base.Query(field='should_be_a_string',
op='eq',
value='True or False')
expected = 'True or False'
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_syntax_error(self):
# bug 1221736
value = 'WWW-Layer-4a80714f-0232-4580-aa5e-81494d1a4147-uolhh25p5xxm'
query = v2_base.Query(field='group_id',
op='eq',
value=value)
expected = value
self.assertEqual(expected, query._get_value_as_type())
def test_get_value_as_type_with_syntax_error_colons(self):
# bug 1221736
value = 'Ref::StackId'
query = v2_base.Query(field='field_name',
op='eq',
value=value)
expected = value
self.assertEqual(expected, query._get_value_as_type())
def test_event_query_to_event_filter_with_bad_op(self):
# bug 1511592
query = v2_base.Query(field='event_type',
op='ne',
value='compute.instance.create.end',
type='string')
self.assertRaises(v2_base.ClientSideError,
events._event_query_to_event_filter, [query])
panko-7.0.0/panko/tests/unit/api/v2/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021277 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/base.py 0000664 0001750 0001750 00000005554 13551610116 016416 0 ustar zuul zuul 0000000 0000000 # Copyright 2012 New Dream Network (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test base classes.
"""
import functools
import os.path
from oslo_utils import timeutils
from oslotest import base
import six
from testtools import testcase
import webtest
import panko
class BaseTestCase(base.BaseTestCase):
def assertTimestampEqual(self, first, second, msg=None):
"""Checks that two timestamps are equals.
This relies on assertAlmostEqual to avoid rounding problem, and only
checks up the first microsecond values.
"""
return self.assertAlmostEqual(
timeutils.delta_seconds(first, second),
0.0,
places=5)
def assertIsEmpty(self, obj):
try:
if len(obj) != 0:
self.fail("%s is not empty" % type(obj))
except (TypeError, AttributeError):
self.fail("%s doesn't have length" % type(obj))
def assertIsNotEmpty(self, obj):
try:
if len(obj) == 0:
self.fail("%s is empty" % type(obj))
except (TypeError, AttributeError):
self.fail("%s doesn't have length" % type(obj))
@staticmethod
def path_get(project_file=None):
root = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..',
'..',
)
)
if project_file:
return os.path.join(root, project_file)
else:
return root
def _skip_decorator(func):
@functools.wraps(func)
def skip_if_not_implemented(*args, **kwargs):
try:
return func(*args, **kwargs)
except panko.NotImplementedError as e:
raise testcase.TestSkipped(six.text_type(e))
except webtest.app.AppError as e:
if 'not implemented' in six.text_type(e):
raise testcase.TestSkipped(six.text_type(e))
raise
return skip_if_not_implemented
class SkipNotImplementedMeta(type):
def __new__(cls, name, bases, local):
for attr in local:
value = local[attr]
if callable(value) and (
attr.startswith('test_') or attr == 'setUp'):
local[attr] = _skip_decorator(value)
return type.__new__(cls, name, bases, local)
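# Illustrative (hypothetical) use of SkipNotImplementedMeta: applied as a
# metaclass, it wraps setUp and every test_* method with _skip_decorator,
# so backends that raise panko.NotImplementedError skip instead of fail.
# The class and method names below are made up for the example:
#
#     class MyDriverScenarioTest(six.with_metaclass(SkipNotImplementedMeta,
#                                                   BaseTestCase)):
#         def test_complex_query(self):
#             self.conn.query_events(...)  # skipped, not failed, if the
#                                          # driver does not implement it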
panko-7.0.0/panko/tests/mocks.py 0000664 0001750 0001750 00000006656 13551610116 016624 0 ustar zuul zuul 0000000 0000000
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import happybase
class MockHBaseTable(happybase.Table):
def __init__(self, name, connection, data_prefix):
        # data_prefix is added to all rows written in this test; it keeps
        # data from different tests separate.
self.data_prefix = data_prefix
# We create happybase Table with prefix from
# PANKO_TEST_HBASE_TABLE_PREFIX
prefix = os.getenv("PANKO_TEST_HBASE_TABLE_PREFIX", 'test')
separator = os.getenv(
"PANKO_TEST_HBASE_TABLE_PREFIX_SEPARATOR", '_')
super(MockHBaseTable, self).__init__(
"%s%s%s" % (prefix, separator, name),
connection)
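    # For example (values illustrative): with the environment defaults
    # above and data_prefix='42_', this object maps to the HBase table
    # 'test_<name>' and put('row1', ...) stores the row key '42_row1'.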
def put(self, row, *args, **kwargs):
row = self.data_prefix + row
return super(MockHBaseTable, self).put(row, *args,
**kwargs)
def scan(self, row_start=None, row_stop=None, row_prefix=None,
columns=None, filter=None, timestamp=None,
include_timestamp=False, batch_size=10, scan_batching=None,
limit=None, sorted_columns=False):
# Add data prefix for row parameters
        # row_prefix cannot be combined with row_start or row_stop
if not row_start and not row_stop:
row_prefix = self.data_prefix + (row_prefix or "")
row_start = None
row_stop = None
elif row_start and not row_stop:
            # We cannot simply bound the scan with
            # row_start = <data_prefix>foo and row_stop = <data_prefix>,
            # because then row_start > row_stop; instead, prefix only
            # row_start and add a PrefixFilter on data_prefix.
filter = self._update_filter_row(filter)
row_start = self.data_prefix + row_start
else:
row_start = self.data_prefix + (row_start or "")
row_stop = self.data_prefix + (row_stop or "")
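        # To summarize the three cases above (data_prefix='42_' is
        # illustrative): scan(row_prefix='a') scans keys '42_a*';
        # scan(row_start='a') keeps row_start='42_a' plus a
        # PrefixFilter('42_'); scan(row_start='a', row_stop='b')
        # scans '42_a'..'42_b'.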
gen = super(MockHBaseTable, self).scan(row_start, row_stop,
row_prefix, columns,
filter, timestamp,
include_timestamp, batch_size,
scan_batching, limit,
sorted_columns)
data_prefix_len = len(self.data_prefix)
# Restore original row format
for row, data in gen:
yield (row[data_prefix_len:], data)
def row(self, row, *args, **kwargs):
row = self.data_prefix + row
return super(MockHBaseTable, self).row(row, *args, **kwargs)
def delete(self, row, *args, **kwargs):
row = self.data_prefix + row
return super(MockHBaseTable, self).delete(row, *args, **kwargs)
def _update_filter_row(self, filter):
if filter:
return "PrefixFilter(%s) AND %s" % (self.data_prefix, filter)
else:
return "PrefixFilter(%s)" % self.data_prefix
panko-7.0.0/panko/tests/__init__.py 0000664 0001750 0001750 00000000000 13551610116 017220 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/ 0000775 0001750 0001750 00000000000 13551610251 017263 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/publisher/ 0000775 0001750 0001750 00000000000 13551610251 021260 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/publisher/__init__.py 0000664 0001750 0001750 00000000000 13551610116 023357 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/storage/ 0000775 0001750 0001750 00000000000 13551610251 020727 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/storage/test_impl_mongodb.py 0000664 0001750 0001750 00000004333 13551610116 025011 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for panko/storage/impl_mongodb.py
.. note::
In order to run the tests against another MongoDB server set the
environment variable PANKO_TEST_MONGODB_URL to point to a MongoDB
server before running the tests.
"""
from panko.storage import impl_mongodb
from panko.tests import base as test_base
from panko.tests import db as tests_db
@tests_db.run_with('mongodb')
class IndexTest(tests_db.TestBase):
def test_event_ttl_index_absent(self):
        # a TTL of -1 must leave no TTL index; creating one afterwards
        # must set the expected expiry
self.conn.clear_expired_data(-1)
self.assertNotIn("event_ttl",
self.conn.db.event.index_information())
self.conn.clear_expired_data(456789)
self.assertEqual(456789,
self.conn.db.event.index_information()
["event_ttl"]['expireAfterSeconds'])
def test_event_ttl_index_present(self):
self.conn.clear_expired_data(456789)
self.assertEqual(456789,
self.conn.db.event.index_information()
["event_ttl"]['expireAfterSeconds'])
self.conn.clear_expired_data(-1)
self.assertNotIn("event_ttl",
self.conn.db.event.index_information())
class CapabilitiesTest(test_base.BaseTestCase):
# Check the returned capabilities list, which is specific to each DB
# driver
def test_capabilities(self):
expected_capabilities = {
'events': {'query': {'simple': True}},
}
actual_capabilities = impl_mongodb.Connection.get_capabilities()
self.assertEqual(expected_capabilities, actual_capabilities)
panko-7.0.0/panko/tests/functional/storage/test_storage_scenarios.py 0000664 0001750 0001750 00000054021 13551610116 026054 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for DB backend implementation test"""
import datetime
import operator
import mock
from oslo_utils import timeutils
from panko import storage
from panko.storage import models
from panko.tests import db as tests_db
class EventTestBase(tests_db.TestBase):
"""Separate test base class.
We don't want to inherit all the Meter stuff.
"""
def setUp(self):
super(EventTestBase, self).setUp()
self.prepare_data()
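    # prepare_data() records six events -- Foo, Bar, Zoo, Foo, Bar, Zoo --
    # generated one hour apart starting at 2013-12-31T05:00. Each carries
    # four traits (trait_A text, trait_B int, trait_C float, trait_D
    # datetime) derived from a base that grows by 100 per event; the
    # second event, for example, is Bar with trait_B=101, trait_C=100.123456.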
def prepare_data(self):
self.models = []
base = 0
self.start = datetime.datetime(2013, 12, 31, 5, 0)
now = self.start
for event_type in ['Foo', 'Bar', 'Zoo', 'Foo', 'Bar', 'Zoo']:
trait_models = [models.Trait(name, dtype, value)
for name, dtype, value in [
('trait_A', models.Trait.TEXT_TYPE,
"my_%s_text" % event_type),
('trait_B', models.Trait.INT_TYPE,
base + 1),
('trait_C', models.Trait.FLOAT_TYPE,
float(base) + 0.123456),
('trait_D', models.Trait.DATETIME_TYPE,
now)]]
self.models.append(
models.Event("id_%s_%d" % (event_type, base),
event_type, now, trait_models,
{'status': {'nested': 'started'}}))
base += 100
now = now + datetime.timedelta(hours=1)
self.end = now
self.conn.record_events(self.models)
@tests_db.run_with('sqlite', 'mysql', 'pgsql')
class EventTTLTest(EventTestBase):
@mock.patch.object(timeutils, 'utcnow')
def test_clear_expired_data(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime(2013, 12, 31, 10, 0)
self.conn.clear_expired_data(3600)
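        # With "now" frozen at 10:00 and a TTL of 3600 seconds, events
        # generated before 09:00 are purged; only the last two from
        # prepare_data() (Bar at 09:00 and Zoo at 10:00) should remain.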
events = list(self.conn.get_events(storage.EventFilter()))
self.assertEqual(2, len(events))
event_types = list(self.conn.get_event_types())
self.assertEqual(['Bar', 'Zoo'], event_types)
for event_type in event_types:
trait_types = list(self.conn.get_trait_types(event_type))
self.assertEqual(4, len(trait_types))
traits = list(self.conn.get_traits(event_type))
self.assertEqual(4, len(traits))
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb')
class EventTest(EventTestBase):
def test_duplicate_message_id(self):
now = datetime.datetime.utcnow()
m = [models.Event("1", "Foo", now, None, {}),
models.Event("1", "Zoo", now, [], {})]
with mock.patch('%s.LOG' %
self.conn.record_events.__module__) as log:
self.conn.record_events(m)
self.assertEqual(1, log.debug.call_count)
def test_bad_event(self):
now = datetime.datetime.utcnow()
broken_event = models.Event("1", "Foo", now, None, {})
        del broken_event.__dict__['raw']
m = [broken_event, broken_event]
with mock.patch('%s.LOG' %
self.conn.record_events.__module__) as log:
self.assertRaises(AttributeError, self.conn.record_events, m)
# ensure that record_events does not break on first error but
# delays exception and tries to record each event.
self.assertEqual(2, log.exception.call_count)
class BigIntegerTest(EventTestBase):
def test_trait_bigint(self):
big = 99999999999999
new_events = [models.Event(
"id_testid", "MessageIDTest", self.start,
[models.Trait('int', models.Trait.INT_TYPE, big)], {})]
self.conn.record_events(new_events)
class GetEventTest(EventTestBase):
def test_generated_is_datetime(self):
event_filter = storage.EventFilter(self.start, self.end)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(6, len(events))
for i, event in enumerate(events):
self.assertIsInstance(event.generated, datetime.datetime)
self.assertEqual(event.generated,
self.models[i].generated)
model_traits = self.models[i].traits
for j, trait in enumerate(event.traits):
if trait.dtype == models.Trait.DATETIME_TYPE:
self.assertIsInstance(trait.value, datetime.datetime)
self.assertEqual(trait.value, model_traits[j].value)
def test_simple_get(self):
event_filter = storage.EventFilter(self.start, self.end)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(6, len(events))
start_time = None
for i, type in enumerate(['Foo', 'Bar', 'Zoo']):
self.assertEqual(type, events[i].event_type)
self.assertEqual(4, len(events[i].traits))
# Ensure sorted results ...
if start_time is not None:
                self.assertLess(start_time, events[i].generated)
start_time = events[i].generated
def test_simple_get_event_type(self):
expected_trait_values = {
'id_Bar_100': {
'trait_A': 'my_Bar_text',
'trait_B': 101,
'trait_C': 100.123456,
'trait_D': self.start + datetime.timedelta(hours=1)
},
'id_Bar_400': {
'trait_A': 'my_Bar_text',
'trait_B': 401,
'trait_C': 400.123456,
'trait_D': self.start + datetime.timedelta(hours=4)
}
}
event_filter = storage.EventFilter(self.start, self.end, "Bar")
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Bar", events[0].event_type)
self.assertEqual("Bar", events[1].event_type)
self.assertEqual(4, len(events[0].traits))
self.assertEqual(4, len(events[1].traits))
for event in events:
trait_values = expected_trait_values.get(event.message_id,
None)
if not trait_values:
self.fail("Unexpected event ID returned:" % event.message_id)
for trait in event.traits:
expected_val = trait_values.get(trait.name)
if not expected_val:
self.fail("Unexpected trait type: %s" % trait.dtype)
self.assertEqual(expected_val, trait.value)
def test_get_event_trait_filter(self):
trait_filters = [{'key': 'trait_B', 'integer': 101}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Bar", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
def test_get_event_trait_filter_op_string(self):
trait_filters = [{'key': 'trait_A', 'string': 'my_Foo_text',
'op': 'eq'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Foo", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
trait_filters[0].update({'key': 'trait_A', 'op': 'lt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Bar", events[0].event_type)
trait_filters[0].update({'key': 'trait_A', 'op': 'le'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Bar", events[1].event_type)
trait_filters[0].update({'key': 'trait_A', 'op': 'ne'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Zoo", events[3].event_type)
trait_filters[0].update({'key': 'trait_A', 'op': 'gt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Zoo", events[0].event_type)
trait_filters[0].update({'key': 'trait_A', 'op': 'ge'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Foo", events[2].event_type)
def test_get_event_trait_filter_op_integer(self):
trait_filters = [{'key': 'trait_B', 'integer': 101, 'op': 'eq'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Bar", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
trait_filters[0].update({'key': 'trait_B', 'op': 'lt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Foo", events[0].event_type)
trait_filters[0].update({'key': 'trait_B', 'op': 'le'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Bar", events[1].event_type)
trait_filters[0].update({'key': 'trait_B', 'op': 'ne'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(5, len(events))
self.assertEqual("Zoo", events[4].event_type)
trait_filters[0].update({'key': 'trait_B', 'op': 'gt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Zoo", events[0].event_type)
trait_filters[0].update({'key': 'trait_B', 'op': 'ge'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(5, len(events))
self.assertEqual("Foo", events[2].event_type)
def test_get_event_trait_filter_op_float(self):
trait_filters = [{'key': 'trait_C', 'float': 300.123456, 'op': 'eq'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Foo", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
trait_filters[0].update({'key': 'trait_C', 'op': 'lt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(3, len(events))
self.assertEqual("Zoo", events[2].event_type)
trait_filters[0].update({'key': 'trait_C', 'op': 'le'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Bar", events[1].event_type)
trait_filters[0].update({'key': 'trait_C', 'op': 'ne'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(5, len(events))
self.assertEqual("Zoo", events[2].event_type)
trait_filters[0].update({'key': 'trait_C', 'op': 'gt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
self.assertEqual("Bar", events[0].event_type)
trait_filters[0].update({'key': 'trait_C', 'op': 'ge'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(3, len(events))
self.assertEqual("Zoo", events[2].event_type)
def test_get_event_trait_filter_op_datetime(self):
trait_filters = [{'key': 'trait_D',
'datetime': self.start + datetime.timedelta(hours=2),
'op': 'eq'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Zoo", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
trait_filters[0].update({'key': 'trait_D', 'op': 'lt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(2, len(events))
trait_filters[0].update({'key': 'trait_D', 'op': 'le'})
self.assertEqual("Bar", events[1].event_type)
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(3, len(events))
self.assertEqual("Bar", events[1].event_type)
trait_filters[0].update({'key': 'trait_D', 'op': 'ne'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(5, len(events))
self.assertEqual("Foo", events[2].event_type)
trait_filters[0].update({'key': 'trait_D', 'op': 'gt'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(3, len(events))
self.assertEqual("Zoo", events[2].event_type)
trait_filters[0].update({'key': 'trait_D', 'op': 'ge'})
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(4, len(events))
self.assertEqual("Bar", events[2].event_type)
def test_get_event_multiple_trait_filter(self):
trait_filters = [{'key': 'trait_B', 'integer': 1},
{'key': 'trait_C', 'float': 0.123456},
{'key': 'trait_A', 'string': 'my_Foo_text'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("Foo", events[0].event_type)
self.assertEqual(4, len(events[0].traits))
def test_get_event_multiple_trait_filter_expect_none(self):
trait_filters = [{'key': 'trait_B', 'integer': 1},
{'key': 'trait_A', 'string': 'my_Zoo_text'}]
event_filter = storage.EventFilter(self.start, self.end,
traits_filter=trait_filters)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(0, len(events))
def test_get_event_types(self):
event_types = [e for e in
self.conn.get_event_types()]
self.assertEqual(3, len(event_types))
self.assertIn("Bar", event_types)
self.assertIn("Foo", event_types)
self.assertIn("Zoo", event_types)
def test_get_trait_types(self):
trait_types = [tt for tt in
self.conn.get_trait_types("Foo")]
self.assertEqual(4, len(trait_types))
        trait_type_names = [tt['name'] for tt in trait_types]
self.assertIn("trait_A", trait_type_names)
self.assertIn("trait_B", trait_type_names)
self.assertIn("trait_C", trait_type_names)
self.assertIn("trait_D", trait_type_names)
def test_get_trait_types_unknown_event(self):
trait_types = [tt for tt in
self.conn.get_trait_types("Moo")]
self.assertEqual(0, len(trait_types))
def test_get_traits(self):
traits = self.conn.get_traits("Bar")
# format results in a way that makes them easier to work with
trait_dict = {}
for trait in traits:
trait_dict[trait.name] = trait.dtype
self.assertIn("trait_A", trait_dict)
self.assertEqual(models.Trait.TEXT_TYPE, trait_dict["trait_A"])
self.assertIn("trait_B", trait_dict)
self.assertEqual(models.Trait.INT_TYPE, trait_dict["trait_B"])
self.assertIn("trait_C", trait_dict)
self.assertEqual(models.Trait.FLOAT_TYPE, trait_dict["trait_C"])
self.assertIn("trait_D", trait_dict)
self.assertEqual(models.Trait.DATETIME_TYPE,
trait_dict["trait_D"])
def test_get_all_traits(self):
traits = self.conn.get_traits("Foo")
        traits = sorted(traits, key=operator.attrgetter('dtype'))
self.assertEqual(8, len(traits))
trait = traits[0]
self.assertEqual("trait_A", trait.name)
self.assertEqual(models.Trait.TEXT_TYPE, trait.dtype)
def test_simple_get_event_no_traits(self):
new_events = [models.Event("id_notraits", "NoTraits",
self.start, [], {})]
self.conn.record_events(new_events)
event_filter = storage.EventFilter(
self.start, self.end, "NoTraits")
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
self.assertEqual("id_notraits", events[0].message_id)
self.assertEqual("NoTraits", events[0].event_type)
self.assertEqual(0, len(events[0].traits))
def test_simple_get_no_filters(self):
event_filter = storage.EventFilter(None, None, None)
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(6, len(events))
def test_get_by_message_id(self):
new_events = [models.Event("id_testid", "MessageIDTest",
self.start, [], {})]
self.conn.record_events(new_events)
event_filter = storage.EventFilter(message_id="id_testid")
events = [event for event in self.conn.get_events(event_filter)]
self.assertEqual(1, len(events))
event = events[0]
self.assertEqual("id_testid", event.message_id)
def test_simple_get_raw(self):
event_filter = storage.EventFilter()
events = [event for event in self.conn.get_events(event_filter)]
self.assertTrue(events)
self.assertEqual({'status': {'nested': 'started'}}, events[0].raw)
def test_trait_type_enforced_on_none(self):
new_events = [models.Event(
"id_testid", "MessageIDTest", self.start,
[models.Trait('text', models.Trait.TEXT_TYPE, ''),
models.Trait('int', models.Trait.INT_TYPE, 0),
models.Trait('float', models.Trait.FLOAT_TYPE, 0.0)],
{})]
self.conn.record_events(new_events)
event_filter = storage.EventFilter(message_id="id_testid")
events = [event for event in self.conn.get_events(event_filter)]
options = [(models.Trait.TEXT_TYPE, ''),
                   (models.Trait.INT_TYPE, 0),
(models.Trait.FLOAT_TYPE, 0.0)]
for trait in events[0].traits:
options.remove((trait.dtype, trait.value))
panko-7.0.0/panko/tests/functional/storage/__init__.py 0000664 0001750 0001750 00000000000 13551610116 023026 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/storage/test_impl_sqlalchemy.py 0000664 0001750 0001750 00000010426 13551610116 025526 0 ustar zuul zuul 0000000 0000000 # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for panko/storage/impl_sqlalchemy.py
.. note::
In order to run the tests against real SQL server set the environment
variable PANKO_TEST_SQL_URL to point to a SQL server before running
the tests.
"""
import datetime
from six.moves import reprlib
from panko.storage import impl_sqlalchemy as impl_sqla_event
from panko.storage import models
from panko.storage.sqlalchemy import models as sql_models
from panko.tests import base as test_base
from panko.tests import db as tests_db
@tests_db.run_with('sqlite', 'mysql', 'pgsql')
class PankoBaseTest(tests_db.TestBase):
def test_panko_base(self):
base = sql_models.PankoBase()
base['key'] = 'value'
self.assertEqual('value', base['key'])
@tests_db.run_with('sqlite', 'mysql', 'pgsql')
class EventTypeTest(tests_db.TestBase):
# EventType is a construct specific to sqlalchemy
# Not applicable to other drivers.
def setUp(self):
super(EventTypeTest, self).setUp()
self.session = self.conn._engine_facade.get_session()
self.session.begin()
def test_event_type_exists(self):
et1 = self.conn._get_or_create_event_type("foo", self.session)
        self.assertGreaterEqual(et1.id, 0)
et2 = self.conn._get_or_create_event_type("foo", self.session)
self.assertEqual(et2.id, et1.id)
self.assertEqual(et2.desc, et1.desc)
def test_event_type_unique(self):
et1 = self.conn._get_or_create_event_type("foo", self.session)
        self.assertGreaterEqual(et1.id, 0)
et2 = self.conn._get_or_create_event_type("blah", self.session)
self.assertNotEqual(et1.id, et2.id)
self.assertNotEqual(et1.desc, et2.desc)
        # Check that the __repr__ method returns a string
self.assertTrue(reprlib.repr(et2))
def tearDown(self):
self.session.rollback()
self.session.close()
super(EventTypeTest, self).tearDown()
@tests_db.run_with('sqlite', 'mysql', 'pgsql')
class EventTest(tests_db.TestBase):
def _verify_data(self, trait, trait_table):
now = datetime.datetime.utcnow()
ev = models.Event('1', 'name', now, [trait], {})
self.conn.record_events([ev])
session = self.conn._engine_facade.get_session()
t_tables = [sql_models.TraitText, sql_models.TraitFloat,
sql_models.TraitInt, sql_models.TraitDatetime]
for table in t_tables:
if table == trait_table:
self.assertEqual(1, session.query(table).count())
else:
self.assertEqual(0, session.query(table).count())
def test_string_traits(self):
model = models.Trait("Foo", models.Trait.TEXT_TYPE, "my_text")
self._verify_data(model, sql_models.TraitText)
def test_int_traits(self):
model = models.Trait("Foo", models.Trait.INT_TYPE, 100)
self._verify_data(model, sql_models.TraitInt)
def test_float_traits(self):
model = models.Trait("Foo", models.Trait.FLOAT_TYPE, 123.456)
self._verify_data(model, sql_models.TraitFloat)
def test_datetime_traits(self):
now = datetime.datetime.utcnow()
model = models.Trait("Foo", models.Trait.DATETIME_TYPE, now)
self._verify_data(model, sql_models.TraitDatetime)
def test_event_repr(self):
ev = sql_models.Event('msg_id', None, False, {})
ev.id = 100
self.assertTrue(reprlib.repr(ev))
class CapabilitiesTest(test_base.BaseTestCase):
# Check the returned capabilities list, which is specific to each DB
# driver
def test_capabilities(self):
expected_capabilities = {
'events': {'query': {'simple': True}},
}
actual_capabilities = impl_sqla_event.Connection.get_capabilities()
self.assertEqual(expected_capabilities, actual_capabilities)
panko-7.0.0/panko/tests/functional/gabbi/ 0000775 0001750 0001750 00000000000 13551610251 020327 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/gabbi/test_gabbi.py 0000664 0001750 0001750 00000002173 13551610116 023007 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A test module to exercise the Panko API with gabbi
For the sake of exploratory development.
"""
import os
from gabbi import driver
from panko.tests.functional.gabbi import fixtures
TESTS_DIR = 'gabbits'
def load_tests(loader, tests, pattern):
"""Provide a TestSuite to the discovery process."""
test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
return driver.build_tests(test_dir, loader, host=None,
intercept=fixtures.setup_app,
fixture_module=fixtures)
panko-7.0.0/panko/tests/functional/gabbi/test_gabbi_prefix.py 0000664 0001750 0001750 00000002226 13551610116 024363 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A module to exercise the Panko API with gabbi with a URL prefix"""
import os
from gabbi import driver
from panko.tests.functional.gabbi import fixtures
TESTS_DIR = 'gabbits_prefix'
def load_tests(loader, tests, pattern):
"""Provide a TestSuite to the discovery process."""
test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
return driver.build_tests(test_dir, loader, host=None,
prefix='/telemetry',
intercept=fixtures.setup_app,
fixture_module=fixtures)
panko-7.0.0/panko/tests/functional/gabbi/gabbits_prefix/ 0000775 0001750 0001750 00000000000 13551610251 023317 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/gabbi/gabbits_prefix/basic.yaml 0000664 0001750 0001750 00000001003 13551610116 025256 0 ustar zuul zuul 0000000 0000000 #
# Confirm root reports the right data including a prefixed URL
#
fixtures:
- ConfigFixture
tests:
# Root gives us some information on where to go from here.
- name: quick root check
GET: /
response_headers:
content-type: application/json
    response_strings:
      - '"base": "application/json"'
      - /telemetry/
    response_json_paths:
      versions.values.[0].status: stable
      versions.values.[0].media-types.[0].base: application/json
panko-7.0.0/panko/tests/functional/gabbi/fixtures.py 0000664 0001750 0001750 00000011710 13551610116 022552 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2015 Red Hat. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fixtures used during Gabbi-based test runs."""
import datetime
import os
from unittest import case
from gabbi import fixture
from oslo_config import cfg
from oslo_policy import opts
from oslo_utils import fileutils
from oslo_utils import uuidutils
import six
from six.moves.urllib import parse as urlparse
import sqlalchemy_utils
from panko.api import app
from panko import service
from panko import storage
from panko.storage import models
# NOTE(chdent): Hack to restore semblance of global configuration to
# pass to the WSGI app used per test suite. LOAD_APP_KWARGS holds the oslo
# configuration and the pecan application configuration, of which the
# critical part is a reference to the current indexer.
LOAD_APP_KWARGS = None
def setup_app():
global LOAD_APP_KWARGS
return app.load_app(**LOAD_APP_KWARGS)
class ConfigFixture(fixture.GabbiFixture):
"""Establish the relevant configuration for a test run."""
def start_fixture(self):
"""Set up config."""
global LOAD_APP_KWARGS
self.conf = None
# Determine the database connection.
db_url = os.environ.get('PIFPAF_URL', "sqlite://").replace(
"mysql://", "mysql+pymysql://")
if not db_url:
raise case.SkipTest('No database connection configured')
conf = self.conf = service.prepare_service([], [])
opts.set_defaults(self.conf)
content = ('{"default": ""}')
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='policy',
suffix='.json')
conf.set_override("policy_file", self.tempfile,
group='oslo_policy')
conf.set_override(
'api_paste_config',
os.path.abspath('etc/panko/api_paste.ini')
)
parsed_url = list(urlparse.urlparse(db_url))
parsed_url[2] += '-%s' % uuidutils.generate_uuid(dashed=False)
db_url = urlparse.urlunparse(parsed_url)
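        # e.g. an input of mysql+pymysql://localhost/panko becomes
        # mysql+pymysql://localhost/panko-<32 hex chars>, so every test
        # run gets its own database (the URL shown is illustrative).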
conf.set_override('connection', db_url, group='database')
if (parsed_url[0].startswith("mysql")
or parsed_url[0].startswith("postgresql")):
sqlalchemy_utils.create_database(conf.database.connection)
self.conn = storage.get_connection_from_config(self.conf)
self.conn.upgrade()
LOAD_APP_KWARGS = {
'conf': conf, 'appname': 'panko+noauth',
}
def stop_fixture(self):
"""Reset the config and remove data."""
if self.conn:
self.conn.clear()
if self.conf:
storage.get_connection_from_config(self.conf).clear()
class EventDataFixture(ConfigFixture):
"""Instantiate some sample event data for use in testing."""
def start_fixture(self):
"""Create some events."""
super(EventDataFixture, self).start_fixture()
events = []
name_list = ['chocolate.chip', 'peanut.butter', 'sugar']
for ix, name in enumerate(name_list):
timestamp = datetime.datetime.utcnow()
message_id = 'fea1b15a-1d47-4175-85a5-a4bb2c72924{}'.format(ix)
traits = [models.Trait('type', 1, name),
models.Trait('ate', 2, ix)]
event = models.Event(message_id,
'cookies_{}'.format(name),
timestamp,
traits, {'nested': {'inside': 'value'}})
events.append(event)
self.conn.record_events(events)
class CORSConfigFixture(fixture.GabbiFixture):
"""Inject mock configuration for the CORS middleware."""
def start_fixture(self):
# Here we monkeypatch GroupAttr.__getattr__, necessary because the
# paste.ini method of initializing this middleware creates its own
# ConfigOpts instance, bypassing the regular config fixture.
def _mock_getattr(instance, key):
if key != 'allowed_origin':
return self._original_call_method(instance, key)
return "http://valid.example.com"
self._original_call_method = cfg.ConfigOpts.GroupAttr.__getattr__
cfg.ConfigOpts.GroupAttr.__getattr__ = _mock_getattr
def stop_fixture(self):
"""Remove the monkeypatch."""
cfg.ConfigOpts.GroupAttr.__getattr__ = self._original_call_method
panko-7.0.0/panko/tests/functional/gabbi/__init__.py 0000664 0001750 0001750 00000000000 13551610116 022426 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/gabbi/gabbits/ 0000775 0001750 0001750 00000000000 13551610251 021742 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/gabbi/gabbits/middleware.yaml 0000664 0001750 0001750 00000002105 13551610116 024741 0 ustar zuul zuul 0000000 0000000 #
# Test the middlewares. Just CORS for now.
#
fixtures:
- ConfigFixture
- CORSConfigFixture
tests:
- name: valid cors options
OPTIONS: /
status: 200
request_headers:
origin: http://valid.example.com
access-control-request-method: GET
response_headers:
access-control-allow-origin: http://valid.example.com
- name: invalid cors options
OPTIONS: /
status: 200
request_headers:
origin: http://invalid.example.com
access-control-request-method: GET
response_forbidden_headers:
- access-control-allow-origin
- name: valid cors get
GET: /
status: 200
request_headers:
origin: http://valid.example.com
access-control-request-method: GET
response_headers:
access-control-allow-origin: http://valid.example.com
- name: invalid cors get
GET: /
status: 200
request_headers:
origin: http://invalid.example.com
response_forbidden_headers:
- access-control-allow-origin
panko-7.0.0/panko/tests/functional/gabbi/gabbits/api-events-with-data.yaml 0000664 0001750 0001750 00000016561 13551610116 026572 0 ustar zuul zuul 0000000 0000000 # These tests run against the Events API with data preloaded into the datastore.
fixtures:
- ConfigFixture
- EventDataFixture
tests:
# this attempts to get all the events and checks to make sure they are valid
- name: get all events
GET: /v2/events
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
verbose: True
response_json_paths:
$[/event_type].[0].event_type: cookies_chocolate.chip
$[/event_type].[0].traits[/value].[0].value: '0'
$[/event_type].[0].traits[/value].[1].value: chocolate.chip
$[/event_type].[0].raw.nested.inside: value
$[/event_type].[1].event_type: cookies_peanut.butter
$[/event_type].[1].traits[/name].[0].name: ate
$[/event_type].[1].traits[/name].[1].name: type
$[/event_type].[1].raw.nested.inside: value
$[/event_type].[2].event_type: cookies_sugar
$[/event_type].[2].traits[/type].[0].type: integer
$[/event_type].[2].traits[/type].[1].type: string
$[/event_type].[2].raw.nested.inside: value
# this attempts to get all the events with invalid parameters and expects a 400
- name: get events with bad params
GET: /v2/events?bad_Stuff_here
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 400
# this attempts to query the events with the correct parameterized query syntax
# and expects a matching event
- name: get events that match query
GET: /v2/events?q.field=event_type&q.op=eq&q.type=string&q.value=cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_json_paths:
$[/event_type].[0].event_type: cookies_chocolate.chip
$[/event_type].[0].traits[/value].[1].value: chocolate.chip
# this attempts to query the events with the correct data query syntax and
# expects a matching event
- name: get events that match query via data
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
q:
- field: event_type
op: eq
type: string
value: cookies_chocolate.chip
response_headers:
content-type: application/json
response_json_paths:
$[/event_type].[0].event_type: cookies_chocolate.chip
$[/event_type].[0].traits[/value].[1].value: chocolate.chip
# this attempts to query the events with the correct parameterized query syntax
# but a bad field name and expects an empty list
- name: get events that match bad query
GET: /v2/events?q.field=bad_field&q.op=eq&q.type=string&q.value=cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the correct data query syntax and
# a bad field name and expects an empty list
- name: get events that match bad query via data
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
q:
- field: bad_field
op: eq
type: string
value: cookies_chocolate.chip
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the wrong data query syntax missing the
# q object but supplying the field list and a bad field name and expects a 400
- name: get events that match bad query via data list
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
- field: bad_field
op: eq
type: string
value: cookies_chocolate.chip
status: 400
# Get a single event by message_id should return an event
- name: get a single event
GET: /v2/events/fea1b15a-1d47-4175-85a5-a4bb2c729240
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_json_paths:
$.event_type: cookies_chocolate.chip
$.traits[/value].[0].value: '0'
$.traits[/value].[1].value: chocolate.chip
# Get a single event by message_id no data is present so should return a 404
- name: get a single event that does not exist
GET: /v2/events/bad-id
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 404
# Get all the event types should return a list of event types
- name: get all event types
GET: /v2/event_types
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- cookies_chocolate.chip
- cookies_peanut.butter
- cookies_sugar
# Get a single event type by valid name, this API is unused and should return a 404
- name: get event types for good event_type unused api
GET: /v2/event_types/cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 404
# Get a single event type by invalid name, this API is unused and should return a 404
- name: get event types for bad event_type unused api
GET: /v2/event_types/bad_event_type
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 404
# Get all traits for a valid event type should return an list of traits
- name: get all traits for event type
GET: /v2/event_types/cookies_chocolate.chip/traits
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_json_paths:
$.[0].type: string
$.[1].name: ate
# Get all traits for an invalid event type should return an empty list
- name: get all traits names for event type bad event type
GET: /v2/event_types/bad_event_type/traits
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# Get all traits of type ate for a valid event type should return an list of
# traits
- name: get all traits of type ate for event type
GET: /v2/event_types/cookies_chocolate.chip/traits/ate
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_json_paths:
$.[0].name: ate
$.[0].value: '0'
# Get all traits of type ate for an invalid event type should return an empty
# list
- name: get all traits of type for event type bad event type
GET: /v2/event_types/bad_event_type/traits/ate
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# Get all traits of type bad_trait_name for a valid event type should return an
# empty list
- name: get all traits of type instances for event type bad trait name
GET: /v2/event_types/cookies_chocolate.chip/traits/bad_trait_name
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
panko-7.0.0/panko/tests/functional/gabbi/gabbits/api-events-no-data.yaml 0000664 0001750 0001750 00000013650 13551610116 026227 0 ustar zuul zuul 0000000 0000000 # These tests run against the Events API with no data preloaded into the
# datastore. This allows us to verify that requests are still processed
# normally even if data is missing for that endpoint.
fixtures:
- ConfigFixture
tests:
# this attempts to get all the events and expects an empty list back
- name: get all events
GET: /v2/events
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to get all the events with no role/user/project
# info in header and expects a 403
- name: get events with bad headers
GET: /v2/events
status: 403
# this attempts to get all the events with no user/project
# info in header and expects a 403
- name: get events with admin only header
GET: /v2/events
request_headers:
X-Roles: admin
status: 403
# this attempts to get all the events with no project
# info in header and expects a 403
- name: get events with no project header
GET: /v2/events
request_headers:
X-Roles: admin
X-User-Id: user1
status: 403
# this attempts to get all the events with no user
# info in header and expects a 403
- name: get events with no user header
GET: /v2/events
request_headers:
X-Roles: admin
X-Project-Id: project1
status: 403
# this attempts to get all the events with invalid parameters and expects a 400
- name: get events with bad params
GET: /v2/events?bad_Stuff_here
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 400
# this attempts to query the events with the correct parameterized query syntax
# and expects an empty list
- name: get events that match query
GET: /v2/events?q.field=event_type&q.op=eq&q.type=string&q.value=cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the correct data query syntax and
# expects an empty list
- name: get events that match query via request data
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
q:
- field: event_type
op: eq
type: string
value: cookies_chocolate.chip
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the correct parameterized query syntax
# but a bad field name and expects an empty list
- name: get events that match bad query
GET: /v2/events?q.field=bad_field&q.op=eq&q.type=string&q.value=cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the correct data query syntax and
# a bad field name and expects an empty list
- name: get events that match bad query via request data
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
q:
- field: bad_field
op: eq
type: string
value: cookies_chocolate.chip
response_headers:
content-type: application/json
response_strings:
- "[]"
# this attempts to query the events with the wrong data query syntax missing the
# q object but supplying the field list and a bad field name and expects a 400
- name: get events that match bad query via request data malformed list
GET: /v2/events
request_headers:
content-type: application/json
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
- field: bad_field
op: eq
type: string
value: cookies_chocolate.chip
status: 400
# this attempts to query the events with the wrong data query syntax missing the
# q object but supplying the field list along with a bad content-type. Should
# return a 400
- name: get events that match bad query via request data wrong type
GET: /v2/events
request_headers:
content-type: text/plain
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
data:
"field: bad_field op: eq type: string value: cookies_chocolate.chip xfail: True"
status: 415
# Get a single event by message_id no data is present so should return a 404
- name: get a single event
GET: /v2/events/fea1b15a-1d47-4175-85a5-a4bb2c729240
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 404
# Get all the event types should return an empty list
- name: get all event types
GET: /v2/event_types
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# Get a single event type by name, this API is unused and should return a 404
- name: get event types for good event_type unused api
GET: /v2/event_types/cookies_chocolate.chip
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
status: 404
# Get all traits for an event type should return an empty list
- name: get all traits for event type
GET: /v2/event_types/cookies_chocolate.chip/traits
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
# Get all traits named ate for an event type should return an empty list
- name: get all traits named ate for event type
GET: /v2/event_types/cookies_chocolate.chip/traits/ate
request_headers:
X-Roles: admin
X-User-Id: user1
X-Project-Id: project1
response_headers:
content-type: application/json
response_strings:
- "[]"
panko-7.0.0/panko/tests/functional/gabbi/gabbits/basic.yaml 0000664 0001750 0001750 00000001260 13551610116 023706 0 ustar zuul zuul 0000000 0000000 #
# Some simple tests just to confirm that the system works.
#
fixtures:
- ConfigFixture
tests:
# Root gives us some information on where to go from here.
- name: quick root check
GET: /
response_headers:
content-type: application/json
response_strings:
- '"base": "application/json"'
response_json_paths:
versions.values.[0].status: stable
versions.values.[0].media-types.[0].base: application/json
# NOTE(chdent): Ideally since / has a links ref to /v2, /v2 ought not 404!
- name: v2 visit
desc: this demonstrates a bug in the info in /
GET: $RESPONSE['versions.values.[0].links.[0].href']
status: 404
panko-7.0.0/panko/tests/functional/gabbi/gabbits/capabilities.yaml 0000664 0001750 0001750 00000000375 13551610116 025264 0 ustar zuul zuul 0000000 0000000 #
# Explore the capabilities API
#
fixtures:
- ConfigFixture
tests:
- name: get capabilities
desc: retrieve capabilities for the mongo store
GET: /v2/capabilities
response_json_paths:
$.event_storage.['storage:production_ready']: true
panko-7.0.0/panko/tests/functional/hooks/ 0000775 0001750 0001750 00000000000 13551610251 020406 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/hooks/post_test_hook.sh 0000775 0001750 0001750 00000003366 13551610116 024021 0 ustar zuul zuul 0000000 0000000 #!/bin/bash -xe
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script is executed inside the post_test_hook function in the devstack gate.
function generate_testr_results {
if [ -f .testrepository/0 ]; then
sudo .tox/functional/bin/testr last --subunit > $WORKSPACE/testrepository.subunit
sudo mv $WORKSPACE/testrepository.subunit $BASE/logs/testrepository.subunit
sudo /usr/os-testr-env/bin/subunit2html $BASE/logs/testrepository.subunit $BASE/logs/testr_results.html
sudo gzip -9 $BASE/logs/testrepository.subunit
sudo gzip -9 $BASE/logs/testr_results.html
sudo chown $USER:$USER $BASE/logs/testrepository.subunit.gz $BASE/logs/testr_results.html.gz
sudo chmod a+r $BASE/logs/testrepository.subunit.gz $BASE/logs/testr_results.html.gz
fi
}
export PANKO_DIR="$BASE/new/panko"
# Go to the panko dir
cd $PANKO_DIR
if [[ -z "$STACK_USER" ]]; then
export STACK_USER=stack
fi
sudo chown -R $STACK_USER:stack $PANKO_DIR
# Run tests
echo "Running panko functional test suite"
set +e
# NOTE(ityaptin): Expects a script parameter which contains the backend name
PANKO_TEST_BACKEND="$1" sudo -E -H -u ${STACK_USER:-${USER}} tox -efunctional
EXIT_CODE=$?
set -e
# Collect and parse result
generate_testr_results
exit $EXIT_CODE
panko-7.0.0/panko/tests/functional/__init__.py 0000664 0001750 0001750 00000000000 13551610116 021362 0 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/api/ 0000775 0001750 0001750 00000000000 13551610251 020034 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/api/__init__.py 0000664 0001750 0001750 00000015206 13551610116 022151 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests.
"""
from oslo_policy import opts
import webtest
from panko.api import app
from panko.api import rbac
from panko import service
from panko.tests import db as db_test_base
class FunctionalTest(db_test_base.TestBase):
"""Used for functional tests of Pecan controllers.
Used in case when you need to test your literal application and its
integration with the framework.
"""
PATH_PREFIX = ''
def setUp(self):
super(FunctionalTest, self).setUp()
self.CONF = service.prepare_service([], [])
opts.set_defaults(self.CONF)
self.CONF.set_override('api_paste_config',
self.path_get('etc/panko/api_paste.ini'))
self.app = self._make_app(self.CONF)
@staticmethod
def _make_app(conf):
return webtest.TestApp(app.load_app(conf, appname='panko+noauth'))
def tearDown(self):
super(FunctionalTest, self).tearDown()
rbac.reset()
def put_json(self, path, params, expect_errors=False, headers=None,
extra_environ=None, status=None):
"""Sends simulated HTTP PUT request to Pecan test app.
:param path: url path of target service
:param params: content for wsgi.input of request
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
return self.post_json(path=path, params=params,
expect_errors=expect_errors,
headers=headers, extra_environ=extra_environ,
status=status, method="put")
def post_json(self, path, params, expect_errors=False, headers=None,
method="post", extra_environ=None, status=None):
"""Sends simulated HTTP POST request to Pecan test app.
:param path: url path of target service
:param params: content for wsgi.input of request
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param method: Request method type. Appropriate method function call
should be used rather than passing attribute in.
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
full_path = self.PATH_PREFIX + path
response = getattr(self.app, "%s_json" % method)(
str(full_path),
params=params,
headers=headers,
status=status,
extra_environ=extra_environ,
expect_errors=expect_errors
)
return response
def delete(self, path, expect_errors=False, headers=None,
extra_environ=None, status=None):
"""Sends simulated HTTP DELETE request to Pecan test app.
:param path: url path of target service
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param status: Expected status code of response
"""
full_path = self.PATH_PREFIX + path
response = self.app.delete(str(full_path),
headers=headers,
status=status,
extra_environ=extra_environ,
expect_errors=expect_errors)
return response
def get_json(self, path, expect_errors=False, headers=None,
extra_environ=None, q=None, groupby=None, status=None,
override_params=None, **params):
"""Sends simulated HTTP GET request to Pecan test app.
:param path: url path of target service
:param expect_errors: boolean value whether an error is expected based
on request
:param headers: A dictionary of headers to send along with the request
:param extra_environ: A dictionary of environ variables to send along
with the request
:param q: list of queries consisting of: field, value, op, and type
keys
:param groupby: list of fields to group by
:param status: Expected status code of response
:param override_params: literally encoded query param string
:param params: content for wsgi.input of request
"""
q = q or []
groupby = groupby or []
full_path = self.PATH_PREFIX + path
if override_params:
all_params = override_params
else:
query_params = {'q.field': [],
'q.value': [],
'q.op': [],
'q.type': [],
}
for query in q:
for name in ['field', 'op', 'value', 'type']:
query_params['q.%s' % name].append(query.get(name, ''))
all_params = {}
all_params.update(params)
if q:
all_params.update(query_params)
if groupby:
all_params.update({'groupby': groupby})
response = self.app.get(full_path,
params=all_params,
headers=headers,
extra_environ=extra_environ,
expect_errors=expect_errors,
status=status)
if not expect_errors:
response = response.json
return response
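# For illustration, a hypothetical call from a test (the names used here
# are made up) would exercise the query encoding above like this:
#
#     data = self.get_json('/events', headers={'X-Roles': 'admin'},
#                          q=[{'field': 'event_type', 'op': 'eq',
#                              'type': 'string', 'value': 'Foo'}])
#
# get_json prepends PATH_PREFIX ('/v2' in the v2 subclass) and encodes
# the q list as repeated q.field/q.op/q.type/q.value query parameters,
# roughly /v2/events?q.field=event_type&q.op=eq&q.type=string&q.value=Foo
# (parameter ordering may vary).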
panko-7.0.0/panko/tests/functional/api/v2/ 0000775 0001750 0001750 00000000000 13551610251 020363 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/panko/tests/functional/api/v2/test_capabilities.py 0000664 0001750 0001750 00000002167 13551610116 024433 0 ustar zuul zuul 0000000 0000000 #
# Copyright Ericsson AB 2014. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from panko.tests.functional.api import v2 as tests_api
class TestCapabilitiesController(tests_api.FunctionalTest):
def setUp(self):
super(TestCapabilitiesController, self).setUp()
self.url = '/capabilities'
def test_capabilities(self):
data = self.get_json(self.url)
# check that the capabilities data contains both 'api' and 'event_storage' fields
self.assertIsNotNone(data)
self.assertNotEqual({}, data)
self.assertIn('api', data)
self.assertIn('event_storage', data)
panko-7.0.0/panko/tests/functional/api/v2/test_app.py 0000664 0001750 0001750 00000010121 13551610116 022547 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 IBM Corp.
# Copyright 2013 Julien Danjou
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test basic panko-api app
"""
from panko.tests.functional.api import v2
class TestApiMiddleware(v2.FunctionalTest):
no_lang_translated_error = 'No lang translated error'
en_US_translated_error = 'en-US translated error'
def _fake_translate(self, message, user_locale):
if user_locale is None:
return self.no_lang_translated_error
else:
return self.en_US_translated_error
def test_json_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json,application/xml"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml;q=0.8, \
application/json"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"text/html,*/*"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertTrue(response.json['error_message'])
def test_xml_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml,*/*"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/xml", response.content_type)
self.assertEqual('error_message', response.xml.tag)
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json;q=0.8 \
,application/xml"}
)
self.assertEqual(404, response.status_int)
self.assertEqual("application/xml", response.content_type)
self.assertEqual('error_message', response.xml.tag)
panko-7.0.0/panko/tests/functional/api/v2/__init__.py 0000664 0001750 0001750 00000001305 13551610116 022473 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from panko.tests.functional import api
class FunctionalTest(api.FunctionalTest):
PATH_PREFIX = '/v2'
panko-7.0.0/panko/tests/functional/api/v2/test_event_scenarios.py 0000664 0001750 0001750 00000107007 13551610116 025170 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test event, event_type and trait retrieval."""
import datetime
from oslo_utils import uuidutils
import webtest.app
from panko.storage import models
from panko.tests import db as tests_db
from panko.tests.functional.api import v2
USER_ID = uuidutils.generate_uuid(dashed=False)
PROJ_ID = uuidutils.generate_uuid(dashed=False)
HEADERS = {"X-Roles": "admin",
"X-User-Id": USER_ID,
"X-Project-Id": PROJ_ID}
class EventTestBase(v2.FunctionalTest):
def setUp(self):
super(EventTestBase, self).setUp()
self._generate_models()
def _generate_models(self):
event_models = []
base = 0
self.s_time = datetime.datetime(2013, 12, 31, 5, 0)
self.trait_time = datetime.datetime(2013, 12, 31, 5, 0)
for event_type in ['Foo', 'Bar', 'Zoo']:
trait_models = [models.Trait(name, type, value)
for name, type, value in [
('trait_A', models.Trait.TEXT_TYPE,
"my_%s_text" % event_type),
('trait_B', models.Trait.INT_TYPE,
base + 1),
('trait_C', models.Trait.FLOAT_TYPE,
float(base) + 0.123456),
('trait_D', models.Trait.DATETIME_TYPE,
self.trait_time)]]
# The message ID of each test event will be the value of 'base'. So
# the message ID of the first event will be '0', of the second '100',
# and so on.
# trait_time in the first event will be equal to self.trait_time
# (datetime.datetime(2013, 12, 31, 5, 0)); each later event adds one
# day, so the second will be datetime.datetime(2014, 1, 1, 5, 0), and
# so on.
event_models.append(
models.Event(message_id=str(base),
event_type=event_type,
generated=self.trait_time,
traits=trait_models,
raw={'status': {'nested': 'started'}}))
base += 100
self.trait_time += datetime.timedelta(days=1)
self.conn.record_events(event_models)
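# For reference, the loop above records three events whose values work
# out to:
#
#   message_id  event_type  trait_A      trait_B  trait_C     generated/trait_D
#   '0'         Foo         my_Foo_text  1        0.123456    2013-12-31T05:00:00
#   '100'       Bar         my_Bar_text  101      100.123456  2014-01-01T05:00:00
#   '200'       Zoo         my_Zoo_text  201      200.123456  2014-01-02T05:00:00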
class TestEventTypeAPI(EventTestBase):
PATH = '/event_types'
def test_event_types(self):
data = self.get_json(self.PATH, headers=HEADERS)
for event_type in ['Foo', 'Bar', 'Zoo']:
self.assertIn(event_type, data)
class TestTraitAPI(EventTestBase):
PATH = '/event_types/%s/traits'
def test_get_traits_for_event(self):
path = self.PATH % "Foo"
data = self.get_json(path, headers=HEADERS)
self.assertEqual(4, len(data))
def test_get_event_invalid_path(self):
data = self.get_json('/event_types/trait_A/', headers=HEADERS,
expect_errors=True)
self.assertEqual(404, data.status_int)
def test_get_traits_for_non_existent_event(self):
path = self.PATH % "NO_SUCH_EVENT_TYPE"
data = self.get_json(path, headers=HEADERS)
self.assertEqual([], data)
def test_get_trait_data_for_event(self):
path = (self.PATH % "Foo") + "/trait_A"
data = self.get_json(path, headers=HEADERS)
self.assertEqual(1, len(data))
self.assertEqual("trait_A", data[0]['name'])
path = (self.PATH % "Foo") + "/trait_B"
data = self.get_json(path, headers=HEADERS)
self.assertEqual(1, len(data))
self.assertEqual("trait_B", data[0]['name'])
self.assertEqual("1", data[0]['value'])
path = (self.PATH % "Foo") + "/trait_D"
data = self.get_json(path, headers=HEADERS)
self.assertEqual(1, len(data))
self.assertEqual("trait_D", data[0]['name'])
self.assertEqual((self.trait_time - datetime.timedelta(days=3)).
isoformat(), data[0]['value'])
def test_get_trait_data_for_non_existent_event(self):
path = (self.PATH % "NO_SUCH_EVENT") + "/trait_A"
data = self.get_json(path, headers=HEADERS)
self.assertEqual([], data)
def test_get_trait_data_for_non_existent_trait(self):
path = (self.PATH % "Foo") + "/no_such_trait"
data = self.get_json(path, headers=HEADERS)
self.assertEqual([], data)
class TestEventAPI(EventTestBase):
PATH = '/events'
def test_get_events(self):
data = self.get_json(self.PATH, headers=HEADERS)
self.assertEqual(3, len(data))
# We expect to get native UTC generated time back
trait_time = self.s_time
for event in data:
expected_generated = trait_time.isoformat()
self.assertIn(event['event_type'], ['Foo', 'Bar', 'Zoo'])
self.assertEqual(4, len(event['traits']))
self.assertEqual({'status': {'nested': 'started'}}, event['raw'])
self.assertEqual(expected_generated, event['generated'])
for trait_name in ['trait_A', 'trait_B',
'trait_C', 'trait_D']:
self.assertIn(trait_name, map(lambda x: x['name'],
event['traits']))
trait_time += datetime.timedelta(days=1)
def test_get_event_by_message_id(self):
event = self.get_json(self.PATH + "/100", headers=HEADERS)
expected_traits = [{'name': 'trait_A',
'type': 'string',
'value': 'my_Bar_text'},
{'name': 'trait_B',
'type': 'integer',
'value': '101'},
{'name': 'trait_C',
'type': 'float',
'value': '100.123456'},
{'name': 'trait_D',
'type': 'datetime',
'value': '2014-01-01T05:00:00'}]
self.assertEqual('100', event['message_id'])
self.assertEqual('Bar', event['event_type'])
self.assertEqual('2014-01-01T05:00:00', event['generated'])
self.assertEqual(expected_traits, event['traits'])
def test_get_event_by_message_id_no_such_id(self):
data = self.get_json(self.PATH + "/DNE", headers=HEADERS,
expect_errors=True)
self.assertEqual(404, data.status_int)
def test_get_events_filter_event_type(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'event_type',
'value': 'Foo'}])
self.assertEqual(1, len(data))
def test_get_events_filter_trait_no_type(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text'}])
self.assertEqual(1, len(data))
self.assertEqual('Foo', data[0]['event_type'])
def test_get_events_filter_trait_empty_type(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': ''}])
self.assertEqual(1, len(data))
self.assertEqual('Foo', data[0]['event_type'])
def test_get_events_filter_trait_invalid_type(self):
resp = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'whats-up'}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual("The data type whats-up is not supported. The "
"supported data type list is: [\'integer\', "
"\'float\', \'string\', \'datetime\']",
resp.json['error_message']['faultstring'])
def test_get_events_filter_operator_invalid_type(self):
resp = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'op': 'whats-up'}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual("Operator whats-up is not supported. The "
"supported operators are: (\'lt\', \'le\', "
"\'eq\', \'ne\', \'ge\', \'gt\')",
resp.json['error_message']['faultstring'])
def test_get_events_filter_start_timestamp(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'start_timestamp',
'op': 'ge',
'value': '2014-01-01T00:00:00'}])
self.assertEqual(2, len(data))
sorted_types = sorted([d['event_type'] for d in data])
event_types = ['Foo', 'Bar', 'Zoo']
self.assertEqual(sorted_types, sorted(event_types[1:]))
def test_get_events_filter_start_timestamp_invalid_op(self):
resp = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'start_timestamp',
'op': 'gt',
'value': '2014-01-01T00:00:00'}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual(u'Operator gt is not supported. Only'
' `ge\' operator is available for field'
' start_timestamp',
resp.json['error_message']['faultstring'])
def test_get_events_filter_end_timestamp(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'end_timestamp',
'op': 'le',
'value': '2014-01-03T00:00:00'}])
self.assertEqual(3, len(data))
event_types = ['Foo', 'Bar', 'Zoo']
sorted_types = sorted([d['event_type'] for d in data])
self.assertEqual(sorted_types, sorted(event_types[:3]))
def test_get_events_filter_end_timestamp_invalid_op(self):
resp = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'end_timestamp',
'op': 'gt',
'value': '2014-01-03T00:00:00'}],
expect_errors=True)
self.assertEqual(400, resp.status_code)
self.assertEqual(u'Operator gt is not supported. Only'
' `le\' operator is available for field'
' end_timestamp',
resp.json['error_message']['faultstring'])
def test_get_events_filter_start_end_timestamp(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'start_timestamp',
'op': 'ge',
'value': '2014-01-02T00:00:00'},
{'field': 'end_timestamp',
'op': 'le',
'value': '2014-01-03T10:00:00'}])
self.assertEqual(1, len(data))
sorted_types = sorted([d['event_type'] for d in data])
event_types = ['Foo', 'Bar', 'Zoo']
self.assertEqual(sorted_types, sorted(event_types[2:3]))
def test_get_events_filter_text_trait(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string'}])
self.assertEqual(1, len(data))
self.assertEqual('Foo', data[0]['event_type'])
def test_get_events_filter_int_trait(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '101',
'type': 'integer'}])
self.assertEqual(1, len(data))
self.assertEqual('Bar', data[0]['event_type'])
traits = [x for x in data[0]['traits'] if x['name'] == 'trait_B']
self.assertEqual(1, len(traits))
self.assertEqual('integer', traits[0]['type'])
self.assertEqual('101', traits[0]['value'])
def test_get_events_filter_float_trait(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '200.123456',
'type': 'float'}])
self.assertEqual(1, len(data))
self.assertEqual('Zoo', data[0]['event_type'])
traits = [x for x in data[0]['traits'] if x['name'] == 'trait_C']
self.assertEqual(1, len(traits))
self.assertEqual('float', traits[0]['type'])
self.assertEqual('200.123456', traits[0]['value'])
def test_get_events_filter_datetime_trait(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2014-01-01T05:00:00',
'type': 'datetime'}])
self.assertEqual(1, len(data))
traits = [x for x in data[0]['traits'] if x['name'] == 'trait_D']
self.assertEqual(1, len(traits))
self.assertEqual('datetime', traits[0]['type'])
self.assertEqual('2014-01-01T05:00:00', traits[0]['value'])
def test_get_events_multiple_filters(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '1',
'type': 'integer'},
{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string'}])
self.assertEqual(1, len(data))
self.assertEqual('Foo', data[0]['event_type'])
def test_get_events_multiple_filters_no_matches(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '101',
'type': 'integer'},
{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string'}])
self.assertEqual(0, len(data))
def test_get_events_multiple_filters_same_field_different_values(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string'},
{'field': 'trait_A',
'value': 'my_Bar_text',
'type': 'string'}])
self.assertEqual(0, len(data))
def test_get_events_not_filters(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[])
self.assertEqual(3, len(data))
def test_get_events_filter_op_string(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string',
'op': 'eq'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Bar_text',
'type': 'string',
'op': 'lt'}])
self.assertEqual(0, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Zoo_text',
'type': 'string',
'op': 'le'}])
self.assertEqual(3, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Foo_text',
'type': 'string',
'op': 'ne'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Bar_text',
'type': 'string',
'op': 'gt'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_A',
'value': 'my_Zoo_text',
'type': 'string',
'op': 'ge'}])
self.assertEqual(1, len(data))
def test_get_events_filter_op_integer(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '101',
'type': 'integer',
'op': 'eq'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '201',
'type': 'integer',
'op': 'lt'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '1',
'type': 'integer',
'op': 'le'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '101',
'type': 'integer',
'op': 'ne'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '201',
'type': 'integer',
'op': 'gt'}])
self.assertEqual(0, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '1',
'type': 'integer',
'op': 'ge'}])
self.assertEqual(3, len(data))
def test_get_events_filter_op_float(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '100.123456',
'type': 'float',
'op': 'eq'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '200.123456',
'type': 'float',
'op': 'lt'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '0.123456',
'type': 'float',
'op': 'le'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '100.123456',
'type': 'float',
'op': 'ne'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '200.123456',
'type': 'float',
'op': 'gt'}])
self.assertEqual(0, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_C',
'value': '0.123456',
'type': 'float',
'op': 'ge'}])
self.assertEqual(3, len(data))
def test_get_events_filter_op_datetime(self):
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2014-01-01T05:00:00',
'type': 'datetime',
'op': 'eq'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2014-01-02T05:00:00',
'type': 'datetime',
'op': 'lt'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2013-12-31T05:00:00',
'type': 'datetime',
'op': 'le'}])
self.assertEqual(1, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2014-01-01T05:00:00',
'type': 'datetime',
'op': 'ne'}])
self.assertEqual(2, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2014-01-02T05:00:00',
'type': 'datetime',
'op': 'gt'}])
self.assertEqual(0, len(data))
data = self.get_json(self.PATH, headers=HEADERS,
q=[{'field': 'trait_D',
'value': '2013-12-31T05:00:00',
'type': 'datetime',
'op': 'ge'}])
self.assertEqual(3, len(data))
def test_get_events_filter_wrong_op(self):
self.assertRaises(webtest.app.AppError,
self.get_json, self.PATH, headers=HEADERS,
q=[{'field': 'trait_B',
'value': '1',
'type': 'integer',
'op': 'el'}])
class AclRestrictedEventTestBase(v2.FunctionalTest):
def setUp(self):
super(AclRestrictedEventTestBase, self).setUp()
self.admin_user_id = uuidutils.generate_uuid(dashed=False)
self.admin_proj_id = uuidutils.generate_uuid(dashed=False)
self.user_id = uuidutils.generate_uuid(dashed=False)
self.proj_id = uuidutils.generate_uuid(dashed=False)
self._generate_models()
def _generate_models(self):
event_models = []
self.s_time = datetime.datetime(2013, 12, 31, 5, 0)
event_models.append(
models.Event(message_id='1',
event_type='empty_ev',
generated=self.s_time,
traits=[models.Trait('random',
models.Trait.TEXT_TYPE,
'blah')],
raw={}))
event_models.append(
models.Event(message_id='2',
event_type='admin_ev',
generated=self.s_time,
traits=[models.Trait('project_id',
models.Trait.TEXT_TYPE,
self.admin_proj_id),
models.Trait('user_id',
models.Trait.TEXT_TYPE,
self.admin_user_id)],
raw={}))
event_models.append(
models.Event(message_id='3',
event_type='user_ev',
generated=self.s_time,
traits=[models.Trait('project_id',
models.Trait.TEXT_TYPE,
self.proj_id),
models.Trait('user_id',
models.Trait.TEXT_TYPE,
self.user_id)],
raw={}))
self.conn.record_events(event_models)
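# For reference: event '1' (empty_ev) carries no project/user traits,
# event '2' (admin_ev) is scoped to the admin project and user, and
# event '3' (user_ev) to the non-admin project and user. The tests
# below check which of these each caller is allowed to see.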
def test_non_admin_access(self):
a_headers = {"X-Roles": "member",
"X-User-Id": self.user_id,
"X-Project-Id": self.proj_id}
data = self.get_json('/events', headers=a_headers)
self.assertEqual(1, len(data))
self.assertEqual('user_ev', data[0]['event_type'])
def test_non_admin_access_single(self):
a_headers = {"X-Roles": "member",
"X-User-Id": self.user_id,
"X-Project-Id": self.proj_id}
data = self.get_json('/events/3', headers=a_headers)
self.assertEqual('user_ev', data['event_type'])
def test_non_admin_access_incorrect_user(self):
a_headers = {"X-Roles": "member",
"X-User-Id": 'blah',
"X-Project-Id": self.proj_id}
data = self.get_json('/events', headers=a_headers)
self.assertEqual(0, len(data))
def test_non_admin_access_incorrect_proj(self):
a_headers = {"X-Roles": "member",
"X-User-Id": self.user_id,
"X-Project-Id": 'blah'}
data = self.get_json('/events', headers=a_headers)
self.assertEqual(0, len(data))
def test_non_admin_access_single_invalid(self):
a_headers = {"X-Roles": "member",
"X-User-Id": self.user_id,
"X-Project-Id": self.proj_id}
data = self.get_json('/events/1', headers=a_headers,
expect_errors=True)
self.assertEqual(404, data.status_int)
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb', 'es')
def test_admin_access(self):
a_headers = {"X-Roles": "admin",
"X-User-Id": self.admin_user_id,
"X-Project-Id": self.admin_proj_id}
data = self.get_json('/events', headers=a_headers)
self.assertEqual(2, len(data))
self.assertEqual(set(['empty_ev', 'admin_ev']),
set(ev['event_type'] for ev in data))
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb', 'es')
def test_admin_access_trait_filter(self):
a_headers = {"X-Roles": "admin",
"X-User-Id": self.admin_user_id,
"X-Project-Id": self.admin_proj_id}
data = self.get_json('/events', headers=a_headers,
q=[{'field': 'random',
'value': 'blah',
'type': 'string',
'op': 'eq'}])
self.assertEqual(1, len(data))
self.assertEqual('empty_ev', data[0]['event_type'])
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb', 'es')
def test_admin_access_single(self):
a_headers = {"X-Roles": "admin",
"X-User-Id": self.admin_user_id,
"X-Project-Id": self.admin_proj_id}
data = self.get_json('/events/1', headers=a_headers)
self.assertEqual('empty_ev', data['event_type'])
data = self.get_json('/events/2', headers=a_headers)
self.assertEqual('admin_ev', data['event_type'])
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb', 'es')
def test_admin_access_all(self):
a_headers = {"X-Roles": "admin",
"X-User-Id": self.admin_user_id,
"X-Project-Id": self.admin_proj_id}
data = self.get_json('/events', headers=a_headers,
q=[{'field': 'all_tenants',
'value': 'True',
'type': 'string',
'op': 'eq'}])
self.assertEqual(3, len(data))
self.assertEqual(set(['empty_ev', 'admin_ev', 'user_ev']),
set(ev['event_type'] for ev in data))
@tests_db.run_with('sqlite', 'mysql', 'pgsql', 'mongodb', 'es')
def test_admin_access_trait_filter_no_access(self):
a_headers = {"X-Roles": "admin",
"X-User-Id": self.admin_user_id,
"X-Project-Id": self.admin_proj_id}
data = self.get_json('/events', headers=a_headers,
q=[{'field': 'user_id',
'value': self.user_id,
'type': 'string',
'op': 'eq'}])
self.assertEqual(0, len(data))
class EventRestrictionTestBase(v2.FunctionalTest):
def setUp(self):
super(EventRestrictionTestBase, self).setUp()
self.CONF.set_override('default_api_return_limit', 10, group='api')
self._generate_models()
def _generate_models(self):
event_models = []
base = 0
self.s_time = datetime.datetime(2013, 12, 31, 5, 0)
self.trait_time = datetime.datetime(2013, 12, 31, 5, 0)
for i in range(20):
trait_models = [models.Trait(name, type, value)
for name, type, value in [
('trait_A', models.Trait.TEXT_TYPE,
"my_text"),
('trait_B', models.Trait.INT_TYPE,
base + 1),
('trait_C', models.Trait.FLOAT_TYPE,
float(base) + 0.123456),
('trait_D', models.Trait.DATETIME_TYPE,
self.trait_time)]]
event_models.append(
models.Event(message_id=uuidutils.generate_uuid(),
event_type='foo.bar',
generated=self.trait_time,
traits=trait_models,
raw={'status': {'nested': 'started'}}))
self.trait_time += datetime.timedelta(seconds=1)
self.conn.record_events(event_models)
class TestEventRestriction(EventRestrictionTestBase):
def test_get_limit(self):
data = self.get_json('/events?limit=1', headers=HEADERS)
self.assertEqual(1, len(data))
def test_get_limit_negative(self):
self.assertRaises(webtest.app.AppError,
self.get_json, '/events?limit=-2', headers=HEADERS)
def test_get_limit_bigger(self):
data = self.get_json('/events?limit=100', headers=HEADERS)
self.assertEqual(20, len(data))
def test_get_default_limit(self):
data = self.get_json('/events', headers=HEADERS)
self.assertEqual(10, len(data))
@tests_db.run_with('mysql', 'pgsql', 'sqlite', 'postgresql')
class TestEventSort(EventTestBase):
PATH = '/events'
def test_get_limit_decr(self):
data = self.get_json(
'/events?limit=3&sort=generated:desc&sort=message_id',
headers=HEADERS)
self.assertEqual(3, len(data))
# check that data is sorted in descending (most recent first) order
# self.s_time - start (earliest)
# self.trait_time - end (latest)
trait_time = self.trait_time
for event in data:
trait_time -= datetime.timedelta(days=1)
expected_generated = trait_time.isoformat()
self.assertEqual(expected_generated, event['generated'])
def test_get_limit_incr(self):
data = self.get_json(
'/events?limit=3&sort=generated:asc&sort=message_id',
headers=HEADERS)
self.assertEqual(3, len(data))
# check that data is sorted in ascending order
# self.s_time - start (earliest)
# self.trait_time - end (latest)
trait_time = self.s_time
for event in data:
expected_generated = trait_time.isoformat()
self.assertEqual(expected_generated, event['generated'])
trait_time += datetime.timedelta(days=1)
def test_invalid_sort_key(self):
resp = self.get_json('/events?sort=invalid_key:desc',
headers=HEADERS,
expect_errors=True)
self.assertEqual(resp.status_code, 400)
self.assertEqual("Invalid input for field/attribute sort. Value: "
"'invalid_key:desc'. the sort parameter should be"
" a pair of sort key and sort dir combined with "
"':', or only sort key specified and sort dir will "
"be default 'asc', the supported sort keys are: "
"('message_id', 'generated')",
resp.json['error_message']
['faultstring'])
def test_invalid_sort_dir(self):
resp = self.get_json('/events?sort=message_id:bah',
headers=HEADERS,
expect_errors=True)
self.assertEqual(resp.status_code, 400)
self.assertEqual("Invalid input for field/attribute sort direction. "
"Value: 'message_id:bah'. the sort parameter "
"should be a pair of sort key and sort dir combined "
"with ':', or only sort key specified and sort dir "
"will be default 'asc', the supported sort "
"directions are: ('asc', 'desc')",
resp.json['error_message']
['faultstring'])
def test_sort_message_id(self):
data = self.get_json('/events?limit=3&sort=message_id:desc',
headers=HEADERS)
self.assertEqual(3, len(data))
result = [a['message_id'] for a in data]
self.assertEqual(['200', '100', '0'], result)
data = self.get_json('/events?limit=3&sort=message_id:asc',
headers=HEADERS)
self.assertEqual(3, len(data))
result = [a['message_id'] for a in data]
self.assertEqual(['0', '100', '200'], result)
def test_paginate_query(self):
data1 = self.get_json(
'/events?limit=1&sort=message_id:asc', headers=HEADERS)
self.assertEqual(1, len(data1))
self.assertEqual('0', data1[0]['message_id'])
data2 = self.get_json(
'/events?limit=3&marker=%s&sort=message_id:asc' %
data1[0]['message_id'], headers=HEADERS)
self.assertEqual(2, len(data2))
result = [a['message_id'] for a in data2]
self.assertEqual(['100', '200'], result)
panko-7.0.0/panko/tests/functional/api/v2/test_acl_scenarios.py 0000664 0001750 0001750 00000014466 13551610116 024614 0 ustar zuul zuul 0000000 0000000 #
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test ACL."""
import datetime
import os
from keystonemiddleware import fixture as ksm_fixture
from oslo_utils import fileutils
from oslo_utils import uuidutils
import six
import webtest
from panko.api import app
from panko.storage import models
from panko.tests.functional.api import v2
VALID_TOKEN = uuidutils.generate_uuid(dashed=False)
VALID_TOKEN2 = uuidutils.generate_uuid(dashed=False)
class TestAPIACL(v2.FunctionalTest):
def setUp(self):
super(TestAPIACL, self).setUp()
self.auth_token_fixture = self.useFixture(
ksm_fixture.AuthTokenFixture())
self.auth_token_fixture.add_token_data(
token_id=VALID_TOKEN,
# FIXME(morganfainberg): The project-id should be a proper uuid
project_id='123i2910',
role_list=['admin'],
user_name='user_id2',
user_id='user_id2',
is_v2=True
)
self.auth_token_fixture.add_token_data(
token_id=VALID_TOKEN2,
# FIXME(morganfainberg): The project-id should be a proper uuid
project_id='project-good',
role_list=['Member'],
user_name='user_id1',
user_id='user_id1',
is_v2=True)
def get_json(self, path, expect_errors=False, headers=None,
q=None, **params):
return super(TestAPIACL, self).get_json(path,
expect_errors=expect_errors,
headers=headers,
q=q or [],
**params)
@staticmethod
def _make_app(conf):
return webtest.TestApp(app.load_app(conf, appname='panko+keystone'))
class TestAPIEventACL(TestAPIACL):
PATH = '/events'
def test_non_admin_get_event_types(self):
data = self.get_json('/event_types', expect_errors=True,
headers={"X-Roles": "Member",
"X-Auth-Token": VALID_TOKEN2,
"X-Project-Id": "project-good"})
self.assertEqual(401, data.status_int)
class TestBaseApiEventRBAC(v2.FunctionalTest):
PATH = '/events'
def setUp(self):
super(TestBaseApiEventRBAC, self).setUp()
traits = [models.Trait('project_id', 1, 'project-good'),
models.Trait('user_id', 1, 'user-good')]
self.message_id = uuidutils.generate_uuid()
ev = models.Event(self.message_id, 'event_type',
datetime.datetime.now(), traits, {})
self.conn.record_events([ev])
def test_get_events_without_project(self):
headers_no_proj = {"X-Roles": "admin", "X-User-Id": "user-good"}
resp = self.get_json(self.PATH, expect_errors=True,
headers=headers_no_proj, status=403)
self.assertEqual(403, resp.status_int)
def test_get_events_without_user(self):
headers_no_user = {"X-Roles": "admin", "X-Project-Id": "project-good"}
resp = self.get_json(self.PATH, expect_errors=True,
headers=headers_no_user, status=403)
self.assertEqual(403, resp.status_int)
def test_get_events_without_scope(self):
headers_no_user_proj = {"X-Roles": "admin"}
resp = self.get_json(self.PATH,
expect_errors=True,
headers=headers_no_user_proj,
status=403)
self.assertEqual(403, resp.status_int)
def test_get_events(self):
headers = {"X-Roles": "Member", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH, headers=headers, status=200)
def test_get_event(self):
headers = {"X-Roles": "Member", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH + "/" + self.message_id, headers=headers,
status=200)
class TestApiEventAdminRBAC(TestBaseApiEventRBAC):
def _make_app(self, conf):
content = ('{"context_is_admin": "role:admin",'
'"telemetry:events:index": "rule:context_is_admin",'
'"telemetry:events:show": "rule:context_is_admin"}')
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='policy',
suffix='.json')
conf.set_override("policy_file", self.tempfile, group='oslo_policy')
return webtest.TestApp(app.load_app(conf, appname='panko+noauth'))
def tearDown(self):
os.remove(self.tempfile)
super(TestApiEventAdminRBAC, self).tearDown()
def test_get_events(self):
headers_rbac = {"X-Roles": "admin", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH, headers=headers_rbac, status=200)
def test_get_events_bad(self):
headers_rbac = {"X-Roles": "Member", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH, headers=headers_rbac, status=403)
def test_get_event(self):
headers = {"X-Roles": "admin", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH + "/" + self.message_id, headers=headers,
status=200)
def test_get_event_bad(self):
headers = {"X-Roles": "Member", "X-User-Id": "user-good",
"X-Project-Id": "project-good"}
self.get_json(self.PATH + "/" + self.message_id, headers=headers,
status=403)
panko-7.0.0/panko/tests/functional/test_bin.py 0000664 0001750 0001750 00000005560 13551610116 021452 0 ustar zuul zuul 0000000 0000000 # Copyright 2012 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import subprocess
from oslo_utils import fileutils
import six
from panko.tests import base
class BinTestCase(base.BaseTestCase):
def setUp(self):
super(BinTestCase, self).setUp()
content = ("[database]\n"
"connection=log://localhost\n")
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='panko',
suffix='.conf')
def tearDown(self):
super(BinTestCase, self).tearDown()
os.remove(self.tempfile)
def test_dbsync_run(self):
subp = subprocess.Popen(['panko-dbsync',
"--config-file=%s" % self.tempfile])
self.assertEqual(0, subp.wait())
def test_run_expirer_ttl_disabled(self):
subp = subprocess.Popen(['panko-expirer',
'-d',
"--config-file=%s" % self.tempfile],
stdout=subprocess.PIPE)
out, __ = subp.communicate()
self.assertEqual(0, subp.poll())
self.assertIn(b"Nothing to clean, database event "
b"time to live is disabled", out)
def _test_run_expirer_ttl_enabled(self, ttl_name, data_name):
content = ("[database]\n"
"%s=1\n"
"connection=log://localhost\n" % ttl_name)
if six.PY3:
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='panko',
suffix='.conf')
subp = subprocess.Popen(['panko-expirer',
'-d',
"--config-file=%s" % self.tempfile],
stdout=subprocess.PIPE)
out, __ = subp.communicate()
self.assertEqual(0, subp.poll())
msg = "Dropping %s data with TTL 1" % data_name
if six.PY3:
msg = msg.encode('utf-8')
self.assertIn(msg, out)
def test_run_expirer_ttl_enabled(self):
self._test_run_expirer_ttl_enabled('event_time_to_live', 'event')
panko-7.0.0/devstack/ 0000775 0001750 0001750 00000000000 13551610251 014453 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/devstack/README.rst 0000664 0001750 0001750 00000001435 13551610116 016145 0 ustar zuul zuul 0000000 0000000 ==========================
Enabling Panko in DevStack
==========================
1. Download Devstack::
git clone https://opendev.org/openstack/devstack
cd devstack
2. Add this repo as an external repository in ``local.conf`` file::
[[local|localrc]]
enable_plugin panko https://opendev.org/openstack/panko
To use stable branches, make sure devstack is on that branch, and specify
the branch name to enable_plugin, for example::
enable_plugin panko https://opendev.org/openstack/panko stable/newton
There are some options, such as PANKO_BACKEND, defined in
``panko/devstack/settings``; they can be used to configure the
installation of Panko. If you do not want to use their default
values, you can set new ones in ``local.conf``.
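For example, to store events in Elasticsearch rather than the default
MySQL backend, the entry might look like this (a minimal sketch)::

    [[local|localrc]]
    enable_plugin panko https://opendev.org/openstack/panko
    PANKO_BACKEND=es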
3. Run ``stack.sh``.
panko-7.0.0/devstack/lib/ 0000775 0001750 0001750 00000000000 13551610251 015221 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/devstack/lib/elasticsearch.sh 0000664 0001750 0001750 00000011166 13551610116 020374 0 ustar zuul zuul 0000000 0000000 #!/bin/bash -xe
# basic reference point for things like filecache
#
# TODO(sdague): once we have a few of these I imagine the download
# step can probably be factored out to something nicer
TOP_DIR=$(cd $(dirname "$0")/.. && pwd)
FILES=$TOP_DIR/files
source $TOP_DIR/stackrc
# Package source and version, all pkg files are expected to have
# something like this, as well as a way to override them.
ELASTICSEARCH_VERSION=${ELASTICSEARCH_VERSION:-1.7.5}
ELASTICSEARCH_BASEURL=${ELASTICSEARCH_BASEURL:-https://download.elasticsearch.org/elasticsearch/elasticsearch}
# Elasticsearch actual implementation
function wget_elasticsearch {
local file=${1}
if [ ! -f ${FILES}/${file} ]; then
wget $ELASTICSEARCH_BASEURL/${file} -O ${FILES}/${file}
fi
if [ ! -f ${FILES}/${file}.sha1.txt ]; then
wget $ELASTICSEARCH_BASEURL/${file}.sha1.txt -O ${FILES}/${file}.sha1.txt
fi
pushd ${FILES}; sha1sum ${file} > ${file}.sha1.gen; popd
if ! diff ${FILES}/${file}.sha1.gen ${FILES}/${file}.sha1.txt; then
echo "Invalid elasticsearch download. Could not install."
return 1
fi
return 0
}
function download_elasticsearch {
if is_ubuntu; then
wget_elasticsearch elasticsearch-${ELASTICSEARCH_VERSION}.deb
elif is_fedora || is_suse; then
wget_elasticsearch elasticsearch-${ELASTICSEARCH_VERSION}.noarch.rpm
fi
}
function configure_elasticsearch {
# currently a no op
:
}
function _check_elasticsearch_ready {
# poll elasticsearch to see if it's started
if ! wait_for_service 120 http://localhost:9200; then
die $LINENO "Maximum timeout reached. Could not connect to ElasticSearch"
fi
}
function start_elasticsearch {
if is_ubuntu; then
sudo /etc/init.d/elasticsearch start
_check_elasticsearch_ready
elif is_fedora; then
sudo /bin/systemctl start elasticsearch.service
_check_elasticsearch_ready
elif is_suse; then
sudo /usr/bin/systemctl start elasticsearch.service
_check_elasticsearch_ready
else
echo "Unsupported architecture...can not start elasticsearch."
fi
}
function stop_elasticsearch {
if is_ubuntu; then
sudo /etc/init.d/elasticsearch stop
elif is_fedora; then
sudo /bin/systemctl stop elasticsearch.service
elif is_suse ; then
sudo /usr/bin/systemctl stop elasticsearch.service
else
echo "Unsupported architecture...can not stop elasticsearch."
fi
}
function install_elasticsearch {
pip_install_gr elasticsearch
if is_package_installed elasticsearch; then
echo "Note: elasticsearch was already installed."
return
fi
if is_ubuntu; then
if [[ ${DISTRO} == "bionic" ]]; then
is_package_installed openjdk-8-jre-headless || install_package openjdk-8-jre-headless
else
is_package_installed default-jre-headless || install_package default-jre-headless
fi
sudo dpkg -i ${FILES}/elasticsearch-${ELASTICSEARCH_VERSION}.deb
sudo update-rc.d elasticsearch defaults 95 10
elif is_fedora; then
is_package_installed java-1.8.0-openjdk-headless || install_package java-1.8.0-openjdk-headless
yum_install ${FILES}/elasticsearch-${ELASTICSEARCH_VERSION}.noarch.rpm
sudo /bin/systemctl daemon-reload
sudo /bin/systemctl enable elasticsearch.service
elif is_suse; then
is_package_installed java-1_8_0-openjdk-headless || install_package java-1_8_0-openjdk-headless
zypper_install --no-gpg-checks ${FILES}/elasticsearch-${ELASTICSEARCH_VERSION}.noarch.rpm
sudo /usr/bin/systemctl daemon-reload
sudo /usr/bin/systemctl enable elasticsearch.service
else
echo "Unsupported install of elasticsearch on this architecture."
fi
}
function uninstall_elasticsearch {
if is_package_installed elasticsearch; then
if is_ubuntu; then
sudo apt-get purge elasticsearch
elif is_fedora; then
sudo yum remove elasticsearch
elif is_suse; then
sudo zypper rm elasticsearch
else
echo "Unsupported install of elasticsearch on this architecture."
fi
fi
}
# The PHASE dispatcher. All pkg files are expected to basically cargo
# cult the case statement.
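#
# Example invocations, as used from the panko devstack plugin (the
# relative path is illustrative):
#
#   devstack/lib/elasticsearch.sh download
#   devstack/lib/elasticsearch.sh install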
PHASE=$1
echo "Phase is $PHASE"
case $PHASE in
download)
download_elasticsearch
;;
install)
install_elasticsearch
;;
configure)
configure_elasticsearch
;;
start)
start_elasticsearch
;;
stop)
stop_elasticsearch
;;
uninstall)
uninstall_elasticsearch
;;
esac
panko-7.0.0/devstack/plugin.sh 0000664 0001750 0001750 00000024376 13551610116 016321 0 ustar zuul zuul 0000000 0000000 # Install and start **Panko** service in devstack
#
# To enable Panko in devstack add an entry to local.conf that
# looks like
#
# [[local|localrc]]
# enable_plugin panko https://opendev.org/openstack/panko
#
# Several variables set in the localrc section adjust common behaviors
# of Panko (see within for additional settings):
#
# PANKO_BACKEND: Database backend (e.g. 'mysql', 'mongodb', 'es')
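#
# A localrc snippet selecting a non-default backend might look like
# this (the values shown are illustrative):
#
#   [[local|localrc]]
#   enable_plugin panko https://opendev.org/openstack/panko
#   PANKO_BACKEND=mongodb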
# Save trace setting
XTRACE=$(set +o | grep xtrace)
set -o xtrace
# Support potential entry-point console scripts, in a venv or not
if [[ ${USE_VENV} = True ]]; then
PROJECT_VENV["panko"]=${PANKO_DIR}.venv
PANKO_BIN_DIR=${PROJECT_VENV["panko"]}/bin
else
PANKO_BIN_DIR=$(get_python_exec_prefix)
fi
if [ -z "$PANKO_DEPLOY" ]; then
# Default
PANKO_DEPLOY=simple
# Fallback to common wsgi devstack configuration
if [ "$ENABLE_HTTPD_MOD_WSGI_SERVICES" == "True" ]; then
PANKO_DEPLOY=mod_wsgi
# Deprecated config
elif [ -n "$PANKO_USE_MOD_WSGI" ] ; then
echo_summary "PANKO_USE_MOD_WSGI is deprecated, use PANKO_DEPLOY instead"
if [ "$PANKO_USE_MOD_WSGI" == True ]; then
PANKO_DEPLOY=mod_wsgi
fi
fi
fi
function panko_service_url {
echo "$PANKO_SERVICE_PROTOCOL://$PANKO_SERVICE_HOST:$PANKO_SERVICE_PORT"
}
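# With the defaults from devstack/settings this resolves to
# http://$SERVICE_HOST:8977; the host value is whatever SERVICE_HOST
# is in the local deployment.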
# _panko_install_mongodb - Install mongodb and python lib.
function _panko_install_mongodb {
# Server package is the same on all
local packages=mongodb-server
if is_fedora; then
# mongodb client
packages="${packages} mongodb"
fi
install_package ${packages}
if is_fedora; then
restart_service mongod
else
restart_service mongodb
fi
# give time for service to restart
sleep 5
}
# Configure mod_wsgi
function _panko_config_apache_wsgi {
sudo mkdir -p $PANKO_WSGI_DIR
local panko_apache_conf=$(apache_site_config_for panko)
local venv_path=""
# Copy proxy vhost and wsgi file
sudo cp $PANKO_DIR/panko/api/app.wsgi $PANKO_WSGI_DIR/app
if [[ ${USE_VENV} = True ]]; then
venv_path="python-path=${PROJECT_VENV["panko"]}/lib/$(python_version)/site-packages"
fi
sudo cp $PANKO_DIR/devstack/apache-panko.template $panko_apache_conf
sudo sed -e "
s|%PORT%|$PANKO_SERVICE_PORT|g;
s|%APACHE_NAME%|$APACHE_NAME|g;
s|%WSGIAPP%|$PANKO_WSGI_DIR/app|g;
s|%USER%|$STACK_USER|g;
s|%VIRTUALENV%|$venv_path|g
" -i $panko_apache_conf
}
# Install required services for storage backends
function _panko_prepare_storage_backend {
if [ "$PANKO_BACKEND" = 'mongodb' ] ; then
pip_install_gr pymongo
_panko_install_mongodb
fi
if [ "$PANKO_BACKEND" = 'es' ] ; then
$PANKO_DIR/devstack/lib/elasticsearch.sh download
$PANKO_DIR/devstack/lib/elasticsearch.sh install
fi
}
# Create panko related accounts in Keystone
function _panko_create_accounts {
if is_service_enabled panko-api; then
create_service_user "panko" "admin"
get_or_create_service "panko" "event" "OpenStack Telemetry Service"
get_or_create_endpoint "event" \
"$REGION_NAME" \
"$(panko_service_url)" \
"$(panko_service_url)" \
"$(panko_service_url)"
fi
}
# Activities to do before panko has been installed.
function preinstall_panko {
echo_summary "Preinstall not in virtualenv context. Skipping."
}
# Remove WSGI files, disable and remove Apache vhost file
function _panko_cleanup_apache_wsgi {
sudo rm -f "$PANKO_WSGI_DIR"/*
sudo rmdir "$PANKO_WSGI_DIR"
sudo rm -f $(apache_site_config_for panko)
}
function _panko_drop_database {
if is_service_enabled panko-api ; then
if [ "$PANKO_BACKEND" = 'mongodb' ] ; then
mongo panko --eval "db.dropDatabase();"
elif [ "$PANKO_BACKEND" = 'es' ] ; then
curl -XDELETE "localhost:9200/events_*"
fi
fi
}
# cleanup_panko() - Remove residual data files, anything left over
# from previous runs that a clean run would need to clean up
function cleanup_panko {
if [ "$PANKO_DEPLOY" == "mod_wsgi" ]; then
_panko_cleanup_apache_wsgi
fi
_panko_drop_database
sudo rm -f "$PANKO_CONF_DIR"/*
sudo rmdir "$PANKO_CONF_DIR"
}
# Set configuration for storage backend.
function _panko_configure_storage_backend {
if [ "$PANKO_BACKEND" = 'mysql' ] || [ "$PANKO_BACKEND" = 'postgresql' ] ; then
iniset $PANKO_CONF database connection $(database_connection_url panko)
elif [ "$PANKO_BACKEND" = 'es' ] ; then
iniset $PANKO_CONF database connection es://localhost:9200
${TOP_DIR}/pkg/elasticsearch.sh start
elif [ "$PANKO_BACKEND" = 'mongodb' ] ; then
iniset $PANKO_CONF database connection mongodb://localhost:27017/panko
else
die $LINENO "Unable to configure unknown PANKO_BACKEND $PANKO_BACKEND"
fi
_panko_drop_database
}
# Configure Panko
function configure_panko {
local conffile
iniset $PANKO_CONF DEFAULT debug "$ENABLE_DEBUG_LOG_LEVEL"
# Set up logging
if [ "$SYSLOG" != "False" ]; then
iniset $PANKO_CONF DEFAULT use_syslog "True"
fi
# Format logging
if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ] && [ "$PANKO_DEPLOY" != "mod_wsgi" ]; then
setup_colorized_logging $PANKO_CONF DEFAULT
fi
# Install the declarative configuration files to
# the conf dir.
# NOTE(cdent): Do not make this a glob as it will conflict
# with rootwrap installation done elsewhere and also clobber
# panko.conf settings that have already been made.
# Anyway, explicit is better than implicit.
cp $PANKO_DIR/etc/panko/api_paste.ini $PANKO_CONF_DIR
configure_auth_token_middleware $PANKO_CONF panko $PANKO_AUTH_CACHE_DIR
# Configure storage
if is_service_enabled panko-api; then
_panko_configure_storage_backend
fi
if is_service_enabled panko-api && [ "$PANKO_DEPLOY" == "mod_wsgi" ]; then
_panko_config_apache_wsgi
elif [ "$PANKO_DEPLOY" == "uwsgi" ]; then
# iniset creates these files when it's called if they don't exist.
PANKO_UWSGI_FILE=$PANKO_CONF_DIR/panko-uwsgi.ini
rm -f "$PANKO_UWSGI_FILE"
iniset "$PANKO_UWSGI_FILE" uwsgi http $PANKO_SERVICE_HOST:$PANKO_SERVICE_PORT
iniset "$PANKO_UWSGI_FILE" uwsgi wsgi-file "$PANKO_DIR/panko/api/app.wsgi"
# This is running standalone
iniset "$PANKO_UWSGI_FILE" uwsgi master true
# Set die-on-term & exit-on-reload so that uwsgi shuts down
iniset "$PANKO_UWSGI_FILE" uwsgi die-on-term true
iniset "$PANKO_UWSGI_FILE" uwsgi exit-on-reload true
iniset "$PANKO_UWSGI_FILE" uwsgi threads 10
iniset "$PANKO_UWSGI_FILE" uwsgi processes $API_WORKERS
iniset "$PANKO_UWSGI_FILE" uwsgi enable-threads true
iniset "$PANKO_UWSGI_FILE" uwsgi plugins python
iniset "$PANKO_UWSGI_FILE" uwsgi lazy-apps true
# uwsgi recommends this to prevent thundering herd on accept.
iniset "$PANKO_UWSGI_FILE" uwsgi thunder-lock true
# Override the default size for headers from the 4k default.
iniset "$PANKO_UWSGI_FILE" uwsgi buffer-size 65535
# Make sure the client doesn't try to re-use the connection.
iniset "$PANKO_UWSGI_FILE" uwsgi add-header "Connection: close"
fi
}
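# For illustration only (not part of the upstream plugin): with the iniset
# calls above, the generated panko-uwsgi.ini ends up roughly like this;
# the host, port and worker count are assumed example values:
#   [uwsgi]
#   http = 192.168.1.10:8977
#   wsgi-file = /opt/stack/panko/panko/api/app.wsgi
#   master = true
#   die-on-term = true
#   exit-on-reload = true
#   threads = 10
#   processes = 2
#   enable-threads = true
#   plugins = python
#   lazy-apps = true
#   thunder-lock = true
#   buffer-size = 65535
#   add-header = Connection: close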
# init_panko() - Create keystone accounts and the auth cache dir, and initialize the database
function init_panko {
# Get panko keystone settings in place
_panko_create_accounts
# Create cache dir
sudo install -d -o $STACK_USER $PANKO_AUTH_CACHE_DIR
rm -f $PANKO_AUTH_CACHE_DIR/*
if is_service_enabled panko-api && is_service_enabled mysql postgresql ; then
if [ "$PANKO_BACKEND" = 'mysql' ] || [ "$PANKO_BACKEND" = 'postgresql' ] || [ "$PANKO_BACKEND" = 'es' ] ; then
recreate_database panko
$PANKO_BIN_DIR/panko-dbsync
fi
fi
}
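# For illustration only (not part of the upstream plugin): the same schema
# migration can be run by hand, assuming the DevStack default config path:
#   panko-dbsync --config-file /etc/panko/panko.conf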
# Install Panko.
function install_panko {
if is_service_enabled panko-api; then
_panko_prepare_storage_backend
fi
setup_develop $PANKO_DIR
sudo install -d -o $STACK_USER -m 755 $PANKO_CONF_DIR
if [ "$PANKO_DEPLOY" == "mod_wsgi" ]; then
install_apache_wsgi
elif [ "$PANKO_DEPLOY" == "uwsgi" ]; then
pip_install uwsgi
fi
}
# start_panko() - Start running processes, including screen
function start_panko {
if [[ "$PANKO_DEPLOY" == "mod_wsgi" ]]; then
enable_apache_site panko
restart_apache_server
tail_log panko /var/log/$APACHE_NAME/panko.log
tail_log panko-api /var/log/$APACHE_NAME/panko_access.log
elif [ "$PANKO_DEPLOY" == "uwsgi" ]; then
run_process panko-api "$PANKO_BIN_DIR/uwsgi $PANKO_UWSGI_FILE"
else
run_process panko-api "$PANKO_BIN_DIR/panko-api -d -v --config-file $PANKO_CONF"
fi
}
# stop_panko() - Stop running processes
function stop_panko {
if is_service_enabled panko-api ; then
if [ "$PANKO_DEPLOY" == "mod_wsgi" ]; then
disable_apache_site panko
restart_apache_server
else
stop_process panko-api
fi
fi
}
# install_pankoclient() - Collect source and prepare
function install_pankoclient {
if use_library_from_git "python-pankoclient"; then
git_clone_by_name "python-pankoclient"
setup_dev_lib "python-pankoclient"
else
pip_install pankoclient
fi
}
# This is the main entry point for plugin.sh
if is_service_enabled panko-api; then
if [[ "$1" == "stack" && "$2" == "pre-install" ]]; then
# Set up other services
echo_summary "Configuring system services for Panko"
preinstall_panko
elif [[ "$1" == "stack" && "$2" == "install" ]]; then
echo_summary "Installing Panko"
# Use stack_install_service here to account for virtualenv
stack_install_service panko
install_pankoclient
elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
echo_summary "Configuring Panko"
configure_panko
elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
echo_summary "Initializing Panko"
# Prepare the base state for panko
init_panko
# Start the services
start_panko
fi
if [[ "$1" == "unstack" ]]; then
echo_summary "Shutting Down Panko"
stop_panko
fi
if [[ "$1" == "clean" ]]; then
echo_summary "Cleaning Panko"
cleanup_panko
fi
fi
# Restore xtrace
$XTRACE
panko-7.0.0/devstack/settings 0000664 0001750 0001750 00000001516 13551610116 016241 0 ustar zuul zuul 0000000 0000000 enable_service panko-api
# Default directories
PANKO_DIR=$DEST/panko
PANKO_CONF_DIR=/etc/panko
PANKO_CONF=$PANKO_CONF_DIR/panko.conf
PANKO_AUTH_CACHE_DIR=${PANKO_AUTH_CACHE_DIR:-/var/cache/panko}
PANKO_WSGI_DIR=${PANKO_WSGI_DIR:-/var/www/panko}
# Set up database backend
PANKO_BACKEND=${PANKO_BACKEND:-mysql}
# Panko connection info.
PANKO_SERVICE_PROTOCOL=http
PANKO_SERVICE_HOST=$SERVICE_HOST
PANKO_SERVICE_PORT=${PANKO_SERVICE_PORT:-8977}
# PANKO_DEPLOY defines how Panko is deployed, allowed values:
# - mod_wsgi: Run Panko under Apache HTTPd mod_wsgi
# - simple: Run panko-api
# - uwsgi: Run Panko under uwsgi
# - : Fallback to PANKO_USE_MOD_WSGI or ENABLE_HTTPD_MOD_WSGI_SERVICES
PANKO_DEPLOY=${PANKO_DEPLOY}
# TODO: remove this fallback before this work is finished.
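# Example (assumed local.conf usage): to run the API under uwsgi, set the
# following in DevStack's [[local|localrc]] section before running stack.sh:
#   enable_plugin panko https://opendev.org/openstack/panko
#   PANKO_DEPLOY=uwsgi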
# Tell emacs to use shell-script-mode
## Local variables:
## mode: shell-script
## End:
panko-7.0.0/devstack/upgrade/ 0000775 0001750 0001750 00000000000 13551610251 016102 5 ustar zuul zuul 0000000 0000000 panko-7.0.0/devstack/upgrade/upgrade.sh 0000775 0001750 0001750 00000004676 13551610116 020105 0 ustar zuul zuul 0000000 0000000 #!/usr/bin/env bash
# ``upgrade-panko``
echo "*********************************************************************"
echo "Begin $0"
echo "*********************************************************************"
# Clean up any resources that may be in use
cleanup() {
set +o errexit
echo "*********************************************************************"
echo "ERROR: Abort $0"
echo "*********************************************************************"
# Kill ourselves to signal any calling process
trap 2; kill -2 $$
}
trap cleanup SIGHUP SIGINT SIGTERM
# Keep track of the grenade directory
RUN_DIR=$(cd $(dirname "$0") && pwd)
# Source params
. $GRENADE_DIR/grenaderc
# Import common functions
. $GRENADE_DIR/functions
# This script exits on an error so that errors don't compound and you see
# only the first error that occurred.
set -o errexit
# Save mongodb state (replace with snapshot)
# TODO(chdent): There used to be a 'register_db_to_save panko'
# here, which we may wish to consider putting back in.
if grep -q 'connection *= *mongo' /etc/panko/panko.conf; then
mongodump --db panko --out $SAVE_DIR/panko-dump.$BASE_RELEASE
fi
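# For illustration only (not part of this script): a dump taken above could
# later be restored with mongorestore, e.g.:
#   mongorestore --db panko $SAVE_DIR/panko-dump.$BASE_RELEASE/panko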
# Upgrade Panko
# ==================
# Locate panko devstack plugin, the directory above the
# grenade plugin.
PANKO_DEVSTACK_DIR=$(dirname $(dirname $0))
# Get functions from current DevStack
. $TARGET_DEVSTACK_DIR/functions
. $TARGET_DEVSTACK_DIR/stackrc
. $TARGET_DEVSTACK_DIR/lib/apache
# Get panko functions from devstack plugin
. $PANKO_DEVSTACK_DIR/settings
# Print the commands being run so that we can see the command that triggers
# an error.
set -o xtrace
# Install the target panko
. $PANKO_DEVSTACK_DIR/plugin.sh stack install
# Run upgrade-panko for the specific release
upgrade_project panko $RUN_DIR $BASE_DEVSTACK_BRANCH $TARGET_DEVSTACK_BRANCH
# Migrate the database
# NOTE(chdent): As the plugin evolves, PANKO_BIN_DIR is likely to be
# defined for us, but currently it is not.
PANKO_BIN_DIR=$(dirname $(which panko-dbsync))
$PANKO_BIN_DIR/panko-dbsync || die $LINENO "DB sync error"
# Start Panko
start_panko
ensure_services_started panko-api
# Save mongodb state (replace with snapshot)
if grep -q 'connection *= *mongo' /etc/panko/panko.conf; then
mongodump --db panko --out $SAVE_DIR/panko-dump.$TARGET_RELEASE
fi
set +o xtrace
echo "*********************************************************************"
echo "SUCCESS: End $0"
echo "*********************************************************************"
panko-7.0.0/devstack/upgrade/settings 0000664 0001750 0001750 00000000463 13551610116 017670 0 ustar zuul zuul 0000000 0000000 register_project_for_upgrade panko
devstack_localrc base enable_plugin panko https://opendev.org/openstack/panko
devstack_localrc base enable_service panko-api tempest
devstack_localrc target enable_plugin panko https://opendev.org/openstack/panko
devstack_localrc target enable_service panko-api tempest
panko-7.0.0/devstack/upgrade/shutdown.sh 0000775 0001750 0001750 00000000746 13551610116 020323 0 ustar zuul zuul 0000000 0000000 #!/bin/bash
#
#
set -o errexit
. $GRENADE_DIR/grenaderc
. $GRENADE_DIR/functions
. $BASE_DEVSTACK_DIR/functions
. $BASE_DEVSTACK_DIR/stackrc # needed for status directory
. $BASE_DEVSTACK_DIR/lib/tls
. $BASE_DEVSTACK_DIR/lib/apache
# Locate the panko plugin and get its functions
PANKO_DEVSTACK_DIR=$(dirname $(dirname $0))
. $PANKO_DEVSTACK_DIR/plugin.sh
set -o xtrace
stop_panko
# ensure everything is stopped
SERVICES_DOWN="panko-api"
ensure_services_stopped $SERVICES_DOWN
panko-7.0.0/devstack/apache-panko.template 0000664 0001750 0001750 00000000736 13551610116 020545 0 ustar zuul zuul 0000000 0000000 Listen %PORT%
<VirtualHost *:%PORT%>
    WSGIDaemonProcess panko-api processes=2 threads=10 user=%USER% display-name=%{GROUP} %VIRTUALENV%
    WSGIProcessGroup panko-api
    WSGIScriptAlias / %WSGIAPP%
    WSGIApplicationGroup %{GLOBAL}
    <IfVersion >= 2.4>
        ErrorLogFormat "%{cu}t %M"
    </IfVersion>
    ErrorLog /var/log/%APACHE_NAME%/panko.log
    CustomLog /var/log/%APACHE_NAME%/panko_access.log combined
</VirtualHost>
WSGISocketPrefix /var/run/%APACHE_NAME%
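# The %VAR% tokens above are placeholders substituted by the DevStack
# plugin (see _panko_config_apache_wsgi in plugin.sh) when the vhost is
# installed.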
panko-7.0.0/LICENSE 0000664 0001750 0001750 00000023637 13551610116 013667 0 ustar zuul zuul 0000000 0000000
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
panko-7.0.0/run-tests.sh 0000775 0001750 0001750 00000000374 13551610116 015156 0 ustar zuul zuul 0000000 0000000 #!/bin/bash
set -e
set -o pipefail
# Run unit tests
export OS_TEST_PATH=panko/tests/unit
stestr run $*
# Run functional tests
export OS_TEST_PATH=panko/tests/functional/
for backend in $PANKO_BACKENDS; do
pifpaf run $backend -- stestr run $*
done
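# Example usage (assumed invocation): run only the MySQL functional tests,
# passing a test filter through to stestr:
#   PANKO_BACKENDS=mysql ./run-tests.sh panko.tests.functional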
panko-7.0.0/tox.ini 0000664 0001750 0001750 00000004642 13551610116 014170 0 ustar zuul zuul 0000000 0000000 [tox]
minversion = 2.0
skipsdist = True
envlist = py{37,27}{,-mongodb,-mysql,-postgresql,-elasticsearch},pep8
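# For example, "tox -e py37-mysql" expands from the envlist above and, via
# the setenv block below, runs the functional tests against MySQL only.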
[testenv]
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} {opts} {packages}
usedevelop = True
setenv =
PANKO_BACKENDS=mongodb mysql postgresql
mongodb: PANKO_BACKENDS=mongodb
mysql: PANKO_BACKENDS=mysql
postgresql: PANKO_BACKENDS=postgresql
elasticsearch: PANKO_BACKENDS=elasticsearch
# NOTE(tonyb): This project has chosen to *NOT* consume upper-constraints.txt
passenv = OS_TEST_TIMEOUT OS_STDOUT_CAPTURE OS_STDERR_CAPTURE OS_LOG_CAPTURE
whitelist_externals = bash
commands =
oslo-config-generator --config-file=etc/panko/panko-config-generator.conf
bash run-tests.sh {posargs}
[testenv:cover]
basepython = python3
setenv =
{[testenv]setenv}
PYTHON=coverage run --source panko --parallel-mode
commands =
stestr run {posargs}
coverage combine
coverage html -d cover
coverage xml -o cover/coverage.xml
coverage report
[testenv:pep8]
basepython = python3
deps = hacking<0.13,>=0.12.0
doc8
commands = flake8
doc8 {posargs}
[testenv:releasenotes]
basepython = python3
commands = sphinx-build -a -E -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
[testenv:genconfig]
basepython = python3
commands = oslo-config-generator --config-file=etc/panko/panko-config-generator.conf
[testenv:genpolicy]
basepython = python3
commands = oslopolicy-sample-generator --config-file=etc/panko/panko-policy-generator.conf
[testenv:docs]
basepython = python3
deps = -r{toxinidir}/doc/requirements.txt
commands = python setup.py build_sphinx
setenv = PYTHONHASHSEED=0
[testenv:pdf-docs]
basepython = python3
deps = {[testenv:docs]deps}
whitelist_externals =
rm
make
commands =
rm -rf doc/build/pdf
sphinx-build -W -b latex doc/source doc/build/pdf
make -C doc/build/pdf
[testenv:venv]
basepython = python3
commands = {posargs}
setenv = PYTHONHASHSEED=0
[doc8]
ignore = D000
ignore-path = .venv,.git,.tox,*panko/locale*,*lib/python*,panko.egg*,doc/build,doc/source/api,releasenotes/*
[flake8]
ignore =
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build
show-source = True
[hacking]
import_exceptions =
panko.i18n
local-check-factory = panko.hacking.checks.factory
panko-7.0.0/babel.cfg 0000664 0001750 0001750 00000000021 13551610116 014366 0 ustar zuul zuul 0000000 0000000 [python: **.py]
panko-7.0.0/setup.py 0000664 0001750 0001750 00000001777 13551610116 014375 0 ustar zuul zuul 0000000 0000000 # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In Python < 2.7.4, lazily loading the `pbr` package can break
# setuptools if some other modules have registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
panko-7.0.0/AUTHORS 0000664 0001750 0001750 00000032712 13551610251 013724 0 ustar zuul zuul 0000000 0000000 Abhishek Chanda
Abhishek Lekshmanan
Abhishek Lekshmanan
Adelina Tuvenie
Ajaya Agrawal
Akhil Hingane
Ala Rezmerita
Alessandro Pilotti
Alex Holden
Alexei Kornienko
Amy Fong
Ana Malagon
Ananya Chatterjee
Andreas Jaeger
Andreas Jaeger
Andrew Hutchings
Andrew Melton
Angus Lees
Angus Salkeld
Ann Kamyshnikova
Artur Svechnikov
Ashwin Agate
Balazs Gibizer
Bartosz Górski
Ben Nemec
Ben Nemec
Boris Bobrov
Boris Pavlovic
Brad Pokorny
Brant Knudson
Brian Cline
Brian Moss
Brooklyn Chen
Béla Vancsics
Can ZHANG
Cao Xuan Hoang
Cedric Soulas
Chad Lung
Chandan Kumar
Chandan Kumar
ChangBo Guo(gcb)
Chaozhe.Chen
ChenZheng
Chinmaya Bharadwaj
Chmouel Boudjnah
Chris Dent
Chris Dent
Christian Berendt
Christian Martinez
Christian Schwede
Chuck Short
Clark Boylan
Claudiu Belu
Corey Bryant
Cyril Roelandt
Cyril Roelandt
Damian Van Vuuren
Dan Florea
Dan Prince
Dan Travis
Dao Cong Tien
Darren Birkett
Davanum Srinivas
David Peraza
David Rabel
Dazhao
Debo~ Dutta
DeepaJon
Dina Belova
Dirk Mueller
Divya
Dong Ma
Doug Hellmann
Drew Thorstensen
Duong Ha-Quang
Edwin Zhai
Emilien Macchi
Emma Foley
Endre Karlson
Eoghan Glynn
Eoghan Glynn
Eric Brown
Eyal
Fabio Giannetti
Fei Long Wang
Feng Xi Yan
Fengqian Gao
Flavio Percoco
François Charlier
François Rossigneux
Frederic FAURE
Gangyi Luo
Gauvain Pocentek
Gerard Garcia
Ghanshyam Mann
Gordon Chung
Graham Binns
Guangyu Suo
Hang Liu
Hangdong Zhang
Hanxi
Hanxi Liu
Hanxi_Liu
Haomeng, Wang
Harri Hämäläinen
Hisashi Osanai
Hoang Trung Hieu
Hongbin Lu
Ian Wienand
Igor Degtiarov
Ihar Hrachyshka
Ildiko Vancsa
Ilya Sviridov
Ilya Tyaptin
Ionuț Arțăriși
Jake Liu
James E. Blair
James E. Blair
Jason Myers
Jason Zhang
Jay Lau
Jay Pipes
Jeremy Liu
Jeremy Stanley
Jie Li
Jim Rollenhagen
Jimmy McCrory
Joanna H. Huang
Joe Gordon
Joe H. Rahme
John H. Tran
John Herndon
JordanP
JuPing
Julien Danjou
Justin SB
KIYOHIRO ADACHI
Kamil Rykowski
Keith Byrne
Ken Pepple
Ken'ichi Ohmichi
Ken'ichi Ohmichi
Kennan
Kennan
Kevin McDonald
Kevin_Zheng
Kirill Bespalov
Kishore Juigil
Koert van der Veer
Komei Shimamura
Ladislav Smola
Lan Qi song
Lance Bragstad
Lena Novokshonova
Lianhao Lu
LinuxJedi
LiuSheng
Luis A. Garcia
Luis Pigueiras
Luo Gangyi
Luong Anh Tuan
Maho Koshiya
Marios Andreou
Mark McClain
Mark McLoughlin
Martin Geisler
Martin Kletzander
Mathew Odden
Mathieu Gagné
Matt Riedemann
Mehdi Abaakouk
Mehdi Abaakouk
Michael Krotscheck
Michael Still
Michał Jastrzębski
Miguel Alex Cantu
Miguel Grinberg
Mike Spreitzer
Ming Shuang Xian
Monsyne Dragon
Monty Taylor
Morgan Fainberg
Nadya Privalova
Nadya Shakhat
Nejc Saje
Nguyen Van Trung
Nick Barcet
Nicolas Barcet (nijaba)
Noorul Islam K M
Octavian Ciuhandu
OpenStack Release Bot
PanFengyun
PanFengyun
Patrick East
Paul Belanger
Pavlo Shchelokovskyy
Peter Portante
Phil Neal
Piyush Masrani
Pradeep Kilambi
Pradeep Kilambi
Pradeep Kumar Singh
Pradyumna Sampath
Pádraig Brady
Qiaowei Ren
Rabi Mishra
Rafael Folco
Rafael Rivero
Rich Bowen
Rikimaru Honjo
Rob Raymond
Robert Collins
Robert Mizielski
Rohit Jaiswal
Romain Soufflet
Roman Bogorodskiy
Rosario Di Somma
Ruslan Aliev
Russell Bryant
Ryan Petrello
Ryota MIBU
Saba Ahmed