ceres-master/0000755000175000017500000000000012206267256012466 5ustar jonasjonasceres-master/docs/0000755000175000017500000000000012206267256013416 5ustar jonasjonasceres-master/docs/index.rst0000644000175000017500000000010212206267256015250 0ustar jonasjonasCeres ===== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ceres-master/docs/Makefile0000644000175000017500000001267012206267256015064 0ustar jonasjonas# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ceres.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ceres.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." 
@echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/ceres" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ceres" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." ceres-master/docs/_static/0000755000175000017500000000000012206267256015044 5ustar jonasjonasceres-master/docs/_static/default.css0000644000175000017500000003641112206267256017207 0ustar jonasjonas/* * rtd.css * ~~~~~~~~~~~~~~~ * * Sphinx stylesheet -- sphinxdoc theme. Originally created by * Armin Ronacher for Werkzeug. * * Customized for ReadTheDocs by Eric Pierce & Eric Holscher * * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. 
* */ /* RTD colors * light blue: #e8ecef * medium blue: #8ca1af * dark blue: #465158 * dark grey: #444444 * * white hover: #d1d9df; * medium blue hover: #697983; * green highlight: #8ecc4c * light blue (project bar): #e8ecef */ @import url("basic.css"); /* PAGE LAYOUT -------------------------------------------------------------- */ body { font: 100%/1.5 "ff-meta-web-pro-1","ff-meta-web-pro-2",Arial,"Helvetica Neue",sans-serif; text-align: center; color: black; background-color: #465158; padding: 0; margin: 0; } div.document { text-align: left; background-color: #e8ecef; } div.bodywrapper { background-color: #ffffff; border-left: 1px solid #ccc; border-bottom: 1px solid #ccc; margin: 0 0 0 16em; } div.body { margin: 0; padding: 0.5em 1.3em; min-width: 20em; } div.related { font-size: 1em; background-color: #465158; } div.documentwrapper { float: left; width: 100%; background-color: #e8ecef; } /* HEADINGS --------------------------------------------------------------- */ h1 { margin: 0; padding: 0.7em 0 0.3em 0; font-size: 1.5em; line-height: 1.15; color: #111; clear: both; } h2 { margin: 2em 0 0.2em 0; font-size: 1.35em; padding: 0; color: #465158; } h3 { margin: 1em 0 -0.3em 0; font-size: 1.2em; color: #6c818f; } div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { color: black; } h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor { display: none; margin: 0 0 0 0.3em; padding: 0 0.2em 0 0.2em; color: #aaa !important; } h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { display: inline; } h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover, h5 a.anchor:hover, h6 a.anchor:hover { color: #777; background-color: #eee; } /* LINKS ------------------------------------------------------------------ */ /* Normal links get a pseudo-underline */ a { color: #444; text-decoration: none; border-bottom: 1px solid #ccc; } /* Links in sidebar, TOC, index trees and tables have no underline */ .sphinxsidebar a, .toctree-wrapper a, .indextable a, #indices-and-tables a { color: #444; text-decoration: none; border-bottom: none; } /* Most links get an underline-effect when hovered */ a:hover, div.toctree-wrapper a:hover, .indextable a:hover, #indices-and-tables a:hover { color: #111; text-decoration: none; border-bottom: 1px solid #111; } /* Footer links */ div.footer a { color: #86989B; text-decoration: none; border: none; } div.footer a:hover { color: #a6b8bb; text-decoration: underline; border: none; } /* Permalink anchor (subtle grey with a red hover) */ div.body a.headerlink { color: #ccc; font-size: 1em; margin-left: 6px; padding: 0 4px 0 4px; text-decoration: none; border: none; } div.body a.headerlink:hover { color: #c60f0f; border: none; } /* NAVIGATION BAR --------------------------------------------------------- */ div.related ul { height: 2.5em; } div.related ul li { margin: 0; padding: 0.65em 0; float: left; display: block; color: white; /* For the >> separators */ font-size: 0.8em; } div.related ul li.right { float: right; margin-right: 5px; color: transparent; /* Hide the | separators */ } /* "Breadcrumb" links in nav bar */ div.related ul li a { order: none; background-color: inherit; font-weight: bold; margin: 6px 0 6px 4px; line-height: 1.75em; color: #ffffff; padding: 0.4em 0.8em; border: none; border-radius: 3px; } /* previous / next / modules / index links look more like buttons */ div.related ul li.right a { margin: 0.375em 0; 
background-color: #697983; text-shadow: 0 1px rgba(0, 0, 0, 0.5); border-radius: 3px; -webkit-border-radius: 3px; -moz-border-radius: 3px; } /* All navbar links light up as buttons when hovered */ div.related ul li a:hover { background-color: #8ca1af; color: #ffffff; text-decoration: none; border-radius: 3px; -webkit-border-radius: 3px; -moz-border-radius: 3px; } /* Take extra precautions for tt within links */ a tt, div.related ul li a tt { background: inherit !important; color: inherit !important; } /* SIDEBAR ---------------------------------------------------------------- */ div.sphinxsidebarwrapper { padding: 0; } div.sphinxsidebar { margin: 0; margin-left: -100%; float: left; top: 3em; left: 0; padding: 0 1em; width: 14em; font-size: 1em; text-align: left; background-color: #e8ecef; } div.sphinxsidebar img { max-width: 12em; } div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p.logo { margin: 1.2em 0 0.3em 0; font-size: 1em; padding: 0; color: #222222; font-family: "ff-meta-web-pro-1", "ff-meta-web-pro-2", "Arial", "Helvetica Neue", sans-serif; } div.sphinxsidebar h3 a { color: #444444; } div.sphinxsidebar ul, div.sphinxsidebar p { margin-top: 0; padding-left: 0; line-height: 130%; background-color: #e8ecef; } /* No bullets for nested lists, but a little extra indentation */ div.sphinxsidebar ul ul { list-style-type: none; margin-left: 1.5em; padding: 0; } /* A little top/bottom padding to prevent adjacent links' borders * from overlapping each other */ div.sphinxsidebar ul li { padding: 1px 0; } /* A little left-padding to make these align with the ULs */ div.sphinxsidebar p.topless { padding-left: 0 0 0 1em; } /* Make these into hidden one-liners */ div.sphinxsidebar ul li, div.sphinxsidebar p.topless { white-space: nowrap; overflow: hidden; } /* ...which become visible when hovered */ div.sphinxsidebar ul li:hover, div.sphinxsidebar p.topless:hover { overflow: visible; } /* Search text box and "Go" button */ #searchbox { margin-top: 2em; margin-bottom: 1em; background: #ddd; padding: 0.5em; border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; } #searchbox h3 { margin-top: 0; } /* Make search box and button abut and have a border */ input, div.sphinxsidebar input { border: 1px solid #999; float: left; } /* Search textbox */ input[type="text"] { margin: 0; padding: 0 3px; height: 20px; width: 144px; border-top-left-radius: 3px; border-bottom-left-radius: 3px; -moz-border-radius-topleft: 3px; -moz-border-radius-bottomleft: 3px; -webkit-border-top-left-radius: 3px; -webkit-border-bottom-left-radius: 3px; } /* Search button */ input[type="submit"] { margin: 0 0 0 -1px; /* -1px prevents a double-border with textbox */ height: 22px; color: #444; background-color: #e8ecef; padding: 1px 4px; font-weight: bold; border-top-right-radius: 3px; border-bottom-right-radius: 3px; -moz-border-radius-topright: 3px; -moz-border-radius-bottomright: 3px; -webkit-border-top-right-radius: 3px; -webkit-border-bottom-right-radius: 3px; } input[type="submit"]:hover { color: #ffffff; background-color: #8ecc4c; } div.sphinxsidebar p.searchtip { clear: both; padding: 0.5em 0 0 0; background: #ddd; color: #666; font-size: 0.9em; } /* Sidebar links are unusual */ div.sphinxsidebar li a, div.sphinxsidebar p a { background: #e8ecef; /* In case links overlap main content */ border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; border: 1px solid transparent; /* To prevent things jumping around on hover */ padding: 0 5px 0 5px; } div.sphinxsidebar li a:hover, 
div.sphinxsidebar p a:hover { color: #111; text-decoration: none; border: 1px solid #888; } div.sphinxsidebar p.logo a { border: 0; } /* Tweak any link appearing in a heading */ div.sphinxsidebar h3 a { } /* OTHER STUFF ------------------------------------------------------------ */ cite, code, tt { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 0.95em; letter-spacing: 0.01em; } tt { background-color: #f2f2f2; color: #444; } tt.descname, tt.descclassname, tt.xref { border: 0; } hr { border: 1px solid #abc; margin: 2em; } pre, #_fontwidthtest { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; margin: 1em 2em; font-size: 0.95em; letter-spacing: 0.015em; line-height: 120%; padding: 0.5em; border: 1px solid #ccc; background-color: #eee; border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; } pre a { color: inherit; text-decoration: underline; } td.linenos pre { margin: 1em 0em; } td.code pre { margin: 1em 0em; } div.quotebar { background-color: #f8f8f8; max-width: 250px; float: right; padding: 2px 7px; border: 1px solid #ccc; } div.topic { background-color: #f8f8f8; } table { border-collapse: collapse; margin: 0 -0.5em 0 -0.5em; } table td, table th { padding: 0.2em 0.5em 0.2em 0.5em; } /* ADMONITIONS AND WARNINGS ------------------------------------------------- */ /* Shared by admonitions, warnings and sidebars */ div.admonition, div.warning, div.sidebar { font-size: 0.9em; margin: 2em; padding: 0; /* border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; */ } div.admonition p, div.warning p, div.sidebar p { margin: 0.5em 1em 0.5em 1em; padding: 0; } div.admonition pre, div.warning pre, div.sidebar pre { margin: 0.4em 1em 0.4em 1em; } div.admonition p.admonition-title, div.warning p.admonition-title, div.sidebar p.sidebar-title { margin: 0; padding: 0.1em 0 0.1em 0.5em; color: white; font-weight: bold; font-size: 1.1em; text-shadow: 0 1px rgba(0, 0, 0, 0.5); } div.admonition ul, div.admonition ol, div.warning ul, div.warning ol, div.sidebar ul, div.sidebar ol { margin: 0.1em 0.5em 0.5em 3em; padding: 0; } /* Admonitions and sidebars only */ div.admonition, div.sidebar { border: 1px solid #609060; background-color: #e9ffe9; } div.admonition p.admonition-title, div.sidebar p.sidebar-title { background-color: #70A070; border-bottom: 1px solid #609060; } /* Warnings only */ div.warning { border: 1px solid #900000; background-color: #ffe9e9; } div.warning p.admonition-title { background-color: #b04040; border-bottom: 1px solid #900000; } /* Sidebars only */ div.sidebar { max-width: 30%; } div.versioninfo { margin: 1em 0 0 0; border: 1px solid #ccc; background-color: #DDEAF0; padding: 8px; line-height: 1.3em; font-size: 0.9em; } .viewcode-back { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; } div.viewcode-block:target { background-color: #f4debf; border-top: 1px solid #ac9; border-bottom: 1px solid #ac9; } dl { margin: 1em 0 2.5em 0; } /* Highlight target when you click an internal link */ dt:target { background: #ffe080; } /* Don't highlight whole divs */ div.highlight { background: transparent; } /* But do highlight spans (so search results can be highlighted) */ span.highlight { background: #ffe080; } div.footer { background-color: #465158; color: #eeeeee; padding: 0 2em 2em 2em; clear: both; font-size: 0.8em; text-align: center; } p { margin: 0.8em 0 0.5em 0; } .section p img.math { margin: 0; } .section p img { margin: 1em 2em; } /* 
MOBILE LAYOUT -------------------------------------------------------------- */ @media screen and (max-width: 600px) { h1, h2, h3, h4, h5 { position: relative; } ul { padding-left: 1.25em; } div.bodywrapper a.headerlink, #indices-and-tables h1 a { color: #e6e6e6; font-size: 80%; float: right; line-height: 1.8; position: absolute; right: -0.7em; visibility: inherit; } div.bodywrapper h1 a.headerlink, #indices-and-tables h1 a { line-height: 1.5; } pre { font-size: 0.7em; overflow: auto; word-wrap: break-word; white-space: pre-wrap; } div.related ul { height: 2.5em; padding: 0; text-align: left; } div.related ul li { clear: both; color: #465158; padding: 0.2em 0; } div.related ul li:last-child { border-bottom: 1px dotted #8ca1af; padding-bottom: 0.4em; margin-bottom: 1em; width: 100%; } div.related ul li a { color: #465158; padding-right: 0; } div.related ul li a:hover { background: inherit; color: inherit; } div.related ul li.right { clear: none; padding: 0.65em 0; margin-bottom: 0.5em; } div.related ul li.right a { color: #fff; padding-right: 0.8em; } div.related ul li.right a:hover { background-color: #8ca1af; } div.body { clear: both; min-width: 0; word-wrap: break-word; } div.bodywrapper { margin: 0 0 0 0; } div.sphinxsidebar { float: none; margin: 0; width: auto; } div.sphinxsidebar input[type="text"] { height: 2em; line-height: 2em; width: 70%; } div.sphinxsidebar input[type="submit"] { height: 2em; margin-left: 0.5em; width: 20%; } div.sphinxsidebar p.searchtip { background: inherit; margin-bottom: 1em; } div.sphinxsidebar ul li, div.sphinxsidebar p.topless { white-space: normal; } .bodywrapper img { display: block; margin-left: auto; margin-right: auto; max-width: 100%; } div.documentwrapper { float: none; } div.admonition, div.warning, pre, blockquote { margin-left: 0em; margin-right: 0em; } .body p img { margin: 0; } #searchbox { background: transparent; } .related:not(:first-child) li { display: none; } .related:not(:first-child) li.right { display: block; } div.footer { padding: 1em; } .rtd_doc_footer .rtd-badge { float: none; margin: 1em auto; position: static; } .rtd_doc_footer .rtd-badge.revsys-inline { margin-right: auto; margin-bottom: 2em; } table.indextable { display: block; width: auto; } .indextable tr { display: block; } .indextable td { display: block; padding: 0; width: auto !important; } .indextable td dt { margin: 1em 0; } ul.search { margin-left: 0.25em; } ul.search li div.context { font-size: 90%; line-height: 1.1; margin-bottom: 1; margin-left: 0; } } ceres-master/docs/ceres.rst0000644000175000017500000000026112206267256015250 0ustar jonasjonas============================================ ceres ============================================ .. contents:: :local: .. automodule:: ceres :members: :undoc-members: ceres-master/docs/conf.py0000644000175000017500000001742412206267256014725 0ustar jonasjonas# -*- coding: utf-8 -*- # # ceres documentation build configuration file, created by # sphinx-quickstart on Thu Jan 3 04:15:28 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(0, os.path.abspath('..')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['.templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'ceres' copyright = u'2011, Chris Davis' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.10.0' # The full version, including alpha/beta/rc tags. release = '0.10.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'ceresdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'ceres.tex', u'ceres Documentation', u'Chris Davis', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'ceres', u'ceres Documentation', [u'Chris Davis'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'ceres', u'ceres Documentation', u'Chris Davis', 'ceres', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
#texinfo_show_urls = 'footnote' # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} ceres-master/.travis.yml0000644000175000017500000000032112206267256014573 0ustar jonasjonas# http://travis-ci.org/#!/graphite-project/ceres language: python python: - 2.6 - 2.7 install: - pip install -r requirements.txt --use-mirrors - python setup.py install script: - nosetests ceres-master/ceres.py0000644000175000017500000005233512206267256014151 0ustar jonasjonas# Copyright 2011 Chris Davis # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # Ceres requires Python 2.6 or newer import os import struct import json import errno from math import isnan from itertools import izip from os.path import isdir, exists, join, dirname, abspath, getsize, getmtime from glob import glob from bisect import bisect_left TIMESTAMP_FORMAT = "!L" TIMESTAMP_SIZE = struct.calcsize(TIMESTAMP_FORMAT) DATAPOINT_FORMAT = "!d" DATAPOINT_SIZE = struct.calcsize(DATAPOINT_FORMAT) NAN = float('nan') PACKED_NAN = struct.pack(DATAPOINT_FORMAT, NAN) MAX_SLICE_GAP = 80 DEFAULT_TIMESTEP = 60 DEFAULT_SLICE_CACHING_BEHAVIOR = 'none' SLICE_PERMS = 0644 DIR_PERMS = 0755 class CeresTree: """Represents a tree of Ceres metrics contained within a single path on disk This is the primary Ceres API. 
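A minimal usage sketch (the tree root and metric name below are illustrative,
not defaults shipped with Ceres)::

    import time
    from ceres import CeresTree

    # Create a tree (and its .ceres-tree marker directory) under an example root
    tree = CeresTree.createTree('/opt/graphite/storage/ceres')

    # Create a metric node with a 60-second step, then store two datapoints
    tree.createNode('example.metric', timeStep=60)
    now = int(time.time())
    tree.store('example.metric', [(now - 60, 1.0), (now, 2.0)])

    # Read the interval back as (timestamp, value) pairs
    for timestamp, value in tree.fetch('example.metric', now - 120, now):
        print timestamp, value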
:param root: The directory root of the Ceres tree See :func:`setDefaultSliceCachingBehavior` to adjust caching behavior """ def __init__(self, root): if isdir(root): self.root = abspath(root) else: raise ValueError("Invalid root directory '%s'" % root) self.nodeCache = {} def __repr__(self): return "" % (id(self), self.root) __str__ = __repr__ @classmethod def createTree(cls, root, **props): """Create and returns a new Ceres tree with the given properties :param root: The root directory of the new Ceres tree :keyword \*\*props: Arbitrary key-value properties to store as tree metadata :returns: :class:`CeresTree` """ ceresDir = join(root, '.ceres-tree') if not isdir(ceresDir): os.makedirs(ceresDir, DIR_PERMS) for prop,value in props.items(): propFile = join(ceresDir, prop) fh = open(propFile, 'w') fh.write(str(value)) fh.close() return cls(root) def walk(self, **kwargs): """Iterate through the nodes contained in this :class:`CeresTree` :keyword \*\*kwargs: Options to pass to `os.walk` :returns: An iterator yielding :class:`CeresNode` objects """ for (fsPath, subdirs, filenames) in os.walk(self.root, **kwargs): if CeresNode.isNodeDir(fsPath): nodePath = self.getNodePath(fsPath) yield CeresNode(self, nodePath, fsPath) def getFilesystemPath(self, nodePath): """Get the on-disk path of a Ceres node given a metric name""" return join(self.root, nodePath.replace('.', os.sep)) def getNodePath(self, fsPath): """Get the metric name of a Ceres node given the on-disk path""" fsPath = abspath(fsPath) if not fsPath.startswith(self.root): raise ValueError("path '%s' not beneath tree root '%s'" % (fsPath, self.root)) nodePath = fsPath[len(self.root):].strip(os.sep).replace(os.sep, '.') return nodePath def hasNode(self, nodePath): """Returns whether the Ceres tree contains the given metric""" return isdir(self.getFilesystemPath(nodePath)) def getNode(self, nodePath): """Returns a Ceres node given a metric name :param nodePath: A metric name :returns: :class:`CeresNode` or `None` """ if nodePath not in self.nodeCache: fsPath = self.getFilesystemPath(nodePath) if CeresNode.isNodeDir(fsPath): self.nodeCache[nodePath] = CeresNode(self, nodePath, fsPath) else: return None return self.nodeCache[nodePath] def find(self, nodePattern, fromTime=None, untilTime=None): """Find nodes which match a wildcard pattern, optionally filtering on a time range :keyword nodePattern: A glob-style metric wildcard :keyword fromTime: Optional interval start time in unix-epoch. :keyword untilTime: Optional interval end time in unix-epoch. :returns: An iterator yielding :class:`CeresNode` objects """ for fsPath in glob(self.getFilesystemPath(nodePattern)): if CeresNode.isNodeDir(fsPath): nodePath = self.getNodePath(fsPath) node = self.getNode(nodePath) if fromTime is None and untilTime is None: yield node elif node.hasDataForInterval(fromTime, untilTime): yield node def createNode(self, nodePath, **properties): """Creates a new metric given a new metric name and optional per-node metadata :keyword nodePath: The new metric name. :keyword \*\*properties: Arbitrary key-value properties to store as metric metadata. 
:returns: :class:`CeresNode` """ return CeresNode.create(self, nodePath, **properties) def store(self, nodePath, datapoints): """Store a list of datapoints associated with a metric :keyword nodePath: The metric name to write to :keyword datapoints: A list of datapoint tuples: (timestamp, value) """ node = self.getNode(nodePath) if node is None: raise NodeNotFound("The node '%s' does not exist in this tree" % nodePath) node.write(datapoints) def fetch(self, nodePath, fromTime, untilTime): """Fetch data within a given interval from the given metric :keyword nodePath: The metric name to fetch from :keyword fromTime: Requested interval start time in unix-epoch. :keyword untilTime: Requested interval end time in unix-epoch. :returns: :class:`TimeSeriesData` :raises: :class:`NodeNotFound`, :class:`InvalidRequest`, :class:`NoData` """ node = self.getNode(nodePath) if not node: raise NodeNotFound("the node '%s' does not exist in this tree" % nodePath) return node.read(fromTime, untilTime) class CeresNode(object): __slots__ = ('tree', 'nodePath', 'fsPath', 'metadataFile', 'timeStep', 'sliceCache', 'sliceCachingBehavior') def __init__(self, tree, nodePath, fsPath): self.tree = tree self.nodePath = nodePath self.fsPath = fsPath self.metadataFile = join(fsPath, '.ceres-node') self.timeStep = None self.sliceCache = None self.sliceCachingBehavior = DEFAULT_SLICE_CACHING_BEHAVIOR def __repr__(self): return "" % (id(self), self.nodePath) __str__ = __repr__ @classmethod def create(cls, tree, nodePath, **properties): # Create the node directory fsPath = tree.getFilesystemPath(nodePath) os.makedirs(fsPath, DIR_PERMS) # Create the initial metadata timeStep = properties['timeStep'] = properties.get('timeStep', DEFAULT_TIMESTEP) node = cls(tree, nodePath, fsPath) node.writeMetadata(properties) # Create the initial data file #now = int( time.time() ) #baseTime = now - (now % timeStep) #slice = CeresSlice.create(node, baseTime, timeStep) return node @staticmethod def isNodeDir(path): return isdir(path) and exists(join(path, '.ceres-node')) @classmethod def fromFilesystemPath(cls, fsPath): dirPath = dirname(fsPath) while True: ceresDir = join(dirPath, '.ceres-tree') if isdir(ceresDir): tree = CeresTree(dirPath) nodePath = tree.getNodePath(fsPath) return cls(tree, nodePath, fsPath) dirPath = dirname(dirPath) if dirPath == '/': raise ValueError("the path '%s' is not in a ceres tree" % fsPath) @property def slice_info(self): return [(slice.startTime, slice.endTime, slice.timeStep) for slice in self.slices] def readMetadata(self): metadata = json.load(open(self.metadataFile, 'r')) self.timeStep = int(metadata['timeStep']) return metadata def writeMetadata(self, metadata): self.timeStep = int(metadata['timeStep']) f = open(self.metadataFile, 'w') json.dump(metadata, f) f.close() @property def slices(self): if self.sliceCache: if self.sliceCachingBehavior == 'all': for slice in self.sliceCache: yield slice elif self.sliceCachingBehavior == 'latest': yield self.sliceCache infos = self.readSlices() for info in infos[1:]: yield CeresSlice(self, *info) else: if self.sliceCachingBehavior == 'all': self.sliceCache = [CeresSlice(self, *info) for info in self.readSlices()] for slice in self.sliceCache: yield slice elif self.sliceCachingBehavior == 'latest': infos = self.readSlices() if infos: self.sliceCache = CeresSlice(self, *infos[0]) yield self.sliceCache for info in infos[1:]: yield CeresSlice(self, *info) elif self.sliceCachingBehavior == 'none': for info in self.readSlices(): yield CeresSlice(self, *info) else: raise 
ValueError("invalid caching behavior configured '%s'" % self.sliceCachingBehavior) def readSlices(self): if not exists(self.fsPath): raise NodeDeleted() slice_info = [] for filename in os.listdir(self.fsPath): if filename.endswith('.slice'): startTime, timeStep = filename[:-6].split('@') slice_info.append((int(startTime), int(timeStep))) slice_info.sort(reverse=True) return slice_info def setSliceCachingBehavior(self, behavior): behavior = behavior.lower() if behavior not in ('none', 'all', 'latest'): raise ValueError("invalid caching behavior '%s'" % behavior) self.sliceCachingBehavior = behavior self.sliceCache = None def clearSliceCache(self): self.sliceCache = None def hasDataForInterval(self, fromTime, untilTime): slices = list(self.slices) if not slices: return False earliestData = slices[-1].startTime latestData = slices[0].endTime return ((fromTime is None) or (fromTime < latestData)) and \ ((untilTime is None) or (untilTime > earliestData)) def read(self, fromTime, untilTime): if self.timeStep is None: self.readMetadata() # Normalize the timestamps to fit proper intervals fromTime = int(fromTime - (fromTime % self.timeStep) + self.timeStep) untilTime = int(untilTime - (untilTime % self.timeStep) + self.timeStep) sliceBoundary = None # to know when to split up queries across slices resultValues = [] earliestData = None for slice in self.slices: # if the requested interval starts after the start of this slice if fromTime >= slice.startTime: try: series = slice.read(fromTime, untilTime) except NoData: break earliestData = series.startTime rightMissing = (untilTime - series.endTime) / self.timeStep rightNulls = [None for i in range(rightMissing - len(resultValues))] resultValues = series.values + rightNulls + resultValues break # or if slice contains data for part of the requested interval elif untilTime >= slice.startTime: # Split the request up if it straddles a slice boundary if (sliceBoundary is not None) and untilTime > sliceBoundary: requestUntilTime = sliceBoundary else: requestUntilTime = untilTime try: series = slice.read(slice.startTime, requestUntilTime) except NoData: continue earliestData = series.startTime rightMissing = (requestUntilTime - series.endTime) / self.timeStep rightNulls = [None for i in range(rightMissing)] resultValues = series.values + rightNulls + resultValues # this is the right-side boundary on the next iteration sliceBoundary = slice.startTime # The end of the requested interval predates all slices if earliestData is None: missing = int(untilTime - fromTime) / self.timeStep resultValues = [None for i in range(missing)] # Left pad nulls if the start of the requested interval predates all slices else: leftMissing = (earliestData - fromTime) / self.timeStep leftNulls = [None for i in range(leftMissing)] resultValues = leftNulls + resultValues return TimeSeriesData(fromTime, untilTime, self.timeStep, resultValues) def write(self, datapoints): if self.timeStep is None: self.readMetadata() if not datapoints: return sequences = self.compact(datapoints) needsEarlierSlice = [] # keep track of sequences that precede all existing slices while sequences: sequence = sequences.pop() timestamps = [t for t,v in sequence] beginningTime = timestamps[0] endingTime = timestamps[-1] sliceBoundary = None # used to prevent writing sequences across slice boundaries slicesExist = False for slice in self.slices: if slice.timeStep != self.timeStep: continue slicesExist = True # truncate sequence so it doesn't cross the slice boundaries if beginningTime >= slice.startTime: print 
slice.startTime if sliceBoundary is None: sequenceWithinSlice = sequence else: # index of highest timestamp that doesn't exceed sliceBoundary boundaryIndex = bisect_left(timestamps, sliceBoundary) sequenceWithinSlice = sequence[:boundaryIndex] try: slice.write(sequenceWithinSlice) except SliceGapTooLarge: newSlice = CeresSlice.create(self, beginningTime, slice.timeStep) newSlice.write(sequenceWithinSlice) self.sliceCache = None except SliceDeleted: self.sliceCache = None self.write(datapoints) # recurse to retry return sequence = [] break # sequence straddles the current slice, write the right side # left side will be taken up in the next slice down elif endingTime >= slice.startTime: # index of lowest timestamp that doesn't preceed slice.startTime boundaryIndex = bisect_left(timestamps, slice.startTime) sequenceWithinSlice = sequence[boundaryIndex:] # write the leftovers on the next earlier slice sequence = sequence[:boundaryIndex] slice.write(sequenceWithinSlice) if not sequence: break sliceBoundary = slice.startTime else: # list exhausted with stuff still to write needsEarlierSlice.append(sequence) if not slicesExist: sequences.append(sequence) needsEarlierSlice = sequences break for sequence in needsEarlierSlice: slice = CeresSlice.create(self, int(sequence[0][0]), self.timeStep) slice.write(sequence) self.clearSliceCache() def compact(self, datapoints): datapoints = sorted((int(timestamp), float(value)) for timestamp, value in datapoints if value is not None) sequences = [] sequence = [] minimumTimestamp = 0 # used to avoid duplicate intervals for timestamp, value in datapoints: timestamp -= timestamp % self.timeStep # round it down to a proper interval if not sequence: sequence.append((timestamp, value)) else: if not timestamp > minimumTimestamp: # drop duplicate intervals continue if timestamp == sequence[-1][0] + self.timeStep: # append contiguous datapoints sequence.append((timestamp, value)) else: # start a new sequence if not contiguous sequences.append(sequence) sequence = [(timestamp, value)] minimumTimestamp = timestamp if sequence: sequences.append(sequence) return sequences class CeresSlice(object): __slots__ = ('node', 'startTime', 'timeStep', 'fsPath') def __init__(self, node, startTime, timeStep): self.node = node self.startTime = startTime self.timeStep = timeStep self.fsPath = join(node.fsPath, '%d@%d.slice' % (startTime, timeStep)) def __repr__(self): return "" % (id(self), self.fsPath) __str__ = __repr__ @property def isEmpty(self): return getsize(self.fsPath) == 0 @property def endTime(self): return self.startTime + ((getsize(self.fsPath) / DATAPOINT_SIZE) * self.timeStep) @property def mtime(self): return getmtime(self.fsPath) @classmethod def create(cls, node, startTime, timeStep): slice = cls(node, startTime, timeStep) fileHandle = open(slice.fsPath, 'wb') fileHandle.close() os.chmod(slice.fsPath, SLICE_PERMS) return slice def read(self, fromTime, untilTime): timeOffset = int(fromTime) - self.startTime if timeOffset < 0: raise InvalidRequest("requested time range (%d, %d) preceeds this slice: %d" % (fromTime, untilTime, self.startTime)) pointOffset = timeOffset / self.timeStep byteOffset = pointOffset * DATAPOINT_SIZE if byteOffset >= getsize(self.fsPath): raise NoData() fileHandle = open(self.fsPath, 'rb') fileHandle.seek(byteOffset) timeRange = int(untilTime - fromTime) pointRange = timeRange / self.timeStep byteRange = pointRange * DATAPOINT_SIZE packedValues = fileHandle.read(byteRange) pointsReturned = len(packedValues) / DATAPOINT_SIZE format = '!' 
+ ('d' * pointsReturned) values = struct.unpack(format, packedValues) values = [v if not isnan(v) else None for v in values] endTime = fromTime + (len(values) * self.timeStep) #print '[DEBUG slice.read] startTime=%s fromTime=%s untilTime=%s' % (self.startTime, fromTime, untilTime) #print '[DEBUG slice.read] timeInfo = (%s, %s, %s)' % (fromTime, endTime, self.timeStep) #print '[DEBUG slice.read] values = %s' % str(values) return TimeSeriesData(fromTime, endTime, self.timeStep, values) def write(self, sequence): beginningTime = sequence[0][0] timeOffset = beginningTime - self.startTime pointOffset = timeOffset / self.timeStep byteOffset = pointOffset * DATAPOINT_SIZE values = [v for t,v in sequence] format = '!' + ('d' * len(values)) packedValues = struct.pack(format, *values) try: filesize = getsize(self.fsPath) except OSError, e: if e.errno == errno.ENOENT: raise SliceDeleted() else: raise byteGap = byteOffset - filesize if byteGap > 0: # pad the allowable gap with nan's if byteGap > MAX_SLICE_GAP: raise SliceGapTooLarge() else: pointGap = byteGap / DATAPOINT_SIZE packedGap = PACKED_NAN * pointGap packedValues = packedGap + packedValues byteOffset -= byteGap with file(self.fsPath, 'r+b') as fileHandle: try: fileHandle.seek(byteOffset) except IOError: print " IOError: fsPath=%s byteOffset=%d size=%d sequence=%s" % (self.fsPath, byteOffset, filesize, sequence) raise fileHandle.write(packedValues) def deleteBefore(self, t): if not exists(self.fsPath): raise SliceDeleted() t = t - (t % self.timeStep) timeOffset = t - self.startTime if timeOffset < 0: return pointOffset = timeOffset / self.timeStep byteOffset = pointOffset * DATAPOINT_SIZE if not byteOffset: return self.node.clearSliceCache() with file(self.fsPath, 'r+b') as fileHandle: fileHandle.seek(byteOffset) fileData = fileHandle.read() if fileData: fileHandle.seek(0) fileHandle.write(fileData) fileHandle.truncate() fileHandle.close() newFsPath = join(dirname(self.fsPath), "%d@%d.slice" % (t, self.timeStep)) os.rename(self.fsPath, newFsPath) else: os.unlink(self.fsPath) raise SliceDeleted() def __cmp__(self, other): return cmp(self.startTime, other.startTime) class TimeSeriesData(object): __slots__ = ('startTime', 'endTime', 'timeStep', 'values') def __init__(self, startTime, endTime, timeStep, values): self.startTime = startTime self.endTime = endTime self.timeStep = timeStep self.values = values @property def timestamps(self): return xrange(self.startTime, self.endTime, self.timeStep) def __iter__(self): return izip(self.timestamps, self.values) def __len__(self): return len(self.values) def merge(self, other): for timestamp, value in other: if value is None: continue timestamp -= timestamp % self.timeStep if timestamp < self.startTime: continue index = int((timestamp - self.startTime) / self.timeStep) try: if self.values[index] is None: self.values[index] = value except IndexError: continue class CorruptNode(Exception): def __init__(self, node, problem): Exception.__init__(self, problem) self.node = node self.problem = problem class NoData(Exception): pass class NodeNotFound(Exception): pass class NodeDeleted(Exception): pass class InvalidRequest(Exception): pass class SliceGapTooLarge(Exception): "For internal use only" class SliceDeleted(Exception): pass def getTree(path): while path not in (os.sep, ''): if isdir(join(path, '.ceres-tree')): return CeresTree(path) path = dirname(path) def setDefaultSliceCachingBehavior(behavior): global DEFAULT_SLICE_CACHING_BEHAVIOR behavior = behavior.lower() if behavior not in ('none', 'all', 
'latest'): raise ValueError("invalid caching behavior '%s'" % behavior) DEFAULT_SLICE_CACHING_BEHAVIOR = behavior ceres-master/LICENSE0000644000175000017500000002613612206267256013503 0ustar jonasjonas Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ceres-master/setup.py0000644000175000017500000000060312206267256014177 0ustar jonasjonas#!/usr/bin/env python import os from glob import glob from distutils.core import setup setup( name='ceres', version='0.10.0', url='https://github.com/graphite-project/ceres', author='Chris Davis', author_email='chrismd@gmail.com', license='Apache Software License 2.0', description='Distributable time-series database', py_modules=['ceres'], scripts=glob('bin/*') ) ceres-master/.gitignore0000644000175000017500000000005412206267256014455 0ustar jonasjonasMANIFEST build docs/_build dist *.log *.pyc ceres-master/bin/0000755000175000017500000000000012206267256013236 5ustar jonasjonasceres-master/bin/ceres-node-create0000755000175000017500000000142712206267256016455 0ustar jonasjonas#!/usr/bin/env python import sys import time from optparse import OptionParser from ceres import CeresTree, getTree parser = OptionParser(usage='''%prog [options] If --tree is specified, is taken as a node path Otherwise is taken as a filesystem path ''') parser.add_option('--tree', default=None) parser.add_option('--step', default=60, type='int', help="Default time step") options, args = parser.parse_args() if not args: parser.print_usage() sys.exit(1) if options.tree: nodePath = args[0] tree = CeresTree(options.tree) else: fsPath = args[0] tree = getTree(fsPath) if not tree: print "error: %s is not in a ceres tree" % fsPath sys.exit(1) nodePath = tree.getNodePath(fsPath) tree.createNode(nodePath, timeStep=options.step) ceres-master/bin/ceres-node-write0000755000175000017500000000233712206267256016345 0ustar jonasjonas#!/usr/bin/env python import sys import time from optparse import OptionParser from ceres import CeresTree, getTree parser = OptionParser(usage='''%prog [options] [datapoint]+ If --tree is specified, is taken as a node path Otherwise is taken as a filesystem path Each datapoint is of the form : where may be a UNIX epoch time or the character 'N' to indicate 'now'. 
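Example (the tree path and metric name here are illustrative):
  %prog --tree /opt/graphite/storage/ceres example.metric N:42.0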
ceres-master/bin/ceres-node-write0000755000175000017500000000233712206267256016345 0ustar jonasjonas#!/usr/bin/env python

import sys
import time
from optparse import OptionParser
from ceres import CeresTree, getTree

parser = OptionParser(usage='''%prog [options] <path> [datapoint]+

If --tree is specified, <path> is taken as a node path
Otherwise <path> is taken as a filesystem path

Each datapoint is of the form <timestamp>:<value> where <timestamp> may be
a UNIX epoch time or the character 'N' to indicate 'now'.
''')
parser.add_option('--tree', default=None)

options, args = parser.parse_args()

if not args:
  parser.print_usage()
  sys.exit(1)

if options.tree:
  nodePath = args[0]
  tree = CeresTree(options.tree)
else:
  fsPath = args[0]
  tree = getTree(fsPath)
  if not tree:
    print "error: %s is not in a ceres tree" % fsPath
    sys.exit(1)
  nodePath = tree.getNodePath(fsPath)

datapoints = []
now = time.time()
for datapoint in args[1:]:
  timestamp, value = datapoint.split(':', 1)
  if timestamp == 'N':
    timestamp = now
  else:
    timestamp = float(timestamp)
  value = float(value)
  datapoints.append( (timestamp, value) )

datapoints.sort()

if not datapoints:
  print "error: no datapoints specified"
  parser.print_usage()
  sys.exit(1)

node = tree.getNode(nodePath)
node.write(datapoints)
ceres-master/bin/ceres-tree-create0000755000175000017500000000136112206267256016464 0ustar jonasjonas#!/usr/bin/env python

import sys
from optparse import OptionParser
from ceres import CeresTree

parser = OptionParser(usage='''%prog [options] <root-dir> [property=value]*''')
parser.add_option('--verbose', action='store_true')

options, args = parser.parse_args()

if not args:
  print "You must specify a root directory for the tree"
  parser.print_usage()
  sys.exit(1)

root_dir = args[0]
props = {}
for arg in args[1:]:
  prop, value = arg.split('=', 1)
  try:  # convert numeric types
    value = float(value)
  except:
    try:
      value = int(value)
    except:
      pass
  props[prop] = value

if options.verbose:
  print "Creating tree at %s with props=%s" % (root_dir, props)

tree = CeresTree.createTree(root_dir, **props)
ceres-master/bin/ceres-node-read0000755000175000017500000000213412206267256016121 0ustar jonasjonas#!/usr/bin/env python

import sys
import time
from optparse import OptionParser
from ceres import CeresTree, getTree

parser = OptionParser(usage='''%prog [options] <path>

If --tree is specified, <path> is taken as a node path
Otherwise <path> is taken as a filesystem path
''')
parser.add_option('--fromtime', default=int(time.time() - 900), type='int')
parser.add_option('--untiltime', default=int(time.time()), type='int')
parser.add_option('--tree', default=None)
parser.add_option('--batch', action='store_true', help="Use numeric timestamps")

options, args = parser.parse_args()

if not args:
  parser.print_usage()
  sys.exit(1)

if options.tree:
  nodePath = args[0]
  tree = CeresTree(options.tree)
else:
  fsPath = args[0]
  tree = getTree(fsPath)
  if not tree:
    print "error: %s is not in a ceres tree" % fsPath
    sys.exit(1)
  nodePath = tree.getNodePath(fsPath)

results = tree.fetch(nodePath, options.fromtime, options.untiltime)
for (timestamp, value) in results:
  if options.batch:
    print "%d\t%s" % (timestamp, value)
  else:
    print "%s\t%s" % (time.ctime(timestamp), value)
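The write and read scripts above map directly onto node.write() and tree.fetch(). A
minimal sketch of the same round trip through the library API; the tree root, node
path and values are illustrative, and the node is assumed to exist already:

#!/usr/bin/env python
# Sketch only: the round trip performed by ceres-node-write / ceres-node-read.
# Assumes the node 'metrics.foo' already exists in the example tree below.
import time
from ceres import CeresTree

tree = CeresTree('/graphite/storage/ceres')
now = int(time.time())
datapoints = [(now - 120, 1.0), (now - 60, 2.0), (now, 3.0)]

tree.getNode('metrics.foo').write(datapoints)        # what ceres-node-write does
for timestamp, value in tree.fetch('metrics.foo', now - 900, now):
  print "%s\t%s" % (time.ctime(timestamp), value)    # what ceres-node-read prints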
ceres-master/bin/slicecat0000755000175000017500000000120312206267256014747 0ustar jonasjonas#!/usr/bin/python

import sys
import os
import time
import struct
from optparse import OptionParser

parser = OptionParser(usage='%prog [options] <slice-file>')
options, args = parser.parse_args()

if not args:
  parser.print_usage()
  sys.exit(1)

path = args[0]
filename = os.path.basename(path)
timestamp, timeStep = filename[:-6].split('@', 1)
timestamp, timeStep = int(timestamp), int(timeStep)
packedValues = open(path, 'rb').read()

format = '!' + ('d' * (len(packedValues) / 8))
values = struct.unpack(format, packedValues)

for value in values:
  print "[%d]\t%s\t%s" % (timestamp, time.ctime(timestamp), value)
  timestamp += timeStep
ceres-master/bin/convert-wsp-to-ceres0000755000175000017500000000300612206267256017171 0ustar jonasjonas#!/usr/bin/env python

import sys
import os
import time
from os.path import exists, dirname, basename, isfile, isdir, join
from optparse import OptionParser

import whisper
import ceres

parser = OptionParser(usage='''%prog [options] <wsp-file>''')
parser.add_option('--verbose', action='store_true')
parser.add_option('--delete', action='store_true')

options, args = parser.parse_args()

if not args:
  print "You must specify a wsp file"
  parser.print_usage()
  sys.exit(1)

wsp_file = args[0]
wsp_dir = dirname(wsp_file)
metric_name = basename(wsp_file)[:-4]  # strip .wsp
ceres_node_dir = join(wsp_dir, metric_name)

if isdir(ceres_node_dir):
  print "error: ceres node directory already exists (%s)" % ceres_node_dir
  sys.exit(1)

tree = ceres.getTree(ceres_node_dir)
if not tree:
  print "error: the specified path is not in a ceres tree"
  sys.exit(1)

nodePath = tree.getNodePath(ceres_node_dir)

if options.verbose:
  print "extracting datapoints from wsp file"

timeInfo, values = whisper.fetch(wsp_file, fromTime=0, untilTime=time.time())
datapoints = zip( xrange(*timeInfo), values )
datapoints = [ (t,v) for (t,v) in datapoints if v is not None ]

if options.verbose:
  print "creating ceres node %s" % nodePath

node = tree.createNode(nodePath)

if options.verbose:
  print "importing %d datapoints" % len(datapoints)

node.write(datapoints)

if options.delete:
  if options.verbose:
    print "deleting original wsp file: %s" % wsp_file
  os.unlink(wsp_file)

if options.verbose:
  print "conversion successful"
ceres-master/bin/ceres-tree-find0000755000175000017500000000124612206267256016143 0ustar jonasjonas#!/usr/bin/env python

import sys
from optparse import OptionParser
from ceres import CeresTree

parser = OptionParser(usage='''%prog [options] <root-dir> <pattern>''')
parser.add_option('--fromtime', default=None, type='int')
parser.add_option('--untiltime', default=None, type='int')
parser.add_option('--fspath', action='store_true')

options, args = parser.parse_args()

if len(args) < 2:
  parser.print_usage()
  sys.exit(1)

root_dir = args[0]
pattern = args[1]

tree = CeresTree(root_dir)
for node in tree.find(pattern, fromTime=options.fromtime, untilTime=options.untiltime):
  if options.fspath:
    print node.fsPath
  else:
    print node.nodePath
ceres-master/tests/0000755000175000017500000000000012206267256013630 5ustar jonasjonasceres-master/tests/test_ceres.py0000644000175000017500000005505312206267256016352 0ustar jonasjonasfrom unittest import TestCase

from mock import ANY, Mock, call, mock_open, patch

from ceres import *


def fetch_mock_open_writes(open_mock):
  handle = open_mock()
  return ''.join([ c[0][0] for c in handle.write.call_args_list])


class ModuleFunctionsTest(TestCase):
  @patch('ceres.isdir', new=Mock(return_value=False))
  @patch('ceres.CeresTree', new=Mock(spec=CeresTree))
  def test_get_tree_with_no_tree(self):
    tree = getTree('/graphite/storage/ceres/foo/bar')
    self.assertEqual(None, tree)

  @patch('ceres.CeresTree', spec=CeresTree)
  @patch('ceres.isdir')
  def test_get_tree_with_tree_samedir(self, isdir_mock, ceres_tree_mock):
    isdir_mock.return_value = True
    tree = getTree('/graphite/storage/ceres')
    self.assertNotEqual(None, tree)
    isdir_mock.assert_called_once_with('/graphite/storage/ceres/.ceres-tree')
    ceres_tree_mock.assert_called_once_with('/graphite/storage/ceres')


class 
TimeSeriesDataTest(TestCase): def setUp(self): self.time_series = TimeSeriesData(0, 50, 5, [float(x) for x in xrange(0, 10)]) def test_timestamps_property(self): self.assertEqual(10, len(self.time_series.timestamps)) self.assertEqual(0, self.time_series.timestamps[0]) self.assertEqual(45, self.time_series.timestamps[-1]) def test_iter_values(self): values = list(self.time_series) self.assertEqual(10, len(values)) self.assertEqual((0, 0.0), values[0]) self.assertEqual((45, 9.0), values[-1]) def test_merge_no_missing(self): # merge only has effect if time series has no gaps other_series = TimeSeriesData(0, 25, 5, [float(x * x) for x in xrange(1, 6)]) original_values = list(self.time_series) self.time_series.merge(other_series) self.assertEqual(original_values, list(self.time_series)) def test_merge_with_empty(self): new_series = TimeSeriesData(0, 50, 5, [None] * 10) new_series.merge(self.time_series) self.assertEqual(list(self.time_series), list(new_series)) def test_merge_with_holes(self): values = [] for x in xrange(0, 10): if x % 2 == 0: values.append(x) else: values.append(None) new_series = TimeSeriesData(0, 50, 5, values) new_series.merge(self.time_series) self.assertEqual(list(self.time_series), list(new_series)) class CeresTreeTest(TestCase): def setUp(self): with patch('ceres.isdir', new=Mock(return_value=True)): self.ceres_tree = CeresTree('/graphite/storage/ceres') @patch('ceres.isdir', new=Mock(return_value=False)) def test_init_invalid(self): self.assertRaises(ValueError, CeresTree, '/nonexistent_path') @patch('ceres.isdir', new=Mock(return_value=True)) @patch('ceres.abspath') def test_init_valid(self, abspath_mock): abspath_mock.return_value = '/var/graphite/storage/ceres' tree = CeresTree('/graphite/storage/ceres') abspath_mock.assert_called_once_with('/graphite/storage/ceres') self.assertEqual('/var/graphite/storage/ceres', tree.root) @patch('ceres.isdir', new=Mock(return_value=False)) @patch.object(CeresTree, '__init__') @patch('os.makedirs') def test_create_tree_new_dir(self, makedirs_mock, ceres_tree_init_mock): ceres_tree_init_mock.return_value = None with patch('__builtin__.open', mock_open()) as open_mock: CeresTree.createTree('/graphite/storage/ceres') makedirs_mock.assert_called_once_with('/graphite/storage/ceres/.ceres-tree', DIR_PERMS) self.assertFalse(open_mock.called) ceres_tree_init_mock.assert_called_once_with('/graphite/storage/ceres') @patch('ceres.isdir', new=Mock(return_value=True)) @patch.object(CeresTree, '__init__') @patch('os.makedirs') def test_create_tree_existing_dir(self, makedirs_mock, ceres_tree_init_mock): ceres_tree_init_mock.return_value = None with patch('__builtin__.open', mock_open()) as open_mock: CeresTree.createTree('/graphite/storage/ceres') self.assertFalse(makedirs_mock.called) self.assertFalse(open_mock.called) ceres_tree_init_mock.assert_called_once_with('/graphite/storage/ceres') @patch('ceres.isdir', new=Mock(return_value=True)) @patch.object(CeresTree, '__init__', new=Mock(return_value=None)) @patch('os.makedirs', new=Mock()) def test_create_tree_write_props(self): props = { "foo_prop": "foo_value", "bar_prop": "bar_value"} with patch('__builtin__.open', mock_open()) as open_mock: CeresTree.createTree('/graphite/storage/ceres', **props) for (prop,value) in props.items(): open_mock.assert_any_call(join('/graphite/storage/ceres', '.ceres-tree', prop), 'w') open_mock.return_value.write.assert_any_call(value) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) def test_get_node_path_clean(self): result = 
self.ceres_tree.getNodePath('/graphite/storage/ceres/metric/foo') self.assertEqual('metric.foo', result) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) def test_get_node_path_trailing_slash(self): result = self.ceres_tree.getNodePath('/graphite/storage/ceres/metric/foo/') self.assertEqual('metric.foo', result) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) def test_get_node_path_outside_tree(self): self.assertRaises(ValueError, self.ceres_tree.getNodePath, '/metric/foo') @patch('ceres.CeresNode', spec=CeresNode) def test_get_node_uncached(self, ceres_node_mock): ceres_node_mock.isNodeDir.return_value = True result = self.ceres_tree.getNode('metrics.foo') ceres_node_mock.assert_called_once_with( self.ceres_tree, 'metrics.foo', '/graphite/storage/ceres/metrics/foo') self.assertEqual(result, ceres_node_mock()) @patch('ceres.CeresNode', spec=CeresNode) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) @patch('ceres.glob', new=Mock(side_effect=lambda x: [x])) def test_find_explicit_metric(self, ceres_node_mock): ceres_node_mock.isNodeDir.return_value = True result = list(self.ceres_tree.find('metrics.foo')) self.assertEqual(1, len(result)) self.assertEqual(result[0], ceres_node_mock()) @patch('ceres.CeresNode', spec=CeresNode) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) @patch('ceres.glob') def test_find_wildcard(self, glob_mock, ceres_node_mock): matches = ['foo', 'bar', 'baz'] glob_mock.side_effect = lambda x: [x.replace('*', m) for m in matches] ceres_node_mock.isNodeDir.return_value = True result = list(self.ceres_tree.find('metrics.*')) self.assertEqual(3, len(result)) ceres_node_mock.assert_any_call(self.ceres_tree, 'metrics.foo', ANY) ceres_node_mock.assert_any_call(self.ceres_tree, 'metrics.bar', ANY) ceres_node_mock.assert_any_call(self.ceres_tree, 'metrics.baz', ANY) @patch('ceres.CeresNode', spec=CeresNode) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) @patch('ceres.glob', new=Mock(return_value=[])) def test_find_wildcard_no_matches(self, ceres_node_mock): ceres_node_mock.isNodeDir.return_value = False result = list(self.ceres_tree.find('metrics.*')) self.assertEqual(0, len(result)) self.assertFalse(ceres_node_mock.called) @patch('ceres.CeresNode', spec=CeresNode) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) @patch('ceres.glob', new=Mock(side_effect=lambda x: [x])) def test_find_metric_with_interval(self, ceres_node_mock): ceres_node_mock.isNodeDir.return_value = True ceres_node_mock.return_value.hasDataForInterval.return_value = False result = list(self.ceres_tree.find('metrics.foo', 0, 1000)) self.assertEqual(0, len(result)) ceres_node_mock.return_value.hasDataForInterval.assert_called_once_with(0, 1000) @patch('ceres.CeresNode', spec=CeresNode) @patch('ceres.abspath', new=Mock(side_effect=lambda x: x)) @patch('ceres.glob', new=Mock(side_effect=lambda x: [x])) def test_find_metric_with_interval_not_found(self, ceres_node_mock): ceres_node_mock.isNodeDir.return_value = True ceres_node_mock.return_value.hasDataForInterval.return_value = True result = list(self.ceres_tree.find('metrics.foo', 0, 1000)) self.assertEqual(result[0], ceres_node_mock()) ceres_node_mock.return_value.hasDataForInterval.assert_called_once_with(0, 1000) def test_store_invalid_node(self): with patch.object(self.ceres_tree, 'getNode', new=Mock(return_value=None)): datapoints = [(100, 1.0)] self.assertRaises(NodeNotFound, self.ceres_tree.store, 'metrics.foo', datapoints) @patch('ceres.CeresNode', spec=CeresNode) def 
test_store_valid_node(self, ceres_node_mock): datapoints = [(100, 1.0)] self.ceres_tree.store('metrics.foo', datapoints) ceres_node_mock.assert_called_once_with(self.ceres_tree, 'metrics.foo', ANY) ceres_node_mock.return_value.write.assert_called_once_with(datapoints) def fetch_invalid_node(self): with patch.object(self.ceres_tree, 'getNode', new=Mock(return_value=None)): self.assertRaises(NodeNotFound, self.ceres_tree.fetch, 'metrics.foo') @patch('ceres.CeresNode', spec=CeresNode) def fetch_metric(self, ceres_node_mock): read_mock = ceres_node_mock.return_value.read read_mock.return_value = Mock(spec=TimeSeriesData) result = self.ceres_tree.fetch('metrics.foo', 0, 1000) ceres_node_mock.assert_called_once_with(self.ceres_tree, 'metrics.foo', ANY) read_mock.assert_called_once_with(0, 1000) self.assertEqual(Mock(spec=TimeSeriesData), result) class CeresNodeTest(TestCase): def setUp(self): with patch('ceres.isdir', new=Mock(return_value=True)): with patch('ceres.exists', new=Mock(return_value=True)): self.ceres_tree = CeresTree('/graphite/storage/ceres') self.ceres_node = CeresNode(self.ceres_tree, 'sample_metric', '/graphite/storage/ceres/sample_metric') self.ceres_node.timeStep = 60 slice_configs = [ ( 1200, 1800, 60 ), ( 600, 1200, 60 )] self.ceres_slices = [] for start, end, step in slice_configs: slice_mock = Mock(spec=CeresSlice) slice_mock.startTime = start slice_mock.endTime = end slice_mock.timeStep = step self.ceres_slices.append(slice_mock) def test_init_sets_default_cache_behavior(self): ceres_node = CeresNode(self.ceres_tree, 'sample_metric', '/graphite/storage/ceres/sample_metric') self.assertEqual(DEFAULT_SLICE_CACHING_BEHAVIOR, ceres_node.sliceCachingBehavior) @patch('ceres.os.makedirs', new=Mock()) @patch('ceres.CeresNode.writeMetadata') def test_create_sets_a_default_timestep(self, write_metadata_mock): ceres_node = CeresNode.create(self.ceres_tree, 'sample_metric') write_metadata_mock.assert_called_with(dict(timeStep=DEFAULT_TIMESTEP)) @patch('ceres.os.makedirs', new=Mock()) @patch('ceres.CeresNode.writeMetadata', new=Mock()) def test_create_returns_new_ceres_node(self): ceres_node = CeresNode.create(self.ceres_tree, 'sample_metric') self.assertTrue(isinstance(ceres_node, CeresNode)) def test_write_metadata(self): import json open_mock = mock_open() metadata = dict(timeStep=60, aggregationMethod='avg') with patch('__builtin__.open', open_mock): self.ceres_node.writeMetadata(metadata) self.assertEquals(json.dumps(metadata), fetch_mock_open_writes(open_mock)) def test_read_metadata_sets_timestep(self): import json metadata = dict(timeStep=60, aggregationMethod='avg') json_metadata = json.dumps(metadata) open_mock = mock_open(read_data=json_metadata) with patch('__builtin__.open', open_mock): self.ceres_node.readMetadata() open_mock().read.assert_called_once() self.assertEqual(60, self.ceres_node.timeStep) def test_set_slice_caching_behavior_validates_names(self): self.ceres_node.setSliceCachingBehavior('none') self.assertEquals('none', self.ceres_node.sliceCachingBehavior) self.ceres_node.setSliceCachingBehavior('all') self.assertEquals('all', self.ceres_node.sliceCachingBehavior) self.ceres_node.setSliceCachingBehavior('latest') self.assertEquals('latest', self.ceres_node.sliceCachingBehavior) self.assertRaises(ValueError, self.ceres_node.setSliceCachingBehavior, 'foo') # Assert unchanged self.assertEquals('latest', self.ceres_node.sliceCachingBehavior) def test_slices_is_a_generator(self): from types import GeneratorType self.assertTrue(isinstance(self.ceres_node.slices, 
GeneratorType)) def test_slices_returns_cached_set_when_behavior_is_all(self): def mock_slice(): return Mock(spec=CeresSlice) self.ceres_node.setSliceCachingBehavior('all') cached_contents = [ mock_slice for c in range(4) ] self.ceres_node.sliceCache = cached_contents with patch('ceres.CeresNode.readSlices') as read_slices_mock: slice_list = list(self.ceres_node.slices) self.assertFalse(read_slices_mock.called) self.assertEquals(cached_contents, slice_list) def test_slices_returns_first_cached_when_behavior_is_latest(self): self.ceres_node.setSliceCachingBehavior('latest') cached_contents = Mock(spec=CeresSlice) self.ceres_node.sliceCache = cached_contents read_slices_mock = Mock(return_value=[]) with patch('ceres.CeresNode.readSlices', new=read_slices_mock): slice_iter = self.ceres_node.slices self.assertEquals(cached_contents, slice_iter.next()) # We should be yielding cached before trying to read self.assertFalse(read_slices_mock.called) def test_slices_reads_remaining_when_behavior_is_latest(self): self.ceres_node.setSliceCachingBehavior('latest') cached_contents = Mock(spec=CeresSlice) self.ceres_node.sliceCache = cached_contents read_slices_mock = Mock(return_value=[(0,60)]) with patch('ceres.CeresNode.readSlices', new=read_slices_mock): slice_iter = self.ceres_node.slices slice_iter.next() # *now* we expect to read from disk try: while True: slice_iter.next() except StopIteration: pass read_slices_mock.assert_called_once_with() def test_slices_reads_from_disk_when_behavior_is_none(self): self.ceres_node.setSliceCachingBehavior('none') read_slices_mock = Mock(return_value=[(0,60)]) with patch('ceres.CeresNode.readSlices', new=read_slices_mock): slice_iter = self.ceres_node.slices slice_iter.next() read_slices_mock.assert_called_once_with() def test_slices_reads_from_disk_when_cache_empty_and_behavior_all(self): self.ceres_node.setSliceCachingBehavior('all') read_slices_mock = Mock(return_value=[(0,60)]) with patch('ceres.CeresNode.readSlices', new=read_slices_mock): slice_iter = self.ceres_node.slices slice_iter.next() read_slices_mock.assert_called_once_with() def test_slices_reads_from_disk_when_cache_empty_and_behavior_latest(self): self.ceres_node.setSliceCachingBehavior('all') read_slices_mock = Mock(return_value=[(0,60)]) with patch('ceres.CeresNode.readSlices', new=read_slices_mock): slice_iter = self.ceres_node.slices slice_iter.next() read_slices_mock.assert_called_once_with() @patch('ceres.exists', new=Mock(return_value=False)) def test_read_slices_raises_when_node_doesnt_exist(self): self.assertRaises(NodeDeleted, self.ceres_node.readSlices) @patch('ceres.exists', new=Mock(return_Value=True)) def test_read_slices_ignores_not_slices(self): listdir_mock = Mock(return_value=['0@60.slice', '0@300.slice', 'foo']) with patch('ceres.os.listdir', new=listdir_mock): self.assertEquals(2, len(self.ceres_node.readSlices())) @patch('ceres.exists', new=Mock(return_Value=True)) def test_read_slices_parses_slice_filenames(self): listdir_mock = Mock(return_value=['0@60.slice', '0@300.slice']) with patch('ceres.os.listdir', new=listdir_mock): slice_infos = self.ceres_node.readSlices() self.assertTrue((0,60) in slice_infos) self.assertTrue((0,300) in slice_infos) @patch('ceres.exists', new=Mock(return_Value=True)) def test_read_slices_reverse_sorts_by_time(self): listdir_mock = Mock(return_value=[ '0@60.slice', '320@300.slice', '120@120.slice', '0@120.slice', '600@300.slice']) with patch('ceres.os.listdir', new=listdir_mock): slice_infos = self.ceres_node.readSlices() slice_timestamps = [ 
s[0] for s in slice_infos ] self.assertEqual([600,320,120,0,0], slice_timestamps) def test_no_data_exists_if_no_slices_exist(self): with patch('ceres.CeresNode.readSlices', new=Mock(return_value=[])): self.assertFalse(self.ceres_node.hasDataForInterval(0,60)) def test_no_data_exists_if_no_slices_exist_and_no_time_specified(self): with patch('ceres.CeresNode.readSlices', new=Mock(return_value=[])): self.assertFalse(self.ceres_node.hasDataForInterval(None,None)) def test_data_exists_if_slices_exist_and_no_time_specified(self): with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.assertTrue(self.ceres_node.hasDataForInterval(None,None)) def test_data_exists_if_slice_covers_interval_completely(self): with patch('ceres.CeresNode.slices', new=[self.ceres_slices[0]]): self.assertTrue(self.ceres_node.hasDataForInterval(1200,1800)) def test_data_exists_if_slice_covers_interval_end(self): with patch('ceres.CeresNode.slices', new=[self.ceres_slices[0]]): self.assertTrue(self.ceres_node.hasDataForInterval(600, 1260)) def test_data_exists_if_slice_covers_interval_start(self): with patch('ceres.CeresNode.slices', new=[self.ceres_slices[0]]): self.assertTrue(self.ceres_node.hasDataForInterval(1740, 2100)) def test_no_data_exists_if_slice_touches_interval_end(self): with patch('ceres.CeresNode.slices', new=[self.ceres_slices[0]]): self.assertFalse(self.ceres_node.hasDataForInterval(600, 1200)) def test_no_data_exists_if_slice_touches_interval_start(self): with patch('ceres.CeresNode.slices', new=[self.ceres_slices[0]]): self.assertFalse(self.ceres_node.hasDataForInterval(1800, 2100)) def test_compact_returns_empty_if_passed_empty(self): self.assertEqual([], self.ceres_node.compact([])) def test_compact_filters_null_values(self): self.assertEqual([], self.ceres_node.compact([(60,None)])) def test_compact_rounds_timestamps_down_to_step(self): self.assertEqual([[(600,0)]], self.ceres_node.compact([(605,0)])) def test_compact_drops_duplicate_timestamps(self): datapoints = [ (600, 0), (600, 0) ] compacted = self.ceres_node.compact(datapoints) self.assertEqual([[(600, 0)]], compacted) def test_compact_groups_contiguous_points(self): datapoints = [ (600, 0), (660, 0), (840,0) ] compacted = self.ceres_node.compact(datapoints) self.assertEqual([[(600, 0), (660,0)], [(840,0)]], compacted) def test_write_noops_if_no_datapoints(self): with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write([]) self.assertFalse(self.ceres_slices[0].write.called) def test_write_within_first_slice(self): datapoints = [(1200, 0.0), (1260, 1.0), (1320, 2.0)] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) self.ceres_slices[0].write.assert_called_once_with(datapoints) @patch('ceres.CeresSlice.create') def test_write_within_first_slice_doesnt_create(self, slice_create_mock): datapoints = [(1200, 0.0), (1260, 1.0), (1320, 2.0)] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) self.assertFalse(slice_create_mock.called) @patch('ceres.CeresSlice.create', new=Mock()) def test_write_within_first_slice_with_gaps(self): datapoints = [ (1200,0.0), (1320,2.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) # sorted most recent first calls = [call.write([datapoints[1]]), call.write([datapoints[0]])] self.ceres_slices[0].assert_has_calls(calls) @patch('ceres.CeresSlice.create', new=Mock()) def test_write_within_previous_slice(self): datapoints = [ (720,0.0), (780,2.0) ] 
with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) # 2nd slice has this range self.ceres_slices[1].write.assert_called_once_with(datapoints) @patch('ceres.CeresSlice.create') def test_write_within_previous_slice_doesnt_create(self, slice_create_mock): datapoints = [ (720,0.0), (780,2.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) self.assertFalse(slice_create_mock.called) @patch('ceres.CeresSlice.create', new=Mock()) def test_write_within_previous_slice_with_gaps(self): datapoints = [ (720,0.0), (840,2.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) calls = [call.write([datapoints[1]]), call.write([datapoints[0]])] self.ceres_slices[1].assert_has_calls(calls) @patch('ceres.CeresSlice.create', new=Mock()) def test_write_across_slice_boundaries(self): datapoints = [ (1080,0.0), (1140,1.0), (1200, 2.0), (1260, 3.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) self.ceres_slices[0].write.assert_called_once_with(datapoints[2:4]) self.ceres_slices[1].write.assert_called_once_with(datapoints[0:2]) @patch('ceres.CeresSlice.create') def test_write_before_earliest_slice_creates_new(self, slice_create_mock): datapoints = [ (300, 0.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) slice_create_mock.assert_called_once_with(self.ceres_node, 300, 60) @patch('ceres.CeresSlice.create') def test_write_before_earliest_slice_writes_to_new_one(self, slice_create_mock): datapoints = [ (300, 0.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) slice_create_mock.return_value.write.assert_called_once_with(datapoints) @patch('ceres.CeresSlice.create') def test_create_during_write_clears_slice_cache(self, slice_create_mock): self.ceres_node.setSliceCachingBehavior('all') self.ceres_node.sliceCache = self.ceres_slices datapoints = [ (300, 0.0) ] with patch('ceres.CeresNode.slices', new=self.ceres_slices): self.ceres_node.write(datapoints) self.assertEquals(None, self.ceres_node.sliceCache) class CeresSliceTest(TestCase): def setUp(self): with patch('ceres.isdir', new=Mock(return_value=True)): with patch('ceres.exists', new=Mock(return_value=True)): self.ceres_tree = CeresTree('/graphite/storage/ceres') self.ceres_node = CeresNode(self.ceres_tree, 'sample_metric', '/graphite/storage/ceres/sample_metric') def test_init_sets_fspath_name(self): ceres_slice = CeresSlice(self.ceres_node, 0, 60) self.assertTrue(ceres_slice.fsPath.endswith('0@60.slice')) ceres-master/tests/__init__.py0000644000175000017500000000000012206267256015727 0ustar jonasjonasceres-master/requirements.txt0000644000175000017500000000003012206267256015743 0ustar jonasjonasnose==1.2.1 mock==1.0.1
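The pinned nose and mock versions above are what tests/test_ceres.py is written
against. The TimeSeriesData behaviour those tests pin down (iteration yields
(timestamp, value) pairs, and merge only fills gaps) can also be seen directly; a
small sketch with made-up values, not part of the distribution:

#!/usr/bin/env python
# Sketch only: the TimeSeriesData semantics exercised by tests/test_ceres.py.
from ceres import TimeSeriesData

full  = TimeSeriesData(0, 30, 10, [1.0, 2.0, 3.0])    # startTime, endTime, timeStep, values
holes = TimeSeriesData(0, 30, 10, [1.0, None, 3.0])

holes.merge(full)         # merge fills only the gaps (None values)
print list(holes)         # [(0, 1.0), (10, 2.0), (20, 3.0)] -- (timestamp, value) pairs
print holes.timestamps    # one timestamp per value: 0, 10, 20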