pax_global_header 0000666 0000000 0000000 00000000064 13572723506 0014524 g ustar 00root root 0000000 0000000 52 comment=c7eea5189881fd7b0f9aab73eefdc5445a7f54f8
dltlyse-1.1/ 0000775 0000000 0000000 00000000000 13572723506 0013045 5 ustar 00root root 0000000 0000000 dltlyse-1.1/.gitignore 0000664 0000000 0000000 00000002263 13572723506 0015040 0 ustar 00root root 0000000 0000000 # Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
dltlyse-1.1/LICENSE 0000664 0000000 0000000 00000040525 13572723506 0014060 0 ustar 00root root 0000000 0000000 Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.
dltlyse-1.1/README.md 0000664 0000000 0000000 00000004375 13572723506 0014335 0 ustar 00root root 0000000 0000000 # DLT Analyser
A Python module and a collection of plugins to support analysis of DLT traces.
# Installation
# Execution
In SDK simply go to your result folder where your DLT files are and run:
```
dltlyse *.dlt
```
Run dltlyse with "--help" option to see more command line options
# How it works
`dltlyse` reads all messages from given DLT trace file and passes each DLT message to __call__ of all enabled plugins.
The plugin then decides if the message is interesting for its purpose and collects data.
At start of each device lifecycle new_lifecycle is called and at the end
end_lifecycle is called, in this way the plugins can track when the device was
rebooted. It is guaranteed that all messages will belong to a lifecycle, so
new_lifecycle will be called before any DLT message is passed to __call__ and
end_lifecycle will be called after the last message, before there is a call to
report.
Then the report() method from each plugin is called after all DLT messages have been passed through all enabled plugins.
The report() method should set one or more results from the processing as well as write details into files.
# Writing custom plugins
`dltlyse` can be easily extended with custom plugins using a simple plugin API. Just use the following code snippet
as a template stored in the "plugins" directory:
```
from dltlyse.core.plugin_base import Plugin
class MyCustomPlugin(Plugin):
"""Does some custom job"""
message_filters = ["XXX", "YYY"]
def __call__(self, message):
# will be called for each message where message.apid="XXX" and message.ctid="YYY":
# do some stuff, save knowledge into self
def new_lifecycle(self, ecu_id, lifecycle_id):
# will be called each time the device starts up with incremental id
def end_lifecycle(self, ecu_id, lifecycle_id):
# will be called each time the device shuts down
def report(self):
# called at the end
if self.good:
self.add_result(message="Good result", attach=["somefile.txt"])
# Attachment path is relative to extracted_files/ folder in results
else:
self.add_result(
state="failure",
message="This failed",
stdout="Detailed log of failure",
)
```
dltlyse-1.1/dltlyse/ 0000775 0000000 0000000 00000000000 13572723506 0014525 5 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/__init__.py 0000664 0000000 0000000 00000000000 13572723506 0016624 0 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/core/ 0000775 0000000 0000000 00000000000 13572723506 0015455 5 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/core/__init__.py 0000664 0000000 0000000 00000000000 13572723506 0017554 0 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/core/analyser.py 0000664 0000000 0000000 00000040742 13572723506 0017654 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""DLT file analyser"""
from __future__ import print_function
import logging
import time
import traceback
import os.path
import signal
import sys
from contextlib import contextmanager
import six
from dlt import dlt
from dltlyse.core.report import XUnitReport, Result
from dltlyse.core.plugin_base import Plugin
# pylint: disable= too-many-nested-blocks, no-member
logger = logging.getLogger(__name__)
stdoutlogger = logging.getLogger("summary")
stdoutlogger.addHandler(logging.StreamHandler(sys.stdout))
DEFAULT_PLUGINS_DIRS = [
os.path.join(os.path.dirname(__file__), "../plugins"), # installation folder
# e.g. /usr/bin/pythonX.X/site-packages/dltlyse/plugins
os.path.join(os.getcwd(), "plugins"), # plugins folder in current working directory
]
# Traces to buffer since they might be stored before lifecycle start message
buffer_matches = [
{"apid": "DA1", "ctid": "DC1", "payload_decoded": "[connection_info ok] connected \00\00\00\00"},
{"ecuid": "XORA"},
]
MAX_BUFFER_SIZE = 50
DLT_LIFECYCLE_START = {
"apid": "DLTD",
"ctid": "INTM",
"payload_decoded": "Daemon launched. Starting to output traces...",
}
class DLTLifecycle(object):
    """Single DLT lifecycle

    Represents one device boot-to-shutdown cycle within a DLT trace and
    tracks the first and last message seen in that cycle.
    """

    def __init__(self, ecu_id, lifecycle_id, dltfile=None):
        """
        :param str ecu_id: ECU identifier this lifecycle belongs to
        :param int lifecycle_id: incremental id of this lifecycle
        :param dltfile: optional DLT file object this lifecycle was read from
        """
        self.ecu_id = ecu_id
        self.dltfile = dltfile
        self.lifecycle_id = lifecycle_id
        self._first_msg = None
        self._last_msg = None

    def set_first_msg(self, msg):
        """Set the first msg

        explicitly needed for old dlt-atlas scripts

        :param DLTMessage msg: The message to be set as the first
        """
        self._first_msg = msg

    def set_last_msg(self, msg):
        """Set the last msg

        explicitly needed for old dlt-atlas scripts

        :param DLTMessage msg: The message to be set as the last
        """
        self._last_msg = msg

    def __getitem__(self, index):
        """Get either the first or last msg in this lifecycle

        explicitly needed for old dlt-atlas scripts

        :param int index: 0 for the first message, -1 for the last
        :raises ValueError: if the requested message has not been set yet
        :raises IndexError: for any index other than 0 or -1
        """
        if index == 0:
            if self._first_msg:
                return self._first_msg
            logger.error("Set first msg of lifecycle before using lifecycle object")
            raise ValueError("first message of lifecycle has not been set")
        elif index == -1:
            if self._last_msg:
                return self._last_msg
            logger.error("Set last msg of lifecycle before using lifecycle object")
            raise ValueError("last message of lifecycle has not been set")
        else:
            logger.error("Access to messages beyond 0 and -1 unsupported"
                         "- use DLTFile.lifecycles")
            raise IndexError("only indexes 0 and -1 are supported")

    def clear_msgs(self):
        """Clear the first and last msg"""
        self._first_msg = None
        self._last_msg = None

    def get_end(self):
        """Get last msg in this lifecycle

        explicitly needed for old dlt-atlas scripts
        """
        return self._last_msg
@contextmanager
def handle_plugin_exceptions(plugin, action="running"):
    """Catch all exceptions and store them in the plugin.__exceptions structure

    Any exception raised inside the managed block is logged and, when
    *plugin* is an instance (not a class), recorded via add_exception().
    The elapsed time of the action is always recorded via add_timing()
    for instances.
    """
    started = time.time()
    try:
        yield
    except:  # pylint: disable=bare-except
        trace_text = traceback.format_exc()
        summary = "Error {} plugin {} - {}".format(action, plugin.get_plugin_name(), sys.exc_info()[0])
        logger.error(summary)
        logger.error(trace_text)
        if not isinstance(plugin, type):
            plugin.add_exception("\n".join([summary, trace_text]))
    if not isinstance(plugin, type):
        plugin.add_timing(action, time.time() - started)
def _scan_folder(root, plugin_classes):
    """Scans a folder seeking for plugins.

    Recursively imports every public Python module below *root* (skipping
    'tests' sub-folders and files starting with '_') and appends every
    non-abstract subclass of Plugin found in those modules to
    *plugin_classes*.

    Args:
        root(str): the path to scan.
        plugin_classes(list): a list which collects all plugins found.
    """
    if not os.path.isdir(root):  # Skip non-existing folders.
        # logger.warn is a deprecated alias; use logger.warning instead.
        logger.warning("Directory '%s' doesn't exist!", root)
        return
    filenames = os.listdir(root)
    if "__NO_PLUGINS__" in filenames:  # If the folder hasn't plugins, we skip it.
        return
    # Make both the plugin folder and this package's folder importable,
    # so plugin modules and their local helpers can be imported by name.
    sys.path.insert(0, root)
    sys.path.insert(1, os.path.dirname(__file__))
    for name in filenames:
        full_path = os.path.join(root, name)
        if os.path.isdir(full_path):
            if name != 'tests':  # We skip the tests folder.
                _scan_folder(full_path, plugin_classes)
        elif name.endswith('.py') and not name.startswith('_'):  # We skip non-Python files, and private files.
            module_name = os.path.splitext(os.path.split(name)[-1])[0]
            try:
                __import__(module_name)
                module = sys.modules[module_name]
                for class_name in dir(module):
                    cls = getattr(module, class_name)
                    if six.PY3:
                        # A class is considered concrete when none of its
                        # attributes is still an abstract method (properties
                        # are exempt from the check).
                        if (hasattr(cls, "__mro__") and issubclass(cls, Plugin) and
                                (not any(hasattr(getattr(cls, item), "__isabstractmethod__") and
                                         not isinstance(getattr(cls, item), property) for item in dir(cls)))):
                            plugin_classes.append(cls)
                    else:
                        # On Python 2, __abstractmethods__ is non-empty for abstract classes.
                        if hasattr(cls, "__mro__") and issubclass(cls, Plugin) and not cls.__abstractmethods__:
                            plugin_classes.append(cls)
            except (ImportError, ValueError):
                logger.error("Could not load plugin %s\n%s", module_name, traceback.format_exc())
def get_plugin_classes(plugin_dirs):  # pylint: disable=too-many-locals
    """Collect plugin classes

    Scans every directory in *plugin_dirs* and returns all plugin classes
    discovered there.
    """
    collected = []
    for directory in plugin_dirs:
        logger.info("Searching directory '%s' for plugins", directory)
        _scan_folder(directory, collected)
    return collected
class DLTAnalyser(object):
"""DLT Analyser"""
def __init__(self):
self.plugins = []
self.file_exceptions = {}
self.traces = []
self._buffered_traces = []
self.dlt_file = None
def process_buffer(self):
"""Return buffered traces and clear buffer"""
if self._buffered_traces:
for trace in self._buffered_traces:
self.process_message(trace)
self._buffered_traces = []
def load_plugins(self, plugin_dirs, plugins=None, exclude=None, no_default_dir=False):
"""Load plugins from "plugins" directory"""
if no_default_dir is False:
plugin_dirs += DEFAULT_PLUGINS_DIRS
plugin_classes = get_plugin_classes(plugin_dirs)
if plugins:
plugins = list(set(plugins))
for cls in plugin_classes:
if plugins is None:
if cls.manually_executed and \
os.environ.get("DLTLYSE_ALL_INCLUDES_MANUAL", "false").lower() not in ('1', 'true', 'yes',):
continue
else:
if not cls.get_plugin_name() in plugins:
continue
plugins.remove(cls.get_plugin_name())
if exclude is not None and cls.get_plugin_name() in exclude:
continue
logger.info("Loading plugin '%s' from '%s'", cls.get_plugin_name(), cls.__module__)
with handle_plugin_exceptions(cls, 'loading'):
self.plugins.append(cls())
if plugins:
logger.error("Some plugins that were requested were not found: %s", plugins)
raise RuntimeError("Error loading requested plugins: {}".format(", ".join(plugins)))
def show_plugins(self):
"""Show available plugins"""
text = "Available plugins:\n"
for plugin in self.plugins:
classname = plugin.get_plugin_name()
try:
plugindoc = plugin.__doc__.splitlines()[0]
except AttributeError:
plugindoc = plugin.__doc__
text += " - {} ({})\n".format(classname, plugindoc)
return text
def get_filters(self):
"""Extract filtering information from plugins"""
filters = []
for plugin in self.plugins:
if plugin.message_filters == "all":
logger.debug("Speed optimization disabled: '%s' plugin requires all messages",
plugin.get_plugin_name())
filters = None
break
for flt in plugin.message_filters:
if flt not in filters:
filters.append(flt)
return filters
def start_lifecycle(self, ecu_id, lifecycle_id):
"""call DltAtlas plugin API - new_lifecycle"""
for plugin in self.plugins:
with handle_plugin_exceptions(plugin, "calling new_lifecycle"):
plugin.new_lifecycle(ecu_id, lifecycle_id)
def process_message(self, message):
"""Pass on the message to plugins that need it"""
for plugin in self.plugins:
if plugin.message_filters == "all" or \
(message.apid, message.ctid) in plugin.message_filters or \
("", message.ctid) in plugin.message_filters or \
(message.apid, "") in plugin.message_filters:
with handle_plugin_exceptions(plugin, "calling"):
plugin(message)
def end_lifecycle(self, lifecycle, lifecycle_id):
"""Finish lifecycle processing for all plugins"""
for plugin in self.plugins:
if hasattr(plugin, "prep_plugin_env"):
plugin.prep_plugin_env(lifecycle, lifecycle_id)
for plugin in self.plugins:
with handle_plugin_exceptions(plugin, "calling end_lifecycle"):
plugin.end_lifecycle(lifecycle.ecu_id, lifecycle_id)
# pylint: disable=too-many-locals, too-many-statements
def run_analyse(self, traces, xunit, no_sort, is_live, testsuite_name="dltlyse"):
"""Read the DLT trace and call each plugin for each message read"""
filters = self.get_filters()
# add filter for lifecycle start message in case it is missing
# filters == None means no filtering is done at all
flt = (DLT_LIFECYCLE_START["apid"].encode("utf-8"), DLT_LIFECYCLE_START["ctid"].encode("utf-8"))
if filters and flt not in filters:
filters.append(flt)
old_lifecycle = None
lifecycle = None
last_msg = None
lifecycle_id = 0
self.traces = traces
if is_live:
signal.signal(signal.SIGINT, self.stop_signal_handler)
for filename in traces:
logger.info("Reading trace file '%s'", filename)
with self.handle_file_exceptions(filename):
tracefile = dlt.load(filename, split=not no_sort, filters=filters, live_run=is_live)
self.dlt_file = tracefile
msg = None
for msg in tracefile:
is_start_msg = msg.compare(DLT_LIFECYCLE_START)
bufferable_msg = any(msg.compare(trace) for trace in buffer_matches)
# Buffer Messages if we find special
# marked msgs that should be buffered
# don't process these messages yet in this lifecycle
if bufferable_msg and len(self._buffered_traces) < MAX_BUFFER_SIZE:
self._buffered_traces.append(msg)
continue
# We found a start message, if this is the first ever then just start a new lifecycle,
# process any buffered messages and proceed. If we already have a lifecycle, then end that
# lifecycle and proceed as previously stated.
if is_start_msg:
if lifecycle:
lifecycle.set_last_msg(last_msg)
self.end_lifecycle(lifecycle, lifecycle.lifecycle_id)
lifecycle_id += 1
lifecycle = self.setup_lifecycle(msg=msg, lifecycle_id=lifecycle_id)
logger.info("DLT Analysis Starting life cycle %d", lifecycle.lifecycle_id)
if not lifecycle:
lifecycle = self.setup_lifecycle(msg, lifecycle_id=lifecycle_id, process_buffer=True)
self.process_buffer()
self.process_message(msg)
last_msg = msg
if lifecycle:
lifecycle.set_last_msg(last_msg)
old_lifecycle = lifecycle
# If the files only contained bufferable traces less than MAX_BUFFER_SIZE
# we create a life_cycle 0 to accommodate these msgs
if not lifecycle and self._buffered_traces:
lifecycle = self.setup_lifecycle(msg=msg, lifecycle_id=lifecycle_id, process_buffer=True)
old_lifecycle = lifecycle
if old_lifecycle:
self.process_buffer()
self.end_lifecycle(old_lifecycle, lifecycle_id)
return self.generate_reports(xunit, testsuite_name)
def generate_reports(self, xunit, testsuite_name):
"""Generates reports at the end of execution"""
logger.info("Generating reports")
xreport = XUnitReport(outfile=xunit, testsuite_name=testsuite_name)
run_result = 0
file_results = []
for plugin in self.plugins:
output = "Report for {} ... ".format(plugin.get_plugin_name())
with handle_plugin_exceptions(plugin, "calling report"):
plugin.report()
run_result |= 0 if plugin.report_exceptions() else 2
for state in ["success", "error", "failure", "skipped"]:
output += "{} {} ".format(len([x for x in plugin.get_results() if x.state == state]), state)
if all([x.state in ["success", "skipped"] for x in plugin.get_results()]):
output += "= passed."
else:
output += "= failed."
run_result |= 1
stdoutlogger.debug("- Error report for %s:", plugin.get_plugin_name())
for result in plugin.get_results():
if result.state != "success":
stdoutlogger.debug(result.message)
stdoutlogger.debug(result.stdout)
stdoutlogger.info(output)
xreport.add_results(plugin.get_results())
for filename in self.traces:
output = "Report for file"
if filename in self.file_exceptions:
stdoutlogger.debug(self.file_exceptions[filename])
stdoutlogger.info("%s %s ... = failed", output, filename)
file_results.append(Result(
classname="DLTAnalyser",
testname="File Sanity Checks During Execution",
state="error",
stdout=self.file_exceptions[filename],
message=self.file_exceptions[filename]
))
else:
stdoutlogger.info("%s %s ... = passed", output, filename)
file_results.append(Result(
classname="DLTAnalyser",
testname="File Sanity Checks During Execution",
state="success",
stdout="File Parsed Successfully",
message="File Parsed Successfully"
))
xreport.add_results(file_results)
if self.file_exceptions:
run_result |= 4
xreport.render()
logger.info("Done.")
return run_result
def setup_lifecycle(self, msg, lifecycle_id, process_buffer=False):
    """Create and initialize a new DLTLifecycle.

    Args:
        msg: the DLT message that starts the lifecycle (provides the ECU id).
        lifecycle_id(int): sequential id assigned to the new lifecycle.
        process_buffer(bool): when True, flush buffered messages into the
            freshly started lifecycle.

    Returns:
        DLTLifecycle: the initialized lifecycle object.
    """
    new_lifecycle = DLTLifecycle(ecu_id=msg.ecuid, lifecycle_id=lifecycle_id)
    new_lifecycle.set_first_msg(msg)
    # Notify all plugins that a lifecycle has begun.
    self.start_lifecycle(new_lifecycle.ecu_id, new_lifecycle.lifecycle_id)
    if process_buffer:
        self.process_buffer()
    return new_lifecycle
@contextmanager
def handle_file_exceptions(self, file_name):
    """Context manager that records file-loading IOErrors instead of raising.

    Any IOError raised inside the ``with`` block is logged and stored in
    ``self.file_exceptions`` keyed by ``file_name``; other exceptions propagate.

    Args:
        file_name(str): path of the DLT trace file being loaded.
    """
    try:
        yield
    except IOError as io_err:
        error_text = "Error Loading File {} - {}".format(file_name, io_err)
        logger.exception(error_text)
        self.file_exceptions[file_name] = error_text
def stop_signal_handler(self, signum, frame):
    """Catch SIGINT to stop any further analyzing of DLT Trace file in a live run

    Args:
        signum(int): number of the signal that triggered the handler.
        frame: current stack frame at signal delivery (unused).
    """
    # NOTE(review): logs via the root logger (logging.debug) rather than the
    # module logger — possibly unintended; confirm before changing.
    logging.debug("Signal Handler called with signal:%d", signum)
    # Tell the DLT file reader thread to stop delivering further messages.
    self.dlt_file.stop_reading.set()
dltlyse-1.1/dltlyse/core/plugin_base.py 0000664 0000000 0000000 00000036175 13572723506 0020333 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016-2018. BMW Car IT GmbH. All rights reserved.
"""Base class for dltlyse plugins"""
import csv
import functools
import logging
import os
import re
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from six import string_types
from dltlyse.core.report import Result
from dltlyse.core.utils import round_float
# pylint: disable= unsupported-membership-test
EXTRACT_DIR = "extracted_files"
logger = logging.getLogger(__name__)
class Plugin(object):
    """dltlyse Plugin base class

    Subclasses implement __call__() to consume DLT messages and report() to
    publish Result objects once the whole trace has been processed.
    """

    # NOTE(review): Python 2 style metaclass declaration (file supports py2/3
    # via six); under Python 3 this line has no effect, so @abstractmethod is
    # not actually enforced there.
    __metaclass__ = ABCMeta

    # message filters are filters that will be used during loading DLT trace file. Each plugin defines
    # list of (APID, CTID) pairs and only messages matching those criteria are read from the DLT trace file.
    # This is used for speed optimization
    # Limitation: DLT library only supports adding 30 filters. If we collect more than 30 filter pairs, the whole
    # mechanism is disabled
    # For special purposes when you need to analyse all messages, you can define message_filters = "all"
    # which also disables the filtering completely.
    message_filters = []

    manually_executed = False  # True if a plugin should be manually selected (not automatic execution).

    def __init__(self):
        # Name-mangled per-instance state: collected Result objects, unique
        # exception messages, and accumulated per-action run times (seconds).
        self.__results = []
        self.__exceptions = []
        self.__timings = defaultdict(float)

    @abstractmethod
    def __call__(self, message):
        """object will be called for every message

        param: DltMessage message: object representing a single line in DLT log. Commonly used methods are:
            message.compare(dict(apid="APP", ctid="CONTEXT")) -- match a message to a filter
            str(message.payload_decoded) - full payload of the message as a string
            message.payload - a list with payload data fields with their types
            message.tmsp - message timestamp (relative to lifecycle start)
        """
        pass

    @classmethod
    def get_plugin_name(cls):
        """Return plugin name"""
        return cls.__name__

    @abstractmethod
    def report(self):
        """Report the run after all messages has been read"""
        pass

    def add_result(self, **kwargs):
        """Adds a Result object with set values"""
        kwargs.setdefault("classname", self.get_plugin_name())
        # Default testname is the first line of the plugin's docstring.
        try:
            testname = self.__doc__.splitlines()[0]
        except AttributeError:
            testname = ""
        kwargs.setdefault("testname", testname)
        self.__results.append(Result(**kwargs))

    def add_attachments(self, attachments):
        """Adds attachments to the last result, creating a result if none exist"""
        if not self.__results:
            self.add_result()
        if attachments:
            self.__results[-1].attach.extend(attachments)

    def add_timing(self, action, timing):
        """Add time used by the plugin in an action"""
        self.__timings[action] += timing

    def add_exception(self, message):
        """Add an exception message"""
        # Deduplicate: identical messages are stored only once.
        if message not in self.__exceptions:
            self.__exceptions.append(message)

    def report_exceptions(self):
        """Report all detected exceptions

        Returns:
            bool: True when no exceptions were recorded, False otherwise (an
            "error" Result summarizing the exceptions is added in that case).
        """
        logger.debug("Timings of plugin %s: %s",
                     self.get_plugin_name(), {k: str(round_float(v, 2)) for k, v in self.__timings.items()})
        if self.__exceptions:
            self.add_result(
                testname="Exceptions during execution",
                state="error",
                message="Exceptions detected while executing the plugin",
                stdout="\n-------------\n".join(self.__exceptions),
            )
            return False
        return True

    def get_results(self):
        """Return the results object"""
        return self.__results

    def new_lifecycle(self, ecu_id, lifecycle_id):  # pylint: disable=no-self-use,unused-argument
        """Called at the start of each lifecycle (including first)"""
        pass

    def end_lifecycle(self, ecu_id, lifecycle_id):  # pylint: disable=no-self-use,unused-argument
        """Called at the end of each lifecycle (including last)"""
        pass
class CSVPlugin(Plugin):  # pylint: disable=abstract-method
    """Base class for plugins that write their output into one or more CSV files.

    Single-file usage: set ``csv_filename`` (e.g. "subdir/example.csv") and
    ``csv_fields`` (the list of column headers).

    Multi-file usage: set ``csv_filenames`` mapping each relative filename to
    its column headers, e.g.::

        csv_filenames = {}
        csv_filenames["my_csvfile.csv"] = ["column1", "column2", ...]
        csv_filenames["my_subdir/my_csvfile2.csv"] = ["column2.1", "column2.2", ...]

    Files are created lazily on first write, under EXTRACT_DIR.
    """

    csv_filename = None  # Single-file mode: relative path of the CSV file.
    csv_fields = None  # Single-file mode: list of column headers.
    csv_filenames = None  # Multi-file mode: {filename: [column headers]}.

    def __init__(self):
        self._csv = {}  # filename -> csv.writer
        self._csv_fileobj = {}  # filename -> underlying file object
        # Backward compatibility: fold the single-file settings into csv_filenames.
        if self.csv_filename:
            self.csv_filenames = {self.csv_filename: self.csv_fields}
        super(CSVPlugin, self).__init__()

    def _default_filename(self):
        """Return the first configured CSV filename, used when none is given."""
        return list(self.csv_filenames)[0]

    def _create_csvfile(self, filename=None):
        """Create a csv file (and its directories) and write the header row.

        Args:
            filename(str): relative CSV path; defaults to the first configured file.
        """
        filename = filename or self._default_filename()
        # Was a hardcoded "extracted_files" literal; use the module constant so
        # the output location stays consistent with the rest of the module.
        pathname = os.path.join(EXTRACT_DIR, filename)
        if not os.path.exists(os.path.dirname(pathname)):
            os.makedirs(os.path.dirname(pathname))
        self._csv_fileobj[filename] = open(pathname, "w")
        self._csv[filename] = csv.writer(self._csv_fileobj[filename])
        if self.csv_filenames[filename]:  # Only write header line if columns are defined.
            self._csv[filename].writerow(self.csv_filenames[filename])
        else:
            logger.debug("No header line written to file %s", filename)

    def writerow(self, data_row, filename=None):
        """Write a row to CSV file, creating the file on first use."""
        filename = filename or self._default_filename()
        if filename not in self._csv:
            self._create_csvfile(filename)
        self._csv[filename].writerow(data_row)

    def writerows(self, data_rows, filename=None):
        """Write several rows to csv file, creating the file on first use."""
        filename = filename or self._default_filename()
        if filename not in self._csv:
            self._create_csvfile(filename)
        self._csv[filename].writerows(data_rows)

    def report(self):
        """Close all CSV files and attach them to the plugin result."""
        self._close_csv_files()
        self.add_attachments(list(self.csv_filenames.keys()))

    def _close_csv_file(self, filename=None):
        """Close a single CSV file; a no-op for files that were never created."""
        filename = filename or self._default_filename()
        # .get() instead of [] so closing an uncreated file cannot raise KeyError.
        if self._csv.get(filename):
            self._csv_fileobj[filename].close()

    def _close_csv_files(self):
        """Close all CSV files"""
        for filename in self._csv:
            self._close_csv_file(filename)
class LifecycleCSVPlugin(CSVPlugin):  # pylint: disable=abstract-method
    """Creates a separate set of CSV files for every detected lifecycle."""

    # Template mapping {filename: columns}; copied into csv_filenames for each
    # lifecycle, prefixed with a per-lifecycle folder.
    lifecycle_csv_filenames = None

    __all_csv_filenames = None  # All CSV paths produced over every lifecycle.

    def new_lifecycle(self, ecu_id, lifecycle_id):
        """Creates the CSV files for the lifecycle"""
        folder = "Lifecycles/{0:02}".format(lifecycle_id)
        self.csv_filenames = {
            os.path.join(folder, name): columns
            for name, columns in self.lifecycle_csv_filenames.items()
        }
        super(LifecycleCSVPlugin, self).new_lifecycle(ecu_id, lifecycle_id)

    def end_lifecycle(self, ecu_id, lifecycle_id):
        """Closes the CSV files and stores them for attaching to the result"""
        self._close_csv_files()
        if not self.__all_csv_filenames:
            self.__all_csv_filenames = []
        self.__all_csv_filenames.extend(self.csv_filenames.keys())
        super(LifecycleCSVPlugin, self).end_lifecycle(ecu_id, lifecycle_id)

    def report(self):
        """Attaches all CSV files to the result"""
        self.add_attachments(self.__all_csv_filenames)

    def find_file(self, filename):
        """Find a filename matching a substring from the current lifecycle"""
        matches = [candidate for candidate in self.csv_filenames.keys() if filename in candidate]
        return matches[0]
def dlt_callback(app_id=None, ctx_id=None):
    """Marks a method as a DLT message callback for dltlyse.

    The decorated function gets a ``filter_condition`` attribute holding the
    ``(app_id, ctx_id)`` pair (empty strings when not given), which the plugin
    machinery later uses for callback registration and message filtering.

    Args:
        app_id(str): if defined, is the app_id that we want to catch.
        ctx_id(str): if defined, is the ctx_id that we want to catch.
    """
    def decorator(func):  # pylint: disable=missing-docstring
        func.filter_condition = (app_id or "", ctx_id or "")
        return func

    return decorator
class CallBacksAndReportPlugin(Plugin):  # pylint: disable=abstract-method
    """An extended version of the dltlyse Plugin, which automatically handles some common operations.

    A get_report method is provided, which automatically gets the report_output member, converts it
    to a string and writes the result to a file with the class name (converting all capital letters
    to '_' + their lowercase) + .txt appended as filename.
    So, basically a plugin has just to collect its data and put them in the report_output member.
    get_report calls prepare_report before writing the report to the file, because sometimes a
    preparation is needed to generate the final report.

    This plugin provides also a facility for registering callbacks: it's enough to decorate them
    with dlt_callback, providing the app_id and/or ctx_id filters (see dlt_callback's docstring).
    All methods which are decorated will be automatically retrieved and registered.
    Example:
        @dlt_callback('LTM', 'MAIN')
        def gather_version_info(self, frame):
            pass

    The plugin then will take care of calling the registered callbacks only when the proper filter
    conditions are matched, so eventually they only have to look at the payload.

    Finally, it automatically sets the log level to DEBUG, and creates a logger using the class
    name. The logger is available as the logger member.
    """

    def __init__(self):
        """Automatically sets a default for report (None -> no report) and logger."""
        self.collect_and_register_callbacks()
        self.report_output = None  # Should be defined before calling the parent constructor.
        super(CallBacksAndReportPlugin, self).__init__()
        self.logger = logging.getLogger(self.get_plugin_name())

    def collect_and_register_callbacks(self):
        """Collects and registers all dlt callbacks.

        The dlt callbacks should be decorated with the dlt_callback decorator.
        It also registers all message filters in class.message_filters.
        """
        # (apid, ctid) -> list of callbacks; callbacks with no filter go to the
        # "greedy" list and receive every message.
        self.dlt_callbacks = defaultdict(list)
        self.dlt_greedy_callbacks = []
        for member_name in dir(self):  # Scans the class members.
            member = getattr(self, member_name)
            filter_condition = getattr(member, "filter_condition", None)
            if filter_condition:
                if filter_condition[0] or filter_condition[1]:
                    if self.message_filters != "all":
                        # NOTE(review): appends to the class-level list (inherited
                        # from Plugin), so filters accumulate across all plugin
                        # instances sharing that attribute — confirm intended.
                        self.message_filters.append(filter_condition)  # pylint: disable=no-member
                    self.dlt_callbacks[filter_condition].append(member)
                else:
                    # An empty filter means "catch everything": filtering is disabled.
                    self.message_filters = "all"
                    self.dlt_greedy_callbacks.append(member)

    # pylint: disable=invalid-name
    def add_callback_from_template_function(self, template_function, app_id, ctx_id, userdata):
        """Adds an additional callback which is automatically generated from a "template" function or method.

        Args:
            template_function(function or method): a function or method that acts a template, to be "specialized"
                (according to the given app_id, ctx_id, payloads) to catch specific traces.
            app_id(str): the app id.
            ctx_id(str): the context id.
            userdata(object): normally is a sequence of strings that should be matched in the trace payload, but in
                reality it can be anything, since it's up to the template function to use this parameter as it wants.
        """
        # Data should be converted to strings, since dltlyse fails to register a filter if it's using unicode strings.
        app_id, ctx_id, userdata = (str(app_id), str(ctx_id),
                                    str(userdata) if isinstance(userdata, string_types) else userdata)
        callback = functools.partial(template_function, app_id=app_id, ctx_id=ctx_id, userdata=userdata)
        callback = dlt_callback(app_id, ctx_id)(callback)
        filter_condition = app_id, ctx_id
        # Same registration logic as collect_and_register_callbacks.
        if filter_condition[0] or filter_condition[1]:
            if self.message_filters != "all":
                self.message_filters.append(filter_condition)  # pylint: disable=no-member
            self.dlt_callbacks[filter_condition].append(callback)
        else:
            self.message_filters = "all"
            self.dlt_greedy_callbacks.append(callback)

    def get_result_dir(self):
        """Return result directory, creating it on first use."""
        if not os.path.exists(EXTRACT_DIR):
            os.makedirs(EXTRACT_DIR)
        return EXTRACT_DIR

    def report_filename(self):
        """Builds & returns a standard/base filename for the report."""
        # Converts all uppercase letters in lowercase, pre-pending them with a '_'.
        report_filename = re.sub(r"([A-Z])", r"_\1", self.get_plugin_name())
        return report_filename.lower().strip("_") + ".txt"

    def prepare_report(self):
        """It's invoked just before writing the report to file, in case that some operation needs
        to be done to prepare the report with the final/required format (a string, or a list/tuple/
        dict).
        """
        pass

    def get_report(self):
        """Provides automatic report generation.

        prepare_report is called to ensure that the report is ready for writing.
        Then the type of the report data is analyzed, to see if a JSON file (for list, tuple, or
        dict data type) should be written instead of the normal string/text file.
        """
        self.prepare_report()
        if self.report_output is None:
            return "No report is generated!"
        return self.write_to_domain_file(self.report_filename(), str(self.report_output))

    def write_to_domain_file(self, filename, report):
        """Write the given report to a file.

        Args:
            filename(str): the filename.
            report(str): the string with the report to be saved.

        Returns:
            str: the full path of the written report file.
        """
        fullpath = os.path.join(self.get_result_dir(), filename)
        with open(fullpath, "w") as report_file:
            report_file.write(report)
        self.logger.info("See %s", fullpath)
        return fullpath

    def __call__(self, message):
        """Dispatches the message to the registered callback.

        The callbacks were registered with the dlt_callbacks decorator.
        """
        # Filtered callbacks first, then the unconditional ("greedy") ones.
        for callback in self.dlt_callbacks[message.apid, message.ctid]:  # pylint: disable=no-member
            callback(message)
        for callback in self.dlt_greedy_callbacks:  # pylint: disable=no-member
            callback(message)
dltlyse-1.1/dltlyse/core/report.py 0000664 0000000 0000000 00000006407 13572723506 0017351 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Reporting for dltlyse"""
from collections import Counter
# NOTE(review): the XML fragments in these templates were lost in a text
# extraction step (angle-bracket content stripped); they are reconstructed here
# from the placeholders consumed by xunit_render() and XUnitReport.render().
# The placeholder sets are grounded in those call sites; the exact attribute
# layout should be double-checked against version control.
xunit_template = (
    '<?xml version="1.0" encoding="utf-8"?>'
    '<testsuite name="{testsuite_name}" tests="{number_of_tests}" errors="{number_of_errors}" '
    'failures="{number_of_failures}" skipped="{number_of_skipped}">'
    "{testcases}"
    "</testsuite>"
)

# Per-state <testcase> templates; formatted with a Result's __dict__
# (classname, testname, state, stdout, stderr, message, attach).
xunit_tc_template = dict(
    error=(
        '<testcase classname="{classname}" name="{testname}">'
        '<error message="{message}">{stdout}</error>'
        "<system-out>"
        "{attach}"
        "</system-out>"
        "</testcase>"
    ),
    failure=(
        '<testcase classname="{classname}" name="{testname}">'
        '<failure message="{message}">{stdout}</failure>'
        "<system-out>"
        "{attach}"
        "</system-out>"
        "</testcase>"
    ),
    skipped=(
        '<testcase classname="{classname}" name="{testname}">'
        '<skipped message="{message}">{stdout}</skipped>'
        "<system-out>"
        "{attach}"
        "</system-out>"
        "</testcase>"
    ),
    success=(
        '<testcase classname="{classname}" name="{testname}">'
        "<system-out>"
        "{attach}"
        "</system-out>"
        "</testcase>"
    ),
)

# Jenkins attachment-plugin marker; must contain {filename} because
# xunit_render() calls .format(filename=...) on it.
attachment_template = "[[ATTACHMENT|{filename}]]"
def xunit_render(result):
    """Render the result into XUnit chunk.

    Args:
        result(Result): the result to render; its ``state`` selects the template.

    Returns:
        str: the formatted ``testcase`` XML fragment.
    """
    # Copy the instance dict: formatting result.__dict__ in place (as the old
    # code did) overwrote result.attach with the rendered string, corrupting
    # the Result and breaking any second render of the same object.
    kwargs = dict(result.__dict__)
    kwargs["attach"] = "".join(attachment_template.format(filename=x) for x in kwargs.get("attach", []))
    return xunit_tc_template[result.state].format(**kwargs)
class Result(object):
    """A single testcase outcome collected by a plugin.

    The attributes mirror the xUnit testcase fields; ``attach`` is a list of
    file paths attached to the result.
    """

    def __init__(self, classname="Unknown", testname="Unknown", state="success", stdout="", stderr="", message="",
                 attach=None):
        self.classname = classname
        self.testname = testname
        self.state = state
        self.stdout = stdout
        self.stderr = stderr
        self.message = message
        # Falsy attach (None or empty) gets a fresh list, avoiding a shared
        # mutable default; a caller-supplied non-empty list is kept as-is.
        self.attach = attach or []

    def __repr__(self):
        return repr(self.__dict__)

    def __eq__(self, other):
        return self.__dict__ == other.__dict__
class XUnitReport(object):
    """Collects Result objects and renders them into an xUnit-format file."""

    def __init__(self, outfile=False, testsuite_name="dltlyse"):
        self.results = []
        self.outfile = outfile
        self.testsuite_name = testsuite_name

    def add_results(self, results):
        """Adds a result to the report"""
        self.results.extend(results)

    def render(self):
        """Renders an XUnit report and writes it to ``outfile`` (when set)."""
        state_counts = Counter(result.state for result in self.results)
        report = xunit_template.format(
            testsuite_name=self.testsuite_name,
            testcases="\n".join(xunit_render(result) for result in self.results),
            number_of_errors=state_counts["error"],
            number_of_failures=state_counts["failure"],
            number_of_skipped=state_counts["skipped"],
            number_of_tests=len(self.results),
        )
        if self.outfile:
            with open(self.outfile, "w") as reportfile:
                reportfile.write(report)
dltlyse-1.1/dltlyse/core/tests/ 0000775 0000000 0000000 00000000000 13572723506 0016617 5 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/core/tests/__init__.py 0000664 0000000 0000000 00000000000 13572723506 0020716 0 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/core/tests/test_analyser.py 0000664 0000000 0000000 00000010160 13572723506 0022044 0 ustar 00root root 0000000 0000000 # Copyright (C) 2017, BMW Car IT GmbH. All rights reserved.
"""Tests for core analyser parts of dltlyse."""
import os
import signal
import threading
import time
from unittest import TestCase
from mock import Mock, call
from dlt.dlt import cDLT_FILE_NOT_OPEN_ERROR, DLT_EMPTY_FILE_ERROR
from dltlyse.core.analyser import DLTAnalyser
from dltlyse.core.utils import (dlt_example_stream, create_temp_dlt_file, single_random_dlt_message,
start_dlt_message, single_random_corrupt_dlt_message)
class AnalyserTests(TestCase):
    """Tests of the main dltlyse analyser class"""

    def test_load_plugins(self):
        """Test plugin loading"""
        obj = DLTAnalyser()
        obj.load_plugins([])
        self.assertIn("ExtractFilesPlugin", obj.show_plugins())
        self.assertIn("TestSysErrorPlugin", obj.show_plugins())

    def test_load_plugins_specific(self):
        """Test specific plugin loading"""
        obj = DLTAnalyser()
        obj.load_plugins([], plugins=["ExtractFilesPlugin"])
        self.assertIn("ExtractFilesPlugin", obj.show_plugins())
        self.assertNotIn("TestSysErrorPlugin", obj.show_plugins())

    def test_dont_load_manually_executed_plugins(self):  # pylint: disable=invalid-name
        """Test that a manually-executed plugin isn't automatically loaded"""
        obj = DLTAnalyser()
        obj.load_plugins([])
        self.assertNotIn("HeavyLifecyclesAnalyzer", obj.show_plugins())

    def test_load_plugins_exclude(self):
        """Test blacklisting of plugin loading"""
        obj = DLTAnalyser()
        obj.load_plugins([], exclude=["TestSysErrorPlugin"])
        self.assertIn("ExtractFilesPlugin", obj.show_plugins())
        self.assertNotIn("TestSysErrorPlugin", obj.show_plugins())

    def test_analyse_file_sanity(self):
        """Simulate test run of the dltlyse with invalid dlt trace files"""
        obj = DLTAnalyser()
        # Stub out lifecycle/report hooks: only the file sanity handling is
        # under test here.
        obj.start_lifecycle = Mock()
        obj.end_lifecycle = Mock()
        obj.process_message = Mock()
        obj.generate_reports = Mock()
        xunit = Mock()
        file_not_exist = "mock.dlt"
        file_empty = create_temp_dlt_file(empty=True)
        file_valid = create_temp_dlt_file(stream=dlt_example_stream)
        obj.load_plugins([], plugins=["TestSysErrorPlugin"])
        obj.run_analyse([file_not_exist, file_empty, file_valid], xunit, True, False)
        # The valid file parses cleanly; the missing and empty files must be
        # recorded in file_exceptions with the matching DLT error strings.
        self.assertNotIn(file_valid, obj.file_exceptions)
        self.assertIn(cDLT_FILE_NOT_OPEN_ERROR, obj.file_exceptions[file_not_exist])
        self.assertIn(DLT_EMPTY_FILE_ERROR, obj.file_exceptions[file_empty])

    def test_corrupt_msg_live(self):
        """ Simulate test run of the dltlyse live with corrupt message"""
        def send_stop_signal(pid):
            """ Send a stop signal to the live run """
            time.sleep(0.1)
            os.kill(pid, signal.SIGINT)

        # Test with exactly MAX_BUFFER_SIZE MSGS and No Start
        obj = DLTAnalyser()
        obj.get_filters = Mock(return_value=[])
        obj.start_lifecycle = Mock()
        obj.end_lifecycle = Mock()
        obj.process_message = Mock()
        obj.generate_reports = Mock()
        xunit = Mock()
        # SIGINT is delivered from a helper thread to end the "live" run.
        stop_thread = threading.Thread(target=send_stop_signal, args=(os.getpid(),))
        random_msgs = bytearray()
        # Build a 60-message stream: indices 0/25/50 corrupt (3 messages),
        # 15/30/45 lifecycle starts (3), the remaining 54 ordinary messages.
        for i in range(60):
            if i % 25 == 0:
                random_msgs.extend(single_random_corrupt_dlt_message)
            elif i % 15 == 0:
                random_msgs.extend(start_dlt_message)
            else:
                random_msgs.extend(single_random_dlt_message)
        file1 = create_temp_dlt_file(stream=random_msgs)
        stop_thread.start()
        obj.run_analyse([file1], xunit, True, True)
        self.assertEqual(
            obj.start_lifecycle.mock_calls,
            [
                call('MGHS', 0), call('MGHS', 1), call('MGHS', 2), call('MGHS', 3),
            ],
        )
        # 57 parsable messages (54 ordinary + 3 starts); 3 corrupt ones are
        # counted separately by the reader.
        self.assertEqual(obj.process_message.call_count, 57)
        self.assertEqual(obj.end_lifecycle.call_count, 4)
        self.assertEqual(obj.dlt_file.corrupt_msg_count, 3)
        self.assertEqual(obj.generate_reports.mock_calls, [call(xunit, "dltlyse")])
dltlyse-1.1/dltlyse/core/tests/test_python_data_to_xml.py 0000664 0000000 0000000 00000010416 13572723506 0024126 0 ustar 00root root 0000000 0000000 # Copyright (C) 2017, BMW Car IT GmbH. All rights reserved.
"""Tests for data_to_xml_string plugin for dltlyse."""
from unittest import TestCase
from dltlyse.core.utils import data_to_xml_string
class TestDataToXMLString(TestCase):
    """data_to_xml_string unit tests.

    NOTE(review): the expected-value literals below contain no XML tags — only
    whitespace, tabs and text values. They appear to have had their
    angle-bracket markup stripped by a text-extraction step; restore them from
    version control before trusting these tests.
    """

    def test_only_the_tag_present(self):
        """Tests that only the tag is present."""
        data = 'foo'
        self.assertEqual(data_to_xml_string(data), '''
''')

    def test_tag_and_value_present(self):
        """Tests that the tag and value are present."""
        data = 'foo', 'bar'
        self.assertEqual(data_to_xml_string(data), '''
bar
''')

    def test_tag_and_attributes_present(self):
        """Tests that the tag and attributes are present."""
        data = 'foo', {'bar': '123'}
        self.assertEqual(data_to_xml_string(data), '''
''')

    def test_tag_and_attributes_and_value_present(self):  # pylint: disable=invalid-name
        """Tests that the tag, attributes, and value are present."""
        data = 'foo', {'bar': '123'}, 'spam'
        self.assertEqual(data_to_xml_string(data), '''
spam
''')

    def test_value_is_a_tuple(self):
        """Tests that the value is a tuple which generates sub-tags."""
        data = 'foo', ('bar', 'spam')
        self.assertEqual(data_to_xml_string(data), '''
\t
\t
''')

    def test_value_is_a_list(self):
        """Tests that the value is a list which generates sub-tags."""
        data = 'foo', ['bar', 'spam']
        self.assertEqual(data_to_xml_string(data), '''
\t
\t
''')

    def test_value_is_not_a_string(self):
        """Tests that the value is not a string.

        It can be any value, that can be converted to a string. A float is used for the test.
        """
        data = 'foo', 1.5
        self.assertEqual(data_to_xml_string(data), '''
1.5
''')

    def test_children_with_complex_data_structure(self):  # pylint: disable=invalid-name
        """Tests that children can use the same rules of the main tag.

        It allows to easily define more complex data structures.
        """
        data = 'foo', ['bar', ('spam', 123), ('droogs', {'milk': 'plus'})]
        self.assertEqual(data_to_xml_string(data), '''
\t
\t123
\t
''')

    def test_children_with_the_same_tag_name(self):  # pylint: disable=invalid-name
        """Tests that the tag name for children can be defined just ones.

        When all children share the same tag name, it's possible to define it once (with the
        special $tag attribute), and then it'll be automatically used by all of them.
        """
        data = 'foo', {'$tag': 'bar'}, [1, 2, 3]
        self.assertEqual(data_to_xml_string(data), '''
\t1
\t2
\t3
''')

    def test_children_with_the_same_attributes(self):  # pylint: disable=invalid-name
        """Tests that the attributes for children can be defined just ones.

        When all children share the same attributes, it's possible to define them once (with the
        special $attr attribute), and then they'll be automatically used by all of them.
        """
        data = 'foo', {'$attr': {'bar': 'spam'}}, ['droogs', 'milk', 'plus']
        self.assertEqual(data_to_xml_string(data), '''
\t
\t
\t
''')

    def test_children_with_the_same_name_and_attributes(self):  # pylint: disable=invalid-name
        """Tests that the tag name and attributes for children can be defined just ones.

        When all children share the same name and attributes, it's possible to define them once
        (with the special $tag and $attr attributes), and then they'll be automatically used by all
        of them.
        """
        data = 'foo', {'$tag': 'bar', '$attr': {'milk': 'plus'}}, [1, 2, 3]
        self.assertEqual(data_to_xml_string(data), '''
\t1
\t2
\t3
''')
dltlyse-1.1/dltlyse/core/utils.py 0000664 0000000 0000000 00000031365 13572723506 0017177 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Helper functions"""
import atexit
import logging
import os
import tempfile
from decimal import Decimal
import xml.dom.minidom
from xml.etree.ElementTree import Element, SubElement, tostring as xml_element_to_string
logger = logging.getLogger(__name__)
# --- Raw DLT byte-stream fixtures -------------------------------------------
# Pre-captured DLT trace fragments (storage headers included) used by the unit
# tests to build temporary trace files via create_temp_dlt_file().

# A small valid stream: a control/file-transfer message plus a journal entry.
dlt_example_stream = (
    b"DLT\x01#o\xd1WD>\x0c\x00MGHS5\x00\x00YMGHS\x00\x01\x80\xd1&\x01DA1\x00DC1\x00\x03\x00\x00\x00"
    b"\x07\x01\x00SYS\x00\x01\x00FILE\xff\xff\x16\x00File transfer manager.\x12\x00"
    b"DLT System ManagerremoDLT\x01#o\xd1Wo>\x0c\x00MGHS=\x00\x01PMGHS\x00\x00\x03\xf4\x00"
    b"\x01i\xa6A\x05SYS\x00JOUR\x00\x02\x00\x00\x1b\x002011/11/11 11:11:18.005274\x00\x00\x02\x00\x00"
    b"\t\x006.005274\x00\x00\x02\x00\x00\x16\x00systemd-journal[748]:\x00\x00\x02\x00\x00\x0f\x00"
    b"Informational:\x00\x00\x02\x00\x00\xcf\x00Runtime journal (/run/log/journal/) is currently"
    b" using 8.0M.\nMaximum allowed usage is set to 385.9M.\nLeaving at least 578.8M free (of"
    b" currently available 3.7G of space).\nEnforced usage limit is thus 385.9M.\x00"
)

# Two lifecycle-start markers (ECU ids MG1S/MG2S) with ordinary traces between.
file_with_two_lifecycles = (
    b"DLT\x01\xc5\x82\xdaX\x82o\x0e\x00MG1S=\x00\x00NMG1S"  # first lifecycle
    b"\x00\x00\x02r\x00\x00\x8frA\x01DLTDINTM\x00\x02\x00\x00.\x00"
    b"Daemon launched. Starting to output traces...\x00"
    b"DLT\x01m\xc2\x91Y\x9f\xda\x07\x00MGHS5\x00\x00 MGHS"  # no new lifecycle
    b"\x00\x00_\xde&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01m\xc2\x91Y\xad\xe4\x07\x00MGHS=\x01\x00zMGHS"  # random trace
    b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00"
    b"ApplicationID 'DBSY' registered for PID 689, Description=DBus"
    b" Logging|SysInfra|Log&Trace\n\x00"
    b"DLT\x01\xed\xc2\x91Y\x0f\xf0\x08\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x00\x9dC&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01\xed\xc2\x91Y\x17.\n\x00MG2S=\x00\x00NMG2S"  # new lifecycle
    b"\x00\x00\x02\xae\x00\x00@/A\x01DLTDINTM\x00\x02\x00\x00.\x00"
    b"Daemon launched. Starting to output traces...\x00"
)

# Stream whose first messages precede any lifecycle-start marker.
file_with_lifecycles_without_start = (
    b"DLT\x01\xc5\x82\xdaX\x19\x93\r\x00XORA'\x01\x00\x1bXORA"  # trace to buffer
    b"\x16\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x04\x00\x00\x00\x00"
    b"DLT\x01\xc5\x82\xdaXQi\x0e\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x03U\xe0&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01m\xc2\x91Y\xad\xe4\x07\x00MGHS=\x01\x00zMGHS"  # random trace
    b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00"
    b"ApplicationID 'DBSY' registered for PID 689, Description=DBus"
    b" Logging|SysInfra|Log&Trace\n\x00"
    b"DLT\x01\xed\xc2\x91Y\x0f\xf0\x08\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x00\x9dC&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
    b"DLT\x01\xed\xc2\x91Y\x17.\n\x00MG3S=\x00\x00NMG3S"  # new lifecycle
    b"\x00\x00\x02\xae\x00\x00@/A\x01DLTDINTM\x00\x02\x00\x00.\x00"
    b"Daemon launched. Starting to output traces...\x00"
)

# One complete, well-formed application-registration message.
single_random_dlt_message = bytearray(
    b"DLT\x01m\xc2\x91Y\xad\xe4\x07\x00MGHS=\x01\x00zMGHS"  # random trace
    b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00"
    b"ApplicationID 'DBSY' registered for PID 689, Description=DBus"
    b" Logging|SysInfra|Log&Trace\n\x00"
)

# The daemon-launch message that marks the start of a lifecycle.
start_dlt_message = bytearray(
    b"DLT\x01\xed\xc2\x91Y\x17.\n\x00MGHS=\x00\x00NMGHS"  # new lifecycle
    b"\x00\x00\x02\xae\x00\x00@/A\x01DLTDINTM\x00\x02\x00\x00.\x00"
    b"Daemon launched. Starting to output traces...\x00"
)

# A message with its storage header missing, i.e. a corrupt trace.
single_random_corrupt_dlt_message = bytearray(
    b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00"  # random corrupt trace
    b"ApplicationID 'DBSY' registered for PID 689, Description=DBus"
    b" Logging|SysInfra|Log&Trace\n\x00"
)

# Single messages that the analyser buffers until a lifecycle start arrives.
single_bufferable_trace_1 = bytearray(
    b"DLT\x01\xc5\x82\xdaX\x19\x93\r\x00XORA'\x01\x00\x1bXORA"  # trace to buffer
    b"\x16\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x04\x00\x00\x00\x00"
)

single_bufferable_trace_2 = bytearray(
    b"DLT\x01\xc5\x82\xdaXQi\x0e\x00MGHS5\x00\x00 MGHS"  # trace to buffer
    b"\x00\x03U\xe0&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00"
)
def seconds_to_human_readable(seconds):
    """Formats a duration as "h:mm:ss.cc" (cc = centiseconds).

    Args:
        seconds(float): non-negative duration in seconds.

    Returns:
        str: e.g. 3661.25 -> "1:01:01.25".
    """
    # Work in integer centiseconds so rounding carries into the seconds: the
    # old code rounded the fraction separately and could emit three digits
    # (e.g. 59.999 s rendered as "0:00:59.100"). It also computed an unused
    # seconds value from the first divmod.
    centiseconds = int(round(seconds * 100))
    total_secs, csecs = divmod(centiseconds, 100)
    mins, secs = divmod(total_secs, 60)
    hrs, mins = divmod(mins, 60)
    return "{:d}:{:02d}:{:02d}.{:02d}".format(hrs, mins, secs, csecs)
def data_to_xml_tree(data, parent=None, child_tag=None, child_attrib=None):
    """Converts a Python structure into an ElementTree structure.

    Three pieces of information describe an XML element: tag, attributes, and
    value/children. ``data`` may supply them in several shapes; missing parts
    are taken from the context (``child_tag``/``child_attrib``) or left empty.
    At least the tag must be derivable.

    Accepted shapes for ``data`` (examples reconstructed from the code — the
    original docstring's XML samples were lost in a text-extraction step):
      * 'foo'                           -> an empty <foo/> element
      * ('foo', 'bar')                  -> <foo> with text "bar"
      * ('foo', {'bar': '123'})         -> <foo> with attribute bar="123"
        (a dict in second position is always treated as the attributes)
      * ('foo', {'bar': '123'}, 'spam') -> <foo bar="123"> with text "spam"

    When the value is a tuple/list, each item becomes a child element and is
    processed recursively with the same rules, e.g.
    ('foo', ['bar', ('spam', 123)]) yields <foo> containing <bar/> and
    <spam>123</spam>.

    Two special keys in the attribute dict reduce repetition for children:
      * '$tag':  the tag name shared by all direct children, e.g.
        ('foo', {'$tag': 'bar'}, [1, 2, 3]) yields three <bar> children with
        texts "1", "2", "3".
      * '$attr': the attribute dict shared by all direct children.
    Both keys are stripped from the element's own attributes.

    Any non-list/tuple value is converted with str(), so e.g. a datetime is
    rendered as its string form.

    Args:
        data(list, tuple, dict, or any type): the Python data structure. See above for more details.
        parent(Element): the parent node (if available).
        child_tag(str, None): the tag to be used for direct children (if any).
        child_attrib(dict, None): the attributes to be used for direct children (if any).

    Returns:
        Element: the created node (attached to ``parent`` when one is given).
    """
    attrib, value = {}, None
    if child_tag:  # Have: tag. Miss: attrib, value
        tag, child_tag = child_tag, None
        if child_attrib is not None:  # Have: tag, attrib. Miss: value
            attrib, child_attrib, value = child_attrib, {}, data
        else:  # Have: tag, Miss: attrib, value
            if isinstance(data, dict):
                attrib = data
            elif isinstance(data, (tuple, list)):
                if len(data) == 2:
                    attrib, value = data
                else:
                    tag, attrib, value = data[:3]
            else:
                value = data
    else:  # Miss: tag, attrib, value
        if child_attrib is not None:  # Have: attrib. Miss: tag, value
            attrib, child_attrib = child_attrib, {}
            if isinstance(data, (tuple, list)):
                if len(data) == 2:
                    tag, value = data
                else:
                    tag, attrib, value = data[:3]
            else:
                tag = data
        else:  # Miss: tag, attrib, value
            if isinstance(data, (tuple, list)):
                if len(data) == 2:
                    tag, data = data
                    if isinstance(data, dict):
                        attrib = data
                    else:
                        value = data
                else:
                    tag, attrib, value = data[:3]
            else:
                tag = data
    if attrib:
        # The original attribute dictionary should be preserved, because it might be used by other
        # tags. That's because we'll remove some keys, if they are present. See below.
        attrib = attrib.copy()
        # '$tag'/'$attr' are directives for the children, not real attributes.
        new_child_tag = attrib.pop('$tag', None)
        if new_child_tag is not None:
            child_tag = new_child_tag
        new_child_attrib = attrib.pop('$attr', None)
        if new_child_attrib is not None:
            child_attrib = new_child_attrib
    # A sequence value means "children"; anything else becomes the text.
    text, children = (None, value) if isinstance(value, (tuple, list)) \
        else (str(value) if value is not None else None, ())
    node = Element(tag, attrib) if parent is None else SubElement(parent, tag, attrib)
    if text is not None:
        node.text = text
    for child in children:
        data_to_xml_tree(child, node, child_tag, child_attrib)
    return node
def data_to_xml_string(data, prettify=True, indent='\t', newline='\n'):
    """Render a Python data structure as an XML string.

    The structure is first converted to an ElementTree via data_to_xml_tree
    and then serialized; see data_to_xml_tree for the accepted layouts.

    Args:
        data(list, tuple, dict, or any type): the Python data structure. See data_to_xml_tree.
        prettify(bool): True if the XML string should be reformatted with a nice output.
        indent(str): the string to be used for indenting the XML elements.
        newline(str): the string to be used when an XML element is complete.
    """
    tree = data_to_xml_tree(data)
    serialized = xml_element_to_string(tree)
    if not prettify:
        return serialized
    # Re-parse and pretty-print with the requested indent/newline strings.
    document = xml.dom.minidom.parseString(serialized)
    return document.toprettyxml(indent, newline)
def create_temp_dlt_file(stream=None, dlt_message=None, empty=False):
    """Creates temporary DLT trace files for testing purposes.

    The file is removed automatically at interpreter exit (except for empty
    files, which keep the original behavior of not being cleaned up).

    Args:
        stream(bytes): a byte stream containing raw DLT data.
        dlt_message(DLTMessage object): a DLT message object to be serialized
            into the temporary file; takes precedence over `stream`.
        empty(bool): True will just create an empty DLT file.

    Returns:
        str: path to the temporary DLT file.

    Raises:
        ValueError: if neither `stream`, `dlt_message` nor `empty` is given.
    """
    handle, tmpname = tempfile.mkstemp()
    # mkstemp() returns an open OS-level file descriptor which the original
    # code silently leaked; close it before re-opening the path.
    os.close(handle)
    if empty:
        return tmpname
    if dlt_message is not None:
        msg = dlt_message.to_bytes()
    elif stream is not None:
        msg = stream
    else:
        # previously this fell through and crashed with a TypeError on write(None)
        raise ValueError("one of 'stream', 'dlt_message' or 'empty' must be given")
    with open(tmpname, 'wb') as tmpfile:
        tmpfile.write(msg)
    atexit.register(os.remove, tmpname)
    return tmpname
def round_float(val, precision=4):
    """Round a floating point number to a fixed decimal precision.

    Uses Decimal arithmetic so the result is identical regardless of the
    platform's native floating point behavior.

    Args:
        val(float): the value that needs to be rounded off.
        precision(int): number of decimal places to round off.

    Returns:
        Decimal: the rounded value; trailing zeros are kept only for
        integral results (e.g. 2.0 -> Decimal('2.0000')).
    """
    quantum = Decimal(10) ** -precision
    rounded = Decimal(val).quantize(quantum)
    normalized = rounded.normalize()
    # Integral results keep their zero-padded form; fractional ones are trimmed.
    if normalized == rounded.to_integral():
        return rounded
    return normalized
dltlyse-1.1/dltlyse/plugins/ 0000775 0000000 0000000 00000000000 13572723506 0016206 5 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/plugins/__init__.py 0000664 0000000 0000000 00000000000 13572723506 0020305 0 ustar 00root root 0000000 0000000 dltlyse-1.1/dltlyse/plugins/context.py 0000664 0000000 0000000 00000001257 13572723506 0020251 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Count DLTD INTM DLT messages"""
from dltlyse.core.plugin_base import Plugin
class ContextPlugin(Plugin):
    """Counts the DLTD/INTM messages seen in the trace.

    The analysis fails when no such message is found at all.
    """

    message_filters = [("DLTD", "INTM")]
    # running total; rebinding via += creates a per-instance attribute
    matched_messages = 0

    def __call__(self, message):
        # Re-check the filter criteria defensively before counting.
        is_dltd_intm = message.apid == "DLTD" and message.ctid == "INTM"
        if is_dltd_intm:
            self.matched_messages += 1

    def report(self):
        if not self.matched_messages:
            self.add_result(state="failure", message="could not find any DLTD INTM messages in the trace")
        else:
            self.add_result(stdout="found {} DLTD INTM messages".format(self.matched_messages))
dltlyse-1.1/dltlyse/plugins/extract_files.py 0000664 0000000 0000000 00000013070 13572723506 0021415 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Extracting all files from DLT trace
Example:
$ python dltlyse.py -p ExtractFilesPlugin vmwx86_full_trace.dlt
"""
from __future__ import print_function
import logging
import os
from collections import OrderedDict
from dltlyse.core.plugin_base import Plugin, EXTRACT_DIR
COREDUMP_DIR = "Coredumps"
FULL_EXTRACT_DIR = os.path.join(EXTRACT_DIR, COREDUMP_DIR)
logger = logging.getLogger(__name__)
class File(object):
    """One file being reassembled from a DLT file transfer.

    Attributes:
        transfer_id(str): identifier of the DLT file transfer this file belongs to.
        filename(str): base name of the file being extracted.
        index(int): sequence number of the last FLDA data chunk accounted for.
        error(bool): True once a chunk arrived out of order.
        finished(bool): True once the FLFI (end-of-transfer) message was seen.
    """

    def __init__(self, transfer_id, filename):
        self.transfer_id = transfer_id
        self.filename = filename
        self.index = 0
        self.error = False
        self.finished = False
        # store the temporary (part) file in the extracted_files/Coredumps/${transfer_id}/$(unknown).part
        self._part_filepath = os.path.join(FULL_EXTRACT_DIR, self.transfer_id, self.filename + ".part")
        # warn if the file has been already extracted before (not finished extraction)
        if os.path.exists(self._part_filepath):
            logger.warning("File '%s' exists already!", self._part_filepath)
        # make sure the extracted_files/Coredumps/${transfer_id} directory exists
        if not os.path.exists(os.path.join(FULL_EXTRACT_DIR, self.transfer_id)):
            os.makedirs(os.path.join(FULL_EXTRACT_DIR, self.transfer_id))
        # opened in "wb": any pre-existing partial file is truncated and rewritten
        self.handle = open(self._part_filepath, "wb")

    def close(self):
        """Close the handle and rename file to be completed."""
        self.handle.close()
        # move the file into the extracted_files/Coredumps/ if it does not exist already.
        # Otherwise keep it in the transfer_id subdirectory and remove the ".part" suffix
        if self.error is False:
            final_name = os.path.join(FULL_EXTRACT_DIR, self.filename)
            if not os.path.exists(final_name):
                os.rename(self._part_filepath, final_name)
            try:
                # best-effort removal of the per-transfer directory; it fails
                # (and is silently ignored) when files are still inside it
                os.rmdir(os.path.dirname(self._part_filepath))
            except OSError:
                pass
        else:
            # erroneous transfers stay in their transfer-id subdirectory, only
            # the ".part" suffix is dropped
            os.rename(self._part_filepath, os.path.join(os.path.dirname(self._part_filepath), self.filename))

    def __repr__(self):
        return self.filename
class ExtractFilesPlugin(Plugin):
    """Extracts all files embedded in a DLT trace.

    The DLT file-transfer protocol sends each file as one FLST (start)
    message, a sequence of FLDA (data chunk) messages and a final FLFI
    (finished) message on the SYS|FILE or FLT|FILE contexts.
    """

    message_filters = [("SYS", "FILE"), ("FLT", "FILE")]
    extracted_files = {}  # kept for backward compatibility; shadowed per instance in __init__
    success = False
    counter = 0

    def __init__(self):
        # The transfer table must be per-instance state: as a (mutable) class
        # attribute it would be shared between all instances of this plugin.
        # Same pattern as SysmemPlugin.__init__.
        self.extracted_files = {}
        super(ExtractFilesPlugin, self).__init__()

    def __call__(self, message):
        if message.apid in ["SYS", "FLT"] and message.ctid == "FILE":
            # file transfer payload header
            # FLST - file transfer start - first DLT message from the file transfer
            #        ["FLST", transfer_id, filename, length, date, "FLST"]
            # FLDA - file data
            #        ["FLDA", transfer_id, index, data, "FLDA"]
            # FLFI - file transfer end
            #        ["FLFI", transfer_id, "FLFI"]
            payload_header = message.payload[0].decode('utf8')
            transfer_id = str(message.payload[1])  # used as a dictionary key
            if payload_header == "FLST":
                filename = message.payload[2].decode('utf8')
                filename = os.path.basename(filename)  # ignore whatever path is included in DLT
                logger.info("Found file '%s' in the trace", filename)
                extr_file = File(transfer_id=transfer_id, filename=filename)
                self.extracted_files[transfer_id] = extr_file
            elif payload_header == "FLDA":
                extr_file = self.extracted_files[transfer_id]
                extr_file.index += 1
                # chunk indices must be strictly sequential; a mismatch marks
                # the whole file as broken (reported once)
                if extr_file.index != message.payload[2]:
                    if not extr_file.error:
                        logger.error("Expected index %d, got %d, failing file %s",
                                     extr_file.index, message.payload[2], extr_file.filename)
                        extr_file.error = True
                extr_file.handle.write(message.payload[3])
            elif payload_header == "FLFI":
                extr_file = self.extracted_files[transfer_id]
                extr_file.finished = True
                extr_file.close()

    def report(self):
        """Summarize the extraction and attach all complete files."""
        bad_files = []
        text = "extracted files found:\n"
        sorted_extracted_files = OrderedDict(sorted(self.extracted_files.items()))
        # files that are complete and error-free are attached to the result
        successful_attachments = [
            os.path.join(COREDUMP_DIR, x.filename)
            for x in sorted_extracted_files.values()
            if not x.error and x.finished
        ]
        for extr_file in sorted_extracted_files.values():
            text += " - {}".format(extr_file.filename)
            if extr_file.error:
                bad_files.append(extr_file.filename)
                text += " ERROR: File parts missing!"
            if extr_file.finished is False:
                if os.path.join(COREDUMP_DIR, extr_file.filename) in successful_attachments:
                    # another file transfer of the same file succeeded
                    logger.warning("File '%s' is not complete", extr_file.filename)
                else:  # file hasn't been re-transferred - error
                    bad_files.append(extr_file.filename)
                    logger.error("File '%s' is not complete", extr_file.filename)
                    text += " ERROR: File not complete!"
            text += "\n"
        # a filename can be appended twice (error AND unfinished): dedup via set()
        if bad_files:
            self.add_result(state="error", message="Error extracting {} files".format(len(set(bad_files))),
                            stdout=text, attach=successful_attachments)
        else:
            self.add_result(stdout=text, attach=successful_attachments)
dltlyse-1.1/dltlyse/plugins/sys_errors.py 0000664 0000000 0000000 00000003026 13572723506 0020773 0 ustar 00root root 0000000 0000000 # Copyright (C) 2017. BMW Car IT GmbH. All rights reserved.
"""Search SYS|JOUR for detected errors"""
import collections
import re
from dltlyse.core.plugin_base import Plugin
class TestSysErrorPlugin(Plugin):
    """Errors found by SYS|JOUR (systemd journal) messages."""

    # relevant APIDs and CTIDs to filter for
    # - SYS|JOUR: error detection
    message_filters = [("SYS", "JOUR")]

    # Matches dynamic-linker failures such as:
    #   "foo[123]: foo: error while loading shared libraries: libbar.so: cannot open shared object file"
    # The named groups were mangled to "(?P\S*?)" (invalid syntax, raises
    # re.error at import time); restored as <program> and <library>.
    shared_regex = re.compile(r"\[[0-9]*\]: (?P<program>\S*?): error while loading shared libraries: "
                              r"(?P<library>\S*?): cannot open shared object file")

    def __init__(self):
        # per-instance error table: a class-level defaultdict would be shared
        # between all instances of this plugin
        self.errors = collections.defaultdict(set)
        super(TestSysErrorPlugin, self).__init__()

    def __call__(self, message):
        """Handle traces"""
        if not (message.apid == "SYS" and message.ctid == "JOUR"):
            return
        payload_decoded = str(message.payload_decoded)
        match = self.shared_regex.search(payload_decoded)
        if match:
            self.errors["error while loading shared libraries"].add("{} failed to load {}".format(
                match.group("program"), match.group("library")))

    def report(self):
        """Report if errors were found"""
        if self.errors:
            message = "\n".join(self.errors.keys())
            stdout = []
            for error in self.errors:
                stdout.append("{}:\n{}".format(error, "\n".join(self.errors[error])))
            self.add_result(
                state="failure",
                message=message,
                stdout="\n---\n".join(stdout)
            )
        else:
            self.add_result(message="No errors found")
dltlyse-1.1/dltlyse/plugins/sysmem_plugin.py 0000664 0000000 0000000 00000004612 13572723506 0021456 0 ustar 00root root 0000000 0000000 """Parses DLT messages from the Monitor tool to gather system RAM usage"""
from csv import writer
from dltlyse.core.plugin_base import Plugin
class SysmemPlugin(Plugin):
    """Report system memory information.

    Parses MON/MEMS messages (memory snapshots from the Monitor tool) into a
    CSV report and fails the result if "MemAvailable" ever drops below 1 GB.
    """

    message_filters = [("MON", "MEMS")]
    pathname = "sysmem_report.csv"
    lifecycle_csv_fields = ("lifecycle", "time", "mem_total", "mem_available", "buffers", "cached", "shared")
    # maps /proc/meminfo-style payload field names to CSV column names
    field_mapping = {
        "MemTotal": "mem_total",
        "MemAvailable": "mem_available",
        "Buffers": "buffers",
        "Cached": "cached",
        "Shmem": "shared",
    }

    def __init__(self):
        self.csv_fileobj = None  # file object backing the CSV report (created lazily)
        self.csv = None  # csv.writer wrapping csv_fileobj
        self.lifecycle = None  # id of the lifecycle currently being parsed
        self.min_mem_available = None  # smallest "MemAvailable" value seen so far
        super(SysmemPlugin, self).__init__()

    def new_lifecycle(self, ecu_id, lifecycle_id):
        """New device start"""
        if not self.csv:  # Only create the report file if this plugin is actually run
            self.csv_fileobj = open(self.pathname, "w")
            self.csv = writer(self.csv_fileobj)
            self.csv.writerow(self.lifecycle_csv_fields)
        self.lifecycle = lifecycle_id
        super(SysmemPlugin, self).new_lifecycle(ecu_id, lifecycle_id)

    def __call__(self, message):
        data = {"lifecycle": str(self.lifecycle), "time": message.tmsp}
        # payload looks like "MemTotal: 1234.5MB MemAvailable: 567.8MB ...":
        # split on the "MB" unit and parse each "<field>: <value>" chunk
        for combo_value in message.payload_decoded.split("MB"):
            if ":" not in combo_value:
                continue
            field, value = combo_value.split(":")
            field = field.strip()
            value = int(float(value) * 1024)
            if field == "MemAvailable":
                # explicit None check: the previous truthiness test would have
                # reset the minimum whenever it legitimately reached 0
                if self.min_mem_available is None:
                    self.min_mem_available = value
                else:
                    self.min_mem_available = min(value, self.min_mem_available)
            if field in self.field_mapping:
                data[self.field_mapping[field]] = value
        self.csv.writerow([str(data.get(k, "")) for k in self.lifecycle_csv_fields])

    def end_lifecycle(self, ecu_id, lifecycle_id):
        """Device shut down"""
        if self.csv_fileobj is not None:
            self.csv_fileobj.flush()
        super(SysmemPlugin, self).end_lifecycle(ecu_id, lifecycle_id)

    def report(self):
        """Close report files and attach them to a test result"""
        # NOTE: the original called self.csv.close(), but csv.writer objects
        # have no close() method, so report() always raised AttributeError.
        if self.csv_fileobj is not None:
            self.csv_fileobj.close()
        if self.min_mem_available is not None and self.min_mem_available < 1024 * 1024:
            self.add_result(message="Available memory dropped below 1Gb", state="failure")
        if self.csv is not None:  # only attach the report if it was created
            self.add_attachments(self.pathname)
dltlyse-1.1/dltlyse/run_dltlyse.py 0000775 0000000 0000000 00000012044 13572723506 0017447 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""DLT file analyser"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import argparse
import fnmatch
import logging
import os
import sys
from dltlyse.core.analyser import DLTAnalyser
# pylint: disable=dangerous-default-value
logger = logging.getLogger("dltlyse")
def parse_options(args=sys.argv[1:]):
    """Parse command line parameters.

    Args:
        args(list): argument strings to parse; defaults to sys.argv[1:]
            (bound at import time, which is fine for a CLI entry point).

    Returns:
        argparse.Namespace: the parsed options.

    Raises:
        IOError: if the config file given via -c/--config does not exist.
    """
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--config", dest="config_file", metavar="FILE",
                             help="Use specific config file")
    args, remaining_args = conf_parser.parse_known_args(args)
    defaults = {"plugins": None}
    if args.config_file:
        if not os.path.exists(args.config_file):
            raise IOError("Configuration file '{}' could not be found.".format(args.config_file))
        config = configparser.ConfigParser()
        config.read([args.config_file])
        # update() instead of rebinding the dict: a config file without a
        # "plugins" entry used to drop the key and crash with KeyError below
        defaults.update(config.items("default"))
    # https://gist.github.com/von/949337/
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    # convert a comma-separated string from the config file to a list
    if isinstance(defaults["plugins"], str):
        defaults["plugins"] = defaults["plugins"].split(',')
    parser.set_defaults(**defaults)
    parser.add_argument("-d", "--plugins-dir", dest="plugin_dirs", action="append", default=[],
                        help="Add directory to search for plugins")
    parser.add_argument("--no-default-dir", dest="no_default_dir", action="store_true", default=False,
                        help="Do not look for plugins in the default directories")
    parser.add_argument("-p", "--plugins", dest="plugins", action="append", default=defaults["plugins"],
                        help="Initialize only explicitly listed plugin classes")
    parser.add_argument("--exclude", dest="exclude", action="append",
                        help="Exclude listed plugin classes")
    parser.add_argument("-s", "--show-plugins", dest="show_plugins", action="store_true", default=False,
                        help="Show available plugins")
    parser.add_argument("-r", "--recursive", dest="recursive_search", action="store_true", default=False,
                        help="Search directories for traces recursively")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False,
                        help="Turn on verbose messages")
    parser.add_argument("-x", "--xunit", dest="xunit", default="dltlyse_results.xml",
                        help="Generate result file in xUnit format to the specified file")
    parser.add_argument("--xunit-testsuite-name", dest="xunit_testsuite_name", default="dltlyse",
                        help="Testsuite name used inside the xunit results file")
    parser.add_argument("--no-sort", dest="no_sort", action="store_true", default=False,
                        help="Compatibility option - ignored")
    parser.add_argument("--live-run", dest="live_run", action="store_true", default=False,
                        help="Do a live run of DLTlyse plugins on incoming DLT logs")
    parser.add_argument("traces", nargs="*", help="DLT trace files")
    return parser.parse_args(remaining_args)
def main():
    """Entry point"""
    logging.basicConfig(level=logging.INFO)
    options = parse_options()
    # -v raises the root logger to DEBUG
    log_level = logging.DEBUG if options.verbose is True else logging.INFO
    logging.root.setLevel(log_level)
    if options.live_run and len(options.traces) > 1:
        logger.error("DLTlyse does not support multiple trace files with '--live-run' option.")
        return 1
    analyser = DLTAnalyser()
    analyser.load_plugins(
        plugin_dirs=options.plugin_dirs, plugins=options.plugins,
        exclude=options.exclude, no_default_dir=options.no_default_dir,
    )
    if options.show_plugins:
        print(analyser.show_plugins(), file=sys.stderr)
        return 0
    # expand any directory arguments into the *.dlt files they contain
    traces = []
    for trace in options.traces:
        if not os.path.isdir(trace):
            traces.append(trace)
        elif options.recursive_search is True:
            for root, _, filenames in os.walk(trace):
                traces.extend(os.path.join(root, name) for name in fnmatch.filter(filenames, "*.dlt"))
        else:
            traces.extend(os.path.join(trace, name) for name in fnmatch.filter(os.listdir(trace), "*.dlt"))
    return analyser.run_analyse(traces, xunit=options.xunit, no_sort=True,
                                is_live=options.live_run, testsuite_name=options.xunit_testsuite_name)
if __name__ == "__main__":
sys.exit(main())
dltlyse-1.1/requirements.txt 0000664 0000000 0000000 00000000004 13572723506 0016323 0 ustar 00root root 0000000 0000000 dlt
dltlyse-1.1/run-dltlyse 0000775 0000000 0000000 00000000315 13572723506 0015254 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""DLT file analyser"""
import sys
from dltlyse.run_dltlyse import main
if __name__ == "__main__":
sys.exit(main())
dltlyse-1.1/setup.cfg 0000664 0000000 0000000 00000022643 13572723506 0014675 0 ustar 00root root 0000000 0000000 [pep8]
max-line-length=119
[nosetests]
verbosity=2
detailed-errors=1
with-xunit=1
xunit-file=junit-reports/framework_test_results.xml
[MASTER]
# Fix for pylint1.4 security change on import of C extensions.
extension-pkg-whitelist=lxml
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
init-hook='import sys; sys.path.append("/usr/lib")'
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
# Disabled messages
# C0325 Unnecessary parens after 'print' keyword
# I0011 Locally disabling %s Used when an inline option disable a message or a messages category
# R0201 Method could be a function
# R0902 Too many instance attributes
# R0903 Too few public methods
# R0912 Too many branches (%s/%s) Used when a function or method has too many branches, making it hard to follow.
# R0913 Too many arguments
# R0921 Abstract class not referenced
# R0922 Abstract class is only referenced 1 times
# R0801 Similar code in multiple files
# W0511 Fixme
# W0613 Unused argument
disable=I0011,R0201,R0902,R0903,R0912,R0913,R0921,R0922,R0801,C0325,W0511,W0613
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Za-z_][A-Za-z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct attribute names in class
# bodies
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject, _socketobject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=119
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
dltlyse-1.1/setup.py 0000775 0000000 0000000 00000003010 13572723506 0014554 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
"""Setup of DLTlyse"""
import os
import subprocess
from setuptools import setup, find_packages
__version__ = "1.0.0"
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
CLASSIFIERS = [
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Testing"
]
extra = {}
extra["install_requires"] = open("requirements.txt").read().splitlines()
try:
version_git = os.getenv("GITPKGVTAG", None) or subprocess.check_output(["git", "rev-parse",
"--short", "HEAD"]).rstrip()
except (subprocess.CalledProcessError, OSError):
version_git = "unknown"
pkg_version = "{}+{}".format(__version__, version_git)
setup(
name="dltlyse",
version=pkg_version,
description="DLT trace file analyser for the BMW head unit platform",
long_description=open("README.md").read(),
author="BMW Car IT",
license="MPL 2.0",
url="https://github.com/bmwcarit/dltlyse",
keywords="dltlyse DLT trace analyse analyser testing testautomation test framework",
platforms="any",
classifiers=CLASSIFIERS,
zip_safe=False,
packages=find_packages(exclude=["tests", "tests.*"]),
tests_require=["coverage"],
entry_points={
"console_scripts": [
"dltlyse = dltlyse.run_dltlyse:main",
]
},
**extra
)
dltlyse-1.1/tests/ 0000775 0000000 0000000 00000000000 13572723506 0014207 5 ustar 00root root 0000000 0000000 dltlyse-1.1/tests/system_tests.py 0000664 0000000 0000000 00000003313 13572723506 0017327 0 ustar 00root root 0000000 0000000 # Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Basic DLTlyse tests"""
import sys
from nose.tools import assert_greater, assert_in, assert_equal, assert_true
from mtee.testing.tools import assert_process_returncode, run_command
from mtee.tools.nose_parametrize import nose_parametrize
from dltlyse.core.utils import seconds_to_human_readable
class TestsDltlyse(object):
    """System tests that run the dltlyse command line tool as a subprocess."""

    # configuration for the spawned process; "./run-dltlyse" is the wrapper
    # script shipped in the repository root
    sdk_path = "/"
    command = [sys.executable, "./run-dltlyse"]

    def test_show_plugins(self):
        """Test DLTlyse show plugin execution"""
        cmd = self.command + ["-s"]
        result = run_command(cmd)
        assert_process_returncode(0, result, "dltlyse execution failed. Expected pass")
        # the plugin listing is printed to stderr (see run_dltlyse.main)
        assert_in("Available plugins", result.stderr)
        assert_greater(len(result.stderr.split('\n')), 1, "No plugins loaded")

    def test_no_traces(self):
        """Test DLTlyse run without traces"""
        cmd = self.command
        result = run_command(cmd)
        # TODO - re-enable once traffic load plugin is fixed
        # assert_process_returncode(0, result, "dltlyse execution without traces failed, expected a pass")
        assert_true(result)
# Each tuple is one (input, expected) parametrized test case.
@nose_parametrize((0.01, "0:00:00.01"),
                  (0.1, "0:00:00.10"),
                  (0.25, "0:00:00.25"),
                  (1, "0:00:01.00"),
                  (1.25, "0:00:01.25"),
                  (61, "0:01:01.00"),
                  (61.2, "0:01:01.20"),
                  (3600, "1:00:00.00"),
                  (3661.25, "1:01:01.25"))
def test_seconds_to_human_readable(seconds, result):
    """Test conversion of seconds to human readable time string.

    Args:
        seconds(float): duration in seconds to convert.
        result(str): expected "H:MM:SS.ss" formatted string.
    """
    assert_equal(seconds_to_human_readable(seconds), result)