pax_global_header00006660000000000000000000000064135471063530014521gustar00rootroot0000000000000052 comment=b1edfaa69ddde00035e980e0fc1313f92bb74970 python-dlt-2.0/000077500000000000000000000000001354710635300134645ustar00rootroot00000000000000python-dlt-2.0/.coveragerc000066400000000000000000000000771354710635300156110ustar00rootroot00000000000000[run] branch = True source = dlt [report] show_missing = True python-dlt-2.0/.gitignore000066400000000000000000000022631354710635300154570ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ python-dlt-2.0/.pylintrc000066400000000000000000000272321354710635300153370ustar00rootroot00000000000000[MASTER] # Specify a configuration file. rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. 
profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Pickle collected data for later comparisons. persistent=yes # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= # Use multiple processes to speed up Pylint. jobs=1 # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist= # Allow optimization of some AST trees. This will activate a peephole AST # optimizer, which will apply various small optimizations. For instance, it can # be used to obtain the result of joining multiple strings with the addition # operator. Joining a lot of strings can lead to a maximum recursion error in # Pylint and this flag can prevent that. It has one side effect, the resulting # AST will be different than the one from reality. optimize-ast=no [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". files-output=no # Tells whether to display a full report or only the messages reports=yes # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. 
This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). comment=no # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time. See also the "--disable" option for examples. #enable= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" disable=I0011,R0201,R0902,R0903,R0912,R0913,R0921,R0922,R0801,C0325,W0511 [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the name of dummy variables (i.e. expectedly # not used). 
dummy-variables-rgx=_$|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_,_cb [SPELLING] # Spelling dictionary name. Available dictionaries: none. To make it working # install python-enchant package. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis ignored-modules= # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). ignored-classes=SQLObject # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. generated-members=REQUEST,acl_users,aq_parent [FORMAT] # Maximum number of characters on a single line. max-line-length=119 # Regexp for a line that is allowed to be longer than the limit. 
ignore-long-lines=^\s*(# )??$ # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no # List of optional constructs for which whitespace checking is disabled no-space-check=trailing-comma,dict-separator # Maximum number of lines in a module max-module-lines=1000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [BASIC] # Required attributes for module, separated by a comma required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,apply,input # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. 
name-group= # Include a hint for the correct naming format with invalid-name include-naming-hint=no # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for variable names variable-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Naming hint for module names module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Naming hint for constant names const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Naming hint for class attribute names class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming hint for class names class-name-hint=[A-Z_][a-zA-Z0-9]+$ # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming hint for inline iteration names inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression matching correct function names function-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for function names function-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for attribute names attr-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for method names method-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for argument names argument-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match function or class names that do # not require a docstring. 
no-docstring-rgx=__.*__ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branches=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [CLASSES] # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=stringprep,optparse # Create a graph of every (i.e. 
internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception python-dlt-2.0/.travis.yml000066400000000000000000000001671354710635300156010ustar00rootroot00000000000000language: generic services: - docker before_script: - make build-image script: - make lint - make unit-test python-dlt-2.0/Dockerfile000066400000000000000000000017041354710635300154600ustar00rootroot00000000000000FROM debian:buster as builder ARG LIBDLT_VERSION=2.18.4 RUN apt-get update \ && apt-get upgrade -y \ && apt-get install -y build-essential git cmake libdbus-1-dev cmake-data \ libdbus-1-dev systemd libsystemd-dev wget curl zlib1g-dev # Install libdlt RUN git clone https://github.com/GENIVI/dlt-daemon \ && cd /dlt-daemon \ && git checkout v${LIBDLT_VERSION} \ && cd /dlt-daemon \ && cmake CMakeLists.txt \ && make \ && make install FROM debian:buster # Install libdlt.so COPY --from=builder /usr/local/lib /usr/local/lib RUN ldconfig RUN apt-get update \ && apt-get upgrade -y \ && apt-get install -y python3 python3-pip python2 python2-dev git \ && pip3 install --no-cache-dir setuptools tox \ && apt-get clean all \ && rm -rf \ /var/cache/debconf/* \ /var/lib/apt/lists/* \ /var/log/* \ /tmp/* \ /var/tmp/* # vim: set ft=dockerfile : python-dlt-2.0/LICENCE.txt000066400000000000000000000352011354710635300152700ustar00rootroot00000000000000Mozilla Public License Version 2.0 1. Definitions 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. 
"Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. 
"Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. 
Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. 
Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. 
If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. 6. Disclaimer of Warranty Covered Software is provided under this License on an "as is" basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. 7. Limitation of Liability Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. 
This limitation of liability shall not apply to liability for death or personal injury resulting from such party's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. 8. Litigation Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. 
Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. 
python-dlt-2.0/Makefile000066400000000000000000000023661354710635300151330ustar00rootroot00000000000000LIBDLT_VERSION=2.18.4 IMAGE=python-dlt/python-dlt-unittest TAG?=latest DK_CMD=docker run --rm -v $(shell pwd):/pydlt -w /pydlt TEST_ARGS?="-e py3,py27" .PHONY: all all: @echo "python-dlt testing commands, libdlt version: ${LIBDLT_VERSION}" @echo " make unit-test -- Run unit tests with tox (Run 'make build-image' the first time)" @echo " make build-image -- Build docker image for the usage of 'make unit-test'" @echo " make clean -- Remove all temporary files" .PHONY: test test: mkdir -p junit_reports; nosetests --no-byte-compile \ --with-xunit --xunit-file=junit_reports/python-dlt_tests.xml \ tests .PHONY: unit-test unit-test: ${DK_CMD} ${IMAGE}:${TAG} tox ${TEST_ARGS} .PHONY: lint lint: ${DK_CMD} ${IMAGE}:${TAG} tox -e lint .PHONY: build-image build-image: docker build --build-arg LIBDLT_VERSION=${LIBDLT_VERSION} \ --tag ${IMAGE}:${TAG} . docker build --build-arg LIBDLT_VERSION=${LIBDLT_VERSION} \ --tag ${IMAGE}:${LIBDLT_VERSION} . .PHONY: clean clean: ifeq (,$(wildcard /.dockerenv)) ${DK_CMD} ${IMAGE}:${TAG} make clean else find . -name "__pycache__" | xargs -n1 rm -rf find . -name "*.pyc" | xargs -n1 rm -rf rm -rf .coverage rm -rf *.egg-info rm -rf .eggs rm -rf junit_reports rm -rf .tox endif python-dlt-2.0/README.rst000066400000000000000000000131501354710635300151530ustar00rootroot00000000000000python-dlt ========== python-dlt is a thin Python ctypes wrapper around libdlt functions. It was primarily created for use with BMW's test execution framework. However, the implementation is independent and the API makes few assumptions about the intended use. Note: This is only tested with libdlt version 2.18.4, later versions might require adaptations. The package will not support previous libdlt versions from python-dlt v2.0. Also only GENIVI DLT daemon produced traces have been tested. 
Design ------ The code is split up into 3 primary components: * The `core`: This subpackage provides the major chunk of ctypes wrappers for the structures defined in libdlt. It abstracts out the libdlt structures for use by the rest of mgu_dlt. Classes defined here ideally should *not* be used outside of mgu_dlt. The module `core_base.py` provides the default implementation of the classes and the other `core_*.py` modules provide the overrides for the version specific implementations of libdlt. The correct version specific implementation will be loaded automatically at runtime. (the logic for this is in `core/__init__.py`) * The python interface classes: These are defined in `dlt.py`. Most of the classes here derive from their corresponding ctypes class definitions from `core` and provide a more python friendly api/access to the underlying C/ctypes implementations. Ideally, python code using `mgu_dlt` would use these classes rather than the base classes in `core`. * API for tools: This is the component that provides common interfaces required by the tools that use `mgu_dlt`, like the `DLTBroker`, 'DLTLifecycle' etc. These classes do not have equivalents in libdlt and were created based on usage requirements (and as such make assumptions about the manner in which they would be used). If you're reading this document to work on the core or the python classes, it would be a good idea to first understand the design of libdlt itself. This is fairly well documented (look under the `doc/` directory of the `dlt-deamon` code base). Of course the best reference is the code itself. `dlt-daemon` is written in C and is a pretty well laid out, straight forward (ie: not many layers of abstractions), small code base. Makes for good bedtime reading. The rest of this document will describe and demonstrate some of the design of the external API of mgu_dlt. The classes most relevant for users of python-dlt possibly are `DLTClient`, `DLTFile`, `DLTMessage`, `DLTBroker`. 
The names hopefully make their purpose evident. Here are examples of some interesting ways to use these classes: * DLTFile and DLTMessage:: >>> from dlt import dlt >>> # DLTFile object can be obtained by lading a trace file >>> d = dlt.load("high_full_trace.dlt") >>> print(d.counter_total) # number of DLT messages in the file ... >>> print(d[0]) # messages can be indexed ... >>> for msg in d: # DLTFile object is iterable ... print(msg.apid) # DLTMessage objects have all the attrs ... print(msg.payload_decoded) # one might expect from a DLT frame ... print(msg) # The str() of the DLTMessage closely matches the ... # output of dlt-receive >>> d[0] == d[-1] # DLTMessage objects can be compared to each other >>> d.compare(dict(apid="SYS", citd="JOUR")) # ...or can be compared to an ... # dict of attributes >>> import pickle >>> pickle.dumps(d[0]) # DLTMessage objects are (de)serializable using ... # the pickle protocol (this is to enable sharing ... # of the DLTMessage in a multiprocessing ... # environment) * DLTClient and DLTBroker:: >>> from dlt import dlt >>> c = dlt.DLTClient(servIP="127.0.0.1") # Only initializes the client >>> c.connect() # ...this connects >>> dlt.dltlib.dlt_receiver_receive(ctypes.byref(client.receiver), DLT_RECEIVE_SOCKET) # receives data >>> c.read_message() # reads a single DLTMessage from received data and returns it >>> >>> # more interesting is the DLTBroker class... >>> # - create an instance that initializes a DLTClient. Accepts a filename >>> # where DLT traces would be stored >>> broker = DLTBroker(ip_address="127.0.0.1", filename='/tmp/testing_log.dlt') >>> # needs to be started and stopped explicitly and will create a run a >>> # DLTClient instance in a new *process*. 
>>> broker.start() >>> broker.stop() >>> >>> # Usually, used in conjunction with the DLTContext class from mtee >>> from mtee.testing.connectors.connector_dlt import DLTContext >>> broker = DLTBroker(ip_address="127.0.0.1", filename="/tmp/testing_log.dlt", verbose=True) >>> ctx = DLTContext(broker, filters=[("SYS", "JOUR")]) >>> broker.start() >>> print(ctx.wait_for(count=10)) >>> Design of DLTBroker ~~~~~~~~~~~~~~~~~~~ The DLTBroker abstracts out the management of 2 (multiprocessing) queues: * The `message_queue`: This queue receives *all* messages from the DLT daemon (via a DLTClient instance, running as a separate process, code in `dlt.dlt_broker_handlers.DLTMessageHandler`) and stores them to a trace file. * The `filter_queue`: This queue instructs the `DLTMessageHandler` which messages would be interesting at runtime, to be filtered and returned (for example, via a request from `DLTContext`). This is run as a separate thread in the `DLTBroker` process. The code for this is in `dlt.dlt_broker_handlers.DLTContextHandler`. python-dlt-2.0/dlt/000077500000000000000000000000001354710635300142475ustar00rootroot00000000000000python-dlt-2.0/dlt/__init__.py000066400000000000000000000027171354710635300163670ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. """DLT support module""" import collections import logging import subprocess if not hasattr(subprocess, "TimeoutExpired"): import subprocess32 as subprocess # pylint: disable=import-error LOGGER = logging.getLogger(__name__) ProcessResult = collections.namedtuple("ProcessResult", ("stdout", "stderr", "returncode")) def run_command(command, timeout=60, shell=True): """Run command in a shell and return stdout, stderr and return code :param str|list command: a command to run :param int timeout: timeout for the command :param bool shell: shell switch :returns: process result :rtype: subprocess compatible ProcessResult :raises RuntimeError: If timeout expires. 
""" process = subprocess.Popen(command, shell=shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) try: stdout, stderr = process.communicate(timeout=timeout) except subprocess.TimeoutExpired: process.terminate() raise RuntimeError("Timeout %d seconds reached for command '%s'" % (timeout, command)) if isinstance(stdout, bytes): stdout = stdout.decode("utf-8") if isinstance(stderr, bytes): stderr = stderr.decode("utf-8") return ProcessResult(stdout, stderr, process.returncode) python-dlt-2.0/dlt/core/000077500000000000000000000000001354710635300151775ustar00rootroot00000000000000python-dlt-2.0/dlt/core/__init__.py000066400000000000000000000051421354710635300173120ustar00rootroot00000000000000# Copyright (C) 2017. BMW Car IT GmbH. All rights reserved. """Basic ctypes binding to the DLT library""" # pylint: disable=invalid-name,wildcard-import import ctypes import os import six from dlt.core.core_base import * API_VER = None def get_version(loaded_lib): """Return the API version of the loaded libdlt.so library""" global API_VER # pylint: disable=global-statement if API_VER is None: buf = ctypes.create_string_buffer(255) loaded_lib.dlt_get_version(ctypes.byref(buf), 255) # buf would be something like: # DLT Package Version: X.XX.X STABLE, Package Revision: vX.XX.XX build on Jul XX XXXX XX:XX:XX # -SYSTEMD -SYSTEMD_WATCHDOG -TEST -SHM if six.PY3: buf_split = buf.value.decode().split() else: buf_split = buf.value.split() API_VER = buf_split[3] return API_VER def get_api_specific_file(version): """Return specific version api filename""" version_tuple = [int(num) for num in version.split('.')] if version_tuple[-1] != 0: # The mirror version does not exist, try to truncate version_tuple = version_tuple[:-1] + [0] name = 'core_{}.py'.format("".join((str(num) for num in version_tuple))) if not os.path.exists(os.path.join(os.path.dirname(os.path.abspath(__file__)), name)): raise ImportError("No module file: {}".format(name)) return name def 
check_libdlt_version(api_ver): """Check the version compatibility. python-dlt now only supports to run libdlt 2.18.0 or above. """ ver_info = tuple(int(num) for num in api_ver.split('.')) if ver_info < (2, 18): raise ImportError("python-dlt only supports libdlt v2.18.0 or above") API_VER = get_version(dltlib) check_libdlt_version(API_VER) # Load version specific definitions, if such a file exists, possibly # overriding above definitions # # The intent is to have version specific implementations to be able to # provide declarations *incrementally*. # # For instance if version 2.17.0 introduces new changes in addition to # retaining all changes from 2.16.0, then core_2170.py would import # core_2160.py and declare only version specific changes/overrides. The # loading logic here below should not require changes. # # This allows the implementation below to import just one final module # (as opposed to loading multiple implementations in a specific order) # to provide new/overriding implementations. api_specific_file = get_api_specific_file(API_VER) overrides = __import__('dlt.core.{}'.format(api_specific_file[:-3]), globals(), locals(), ['*']) locals().update(overrides.__dict__) python-dlt-2.0/dlt/core/core_2180.py000066400000000000000000000047641354710635300171660ustar00rootroot00000000000000# Copyright (C) 2019. BMW Car IT GmbH. All rights reserved. """v2.18.0 specific class definitions""" import ctypes class cDltReceiver(ctypes.Structure): # pylint: disable=invalid-name """The structure is used to organise the receiving of data including buffer handling. This structure is used by the corresponding functions. 
typedef struct { int32_t lastBytesRcvd; /**< bytes received in last receive call */ int32_t bytesRcvd; /**< received bytes */ int32_t totalBytesRcvd; /**< total number of received bytes */ char *buffer; /**< pointer to receiver buffer */ char *buf; /**< pointer to position within receiver buffer */ char *backup_buf; /** pointer to the buffer with partial messages if any **/ int fd; /**< connection handle */ int32_t buffersize; /**< size of receiver buffer */ } DltReceiver; """ _fields_ = [("lastBytesRcvd", ctypes.c_int32), ("bytesRcvd", ctypes.c_int32), ("totalBytesRcvd", ctypes.c_int32), ("buffer", ctypes.POINTER(ctypes.c_char)), ("buf", ctypes.POINTER(ctypes.c_char)), ("backup_buf", ctypes.POINTER(ctypes.c_char)), ("fd", ctypes.c_int), ("buffersize", ctypes.c_int32)] class cDltClient(ctypes.Structure): # pylint: disable=invalid-name """ typedef struct { DltReceiver receiver; /**< receiver pointer to dlt receiver structure */ int sock; /**< sock Connection handle/socket */ char *servIP; /**< servIP IP adress/Hostname of TCP/IP interface */ int port; /**< Port for TCP connections (optional) */ char *serialDevice; /**< serialDevice Devicename of serial device */ char *socketPath; /**< socketPath Unix socket path */ char ecuid[4]; /**< ECUiD */ speed_t baudrate; /**< baudrate Baudrate of serial interface, as speed_t */ DltClientMode mode; /**< mode DltClientMode */ } DltClient; """ _fields_ = [("receiver", cDltReceiver), ("sock", ctypes.c_int), ("servIP", ctypes.c_char_p), ("port", ctypes.c_int), ("serialDevice", ctypes.c_char_p), ("socketPath", ctypes.c_char_p), ("ecuid", ctypes.c_char * 4), ("baudrate", ctypes.c_uint), ("mode", ctypes.c_int)] python-dlt-2.0/dlt/core/core_base.py000066400000000000000000000466521354710635300175100ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
"""Default implementation of the ctypes bindings for the DLT library""" import ctypes # pylint: disable=too-few-public-methods,invalid-name,consider-using-ternary dltlib = ctypes.cdll.LoadLibrary('libdlt.so.2') DLT_ID_SIZE = 4 DLT_FILTER_MAX = 30 # Maximum number of filters DLT_HTYP_UEH = 0x01 # use extended header DLT_HTYP_WEID = 0x04 # with ECU ID DLT_HTYP_WSID = 0x08 # with Session ID DLT_HTYP_WTMS = 0x10 # with timestamp DLT_MESSAGE_ERROR_OK = 0 DLT_DAEMON_TEXTSIZE = 10024 DLT_OUTPUT_HEX = 1 DLT_OUTPUT_ASCII = 2 DLT_OUTPUT_MIXED_FOR_PLAIN = 3 DLT_OUTPUT_MIXED_FOR_HTML = 4 DLT_OUTPUT_ASCII_LIMITED = 5 DLT_RETURN_ERROR = -1 DLT_RETURN_OK = 0 DLT_RETURN_TRUE = 1 # DltClientMode from dlt_client.h DLT_CLIENT_MODE_UNDEFINED = -1 DLT_CLIENT_MODE_TCP = 0 DLT_CLIENT_MODE_SERIAL = 1 DLT_CLIENT_MODE_UNIX = 2 # DltReceiverType from dlt_common.h # DltReceiverType is an enum type. These definitions could not be found in shared library (libdlt.so) so # the enum values are defined here. DLT_RECEIVE_SOCKET = 0 DLT_RECEIVE_FD = 1 DLT_TYPE_LOG = 0x00 # Log message type DLT_TYPE_APP_TRACE = 0x01 # Application trace message type DLT_TYPE_NW_TRACE = 0x02 # Network trace message type DLT_TYPE_CONTROL = 0x03 # Control message type DLT_CONTROL_REQUEST = 0x01 DLT_CONTROL_RESPONSE = 0x02 # Response to request message DLT_CONTROL_TIME = 0x03 DLT_MSIN_MSTP_SHIFT = 1 # shift right offset to get mstp value DLT_MSIN_MTIN_SHIFT = 4 # shift right offset to get mtin value DLT_MSIN_MSTP = 0x0e # message type DLT_MSIN_MTIN = 0xf0 # message type info DLT_MSIN_VERB = 0x01 # verbose mode DLT_MSIN_CONTROL_RESPONSE = (DLT_TYPE_CONTROL << DLT_MSIN_MSTP_SHIFT) | (DLT_CONTROL_RESPONSE << DLT_MSIN_MTIN_SHIFT) # dlt_protocol.h DLT_SERVICE_ID_GET_SOFTWARE_VERSION = 0x13 # Service ID: Get software version DLT_SERVICE_ID_UNREGISTER_CONTEXT = 0xf01 # Service ID: Message unregister context DLT_SERVICE_ID_CONNECTION_INFO = 0xf02 # Service ID: Message connection info DLT_SERVICE_ID_TIMEZONE = 0xf03 # Service ID: 
Timezone DLT_SERVICE_ID_MARKER = 0xf04 # Service ID: Marker DLT_CONNECTION_STATUS_DISCONNECTED = 0x01 # Client is disconnected DLT_CONNECTION_STATUS_CONNECTED = 0x02 # Client is connected DLT_TYPE_INFO_TYLE = 0x0000000f # Length of standard data: 1 = 8bit, 2 = 16bit, 3 = 32 bit, 4 = 64 bit, 5 = 128 bit DLT_TYPE_INFO_BOOL = 0x00000010 # Boolean data DLT_TYPE_INFO_SINT = 0x00000020 # Signed integer data DLT_TYPE_INFO_UINT = 0x00000040 # Unsigned integer data DLT_TYPE_INFO_FLOA = 0x00000080 # Float data DLT_TYPE_INFO_ARAY = 0x00000100 # Array of standard types DLT_TYPE_INFO_STRG = 0x00000200 # String DLT_TYPE_INFO_RAWD = 0x00000400 # Raw data DLT_TYPE_INFO_VARI = 0x00000800 # Set, if additional information to a variable is available DLT_TYPE_INFO_FIXP = 0x00001000 # Set, if quantization and offset are added DLT_TYPE_INFO_TRAI = 0x00002000 # Set, if additional trace information is added DLT_TYPE_INFO_STRU = 0x00004000 # Struct DLT_TYPE_INFO_SCOD = 0x00038000 # coding of the type string: 0 = ASCII, 1 = UTF-8 DLT_SCOD_ASCII = 0x00000000 DLT_SCOD_UTF8 = 0x00008000 DLT_SCOD_HEX = 0x00010000 DLT_SCOD_BIN = 0x00018000 DLT_TYLE_8BIT = 0x00000001 DLT_TYLE_16BIT = 0x00000002 DLT_TYLE_32BIT = 0x00000003 DLT_TYLE_64BIT = 0x00000004 DLT_TYLE_128BIT = 0x00000005 DLT_DAEMON_TCP_PORT = 3490 DLT_CLIENT_RCVBUFSIZE = 10024 # Size of client receive buffer from dlt_client_cfg.h # dlt-viever/qdltbase.cpp qDltMessageType = [b"log", b"app_trace", b"nw_trace", b"control", b"", b"", b"", b""] qDltLogInfo = [b"", b"fatal", b"error", b"warn", b"info", b"debug", b"verbose", b"", b"", b"", b"", b"", b"", b"", b"", b""] qDltTraceType = [b"", b"variable", b"func_in", b"func_out", b"state", b"vfb", b"", b"", b"", b"", b"", b"", b"", b"", b"", b""] qDltNwTraceType = [b"", b"ipc", b"can", b"flexray", b"most", b"vfb", b"", b"", b"", b"", b"", b"", b"", b"", b"", b""] qDltControlType = [b"", b"request", b"response", b"time", b"", b"", b"", b"", b"", b"", b"", b"", b"", b"", b"", b""] cqDltMode = 
[b"non-verbose", b"verbose"] qDltEndianness = [b"little-endian", b"big-endian"] cqDltTypeInfo = [b"String", b"Bool", b"SignedInteger", b"UnsignedInteger", b"Float", b"RawData", b"TraceInfo", b"Utf8String"] qDltCtrlServiceId = [b"", b"set_log_level", b"set_trace_status", b"get_log_info", b"get_default_log_level", b"store_config", b"reset_to_factory_default", b"set_com_interface_status", b"set_com_interface_max_bandwidth", b"set_verbose_mode", b"set_message_filtering", b"set_timing_packets", b"get_local_time", b"use_ecu_id", b"use_session_id", b"use_timestamp", b"use_extended_header", b"set_default_log_level", b"set_default_trace_status", b"get_software_version", b"message_buffer_overflow"] qDltCtrlReturnType = [b"ok", b"not_supported", b"error", b"3", b"4", b"5", b"6", b"7", b"no_matching_context_id"] class cDltServiceConnectionInfo(ctypes.Structure): """ typedef struct { uint32_t service_id; /**< service ID */ uint8_t status; /**< reponse status */ uint8_t state; /**< new state */ char comid[DLT_ID_SIZE]; /**< communication interface */ } PACKED DltServiceConnectionInfo; """ _fields_ = [("service_id", ctypes.c_uint32), ("status", ctypes.c_uint8), ("state", ctypes.c_uint8), ("comid", DLT_ID_SIZE * ctypes.c_byte)] _pack_ = 1 class MessageMode(object): """Default properties for the DLTMessage""" # pylint: disable=no-member @property def use_extended_header(self): """Returns True if the DLTMessage has extended header""" return self.standardheader.htyp & DLT_HTYP_UEH @property def is_mode_verbose(self): """Returns True if the DLTMessage is set to verbose mode""" return self.extendedheader.msin & DLT_MSIN_VERB @property def mode_string(self): """Returns 'verbose' if DLTMessage is set to verbose mode. 
Otherwise 'non-verbose'""" return b'verbose' if self.is_mode_verbose else b'non-verbose' @property def is_mode_non_verbose(self): """Returns True if the DLTMessage is set to non-verbose mode""" return not self.is_mode_verbose @property def is_type_control(self): """Returns True if the DLTMessage type is control""" return self.standardheader.htyp & DLT_TYPE_CONTROL @property def is_type_control_response(self): """Returns True if the DLTMessage type is control response""" return self.standardheader.htyp & DLT_MSIN_CONTROL_RESPONSE @property def message_id(self): """Returns message ID of the DLTMessage""" if self.is_mode_non_verbose and (self.datasize >= 4): ptr_int = ctypes.cast(self.databuffer, ctypes.POINTER(ctypes.c_uint32)) mid = ptr_int[0] return mid return 0 @property def message_id_string(self): """Returns string representation of message ID""" mid = self.message_id return qDltCtrlServiceId[mid] if 0 <= mid <= len(qDltCtrlServiceId) else b"" @property def ctrl_service_id(self): """Returns service ID of the DLTMessage""" service_id = 0 if self.is_type_control and self.datasize >= 4: ptr_int = ctypes.cast(self.databuffer, ctypes.POINTER(ctypes.c_uint32)) service_id = ptr_int[0] return service_id @property def ctrl_service_id_string(self): """Returns string representation of service ID""" sid = self.ctrl_service_id if sid == DLT_SERVICE_ID_UNREGISTER_CONTEXT: return b"unregister_context" if sid == DLT_SERVICE_ID_CONNECTION_INFO: return b"connection_info" if sid == DLT_SERVICE_ID_TIMEZONE: return b"timezone" if sid == DLT_SERVICE_ID_MARKER: return b"marker" return qDltCtrlServiceId[sid] if sid <= 20 else b"" @property def ctrl_return_type(self): """Returns ctrl type of the DLTMessage""" return_type = 0 if self.is_type_control and (self.is_type_control_response and self.datasize >= 6): return_type = self.databuffer[4] return return_type @property def ctrl_return_type_string(self): """Returns string representation of ctrl type""" return 
qDltCtrlReturnType[self.ctrl_return_type] if self.ctrl_return_type <= 8 else b"" @property def type(self): """Returns message type of the DLTMessage""" return (self.extendedheader.msin & DLT_MSIN_MSTP) >> DLT_MSIN_MSTP_SHIFT @property def type_string(self): """Returns string representation of the message type""" mtype = self.type return qDltMessageType[mtype] if 0 <= mtype <= 7 else b"" @property def subtype(self): """Returns message subtype of the DLTMessage""" return (self.extendedheader.msin & DLT_MSIN_MTIN) >> DLT_MSIN_MTIN_SHIFT @property def subtype_string(self): """Returns string representation of the message subtype""" mtype = self.type msubtype = self.subtype if mtype == DLT_TYPE_LOG: return qDltLogInfo[msubtype] if 0 <= msubtype <= 7 else b"" if mtype == DLT_TYPE_APP_TRACE: return qDltTraceType[msubtype] if 0 <= msubtype <= 7 else b"" if mtype == DLT_TYPE_NW_TRACE: return qDltNwTraceType[msubtype] if 0 <= msubtype <= 7 else b"" if mtype == DLT_TYPE_CONTROL: return qDltControlType[msubtype] if 0 <= msubtype <= 7 else b"" return b"" @property def payload_decoded(self): """Decode the payload data :returns: Payload data :rtype: str """ text = b"" if self.is_mode_non_verbose and not self.is_type_control and self.noar == 0: buf = ctypes.create_string_buffer('\000' * DLT_DAEMON_TEXTSIZE) dltlib.dlt_message_payload(ctypes.byref(self), buf, DLT_DAEMON_TEXTSIZE, DLT_OUTPUT_ASCII, self.verbose) return b"[{}] #{}#".format(self.message_id_string, buf.value[4:]) if self.type == DLT_TYPE_CONTROL and self.subtype == DLT_CONTROL_RESPONSE: if self.ctrl_service_id == DLT_SERVICE_ID_MARKER: return b"MARKER" text = b"[%s %s] " % (self.ctrl_service_id_string, self.ctrl_return_type_string) service_id = self.ctrl_service_id if self.ctrl_service_id == DLT_SERVICE_ID_GET_SOFTWARE_VERSION: text += ctypes.string_at(self.databuffer, self.datasize)[9:] elif self.ctrl_service_id == DLT_SERVICE_ID_CONNECTION_INFO: if self.datasize == ctypes.sizeof(cDltServiceConnectionInfo): conn_info = 
cDltServiceConnectionInfo.from_buffer(bytearray(self.databuffer[:self.datasize])) if conn_info.state == DLT_CONNECTION_STATUS_DISCONNECTED: text += b"disconnected" elif conn_info.state == DLT_CONNECTION_STATUS_CONNECTED: text += b"connected" else: text += b"unknown" text += b" " + ctypes.string_at(conn_info.comid, DLT_ID_SIZE) else: text += ctypes.string_at(self.databuffer, self.datasize)[5:256+5] elif service_id == DLT_SERVICE_ID_TIMEZONE: text += ctypes.string_at(self.databuffer, self.datasize)[5:256+5] else: buf = ctypes.create_string_buffer(b'\000' * DLT_DAEMON_TEXTSIZE) dltlib.dlt_message_payload(ctypes.byref(self), buf, DLT_DAEMON_TEXTSIZE, DLT_OUTPUT_ASCII, self.verbose) text += buf.value return text if self.type == DLT_TYPE_CONTROL: return b"[{}] {}".format(self.ctrl_service_id_string, ctypes.string_at(self.databuffer, self.datasize)[4:256+4]) buf = ctypes.create_string_buffer(b'\000' * DLT_DAEMON_TEXTSIZE) dltlib.dlt_message_payload(ctypes.byref(self), buf, DLT_DAEMON_TEXTSIZE, DLT_OUTPUT_ASCII, self.verbose) return buf.value class cDltStorageHeader(ctypes.Structure): """ /** * The structure of the DLT file storage header. This header is used before each stored DLT message. */ typedef struct { char pattern[DLT_ID_SIZE]; /**< This pattern should be DLT0x01 */ uint32_t seconds; /**< seconds since 1.1.1970 */ int32_t microseconds; /**< Microseconds */ char ecu[DLT_ID_SIZE]; /**< The ECU id is added, if it is not already in the DLT message itself */ } PACKED DltStorageHeader; """ _fields_ = [("pattern", ctypes.c_char * DLT_ID_SIZE), ("seconds", ctypes.c_uint32), ("microseconds", ctypes.c_int32), ("ecu", ctypes.c_char * DLT_ID_SIZE)] _pack_ = 1 def __reduce__(self): return (cDltStorageHeader, (self.pattern, self.seconds, self.microseconds, self.ecu)) class cDltStandardHeader(ctypes.BigEndianStructure): """The structure of the DLT standard header. This header is used in each DLT message. 
typedef struct { uint8_t htyp; /**< This parameter contains several informations, see definitions below */ uint8_t mcnt; /**< The message counter is increased with each sent DLT message */ uint16_t len; /**< Length of the complete message, without storage header */ } PACKED DltStandardHeader; """ _fields_ = [("htyp", ctypes.c_uint8), ("mcnt", ctypes.c_uint8), ("len", ctypes.c_ushort)] _pack_ = 1 def __reduce__(self): return (cDltStandardHeader, (self.htyp, self.mcnt, self.len)) class cDltStandardHeaderExtra(ctypes.Structure): """The structure of the DLT extra header parameters. Each parameter is sent only if enabled in htyp. typedef struct { char ecu[DLT_ID_SIZE]; /**< ECU id */ uint32_t seid; /**< Session number */ uint32_t tmsp; /**< Timestamp since system start in 0.1 milliseconds */ } PACKED DltStandardHeaderExtra; """ _fields_ = [("ecu", ctypes.c_char * DLT_ID_SIZE), ("seid", ctypes.c_uint32), ("tmsp", ctypes.c_uint32)] _pack_ = 1 def __reduce__(self): return (cDltStandardHeaderExtra, (self.ecu, self.seid, self.tmsp)) class cDltExtendedHeader(ctypes.Structure): """The structure of the DLT extended header. This header is only sent if enabled in htyp parameter. typedef struct { uint8_t msin; /**< messsage info */ uint8_t noar; /**< number of arguments */ char apid[DLT_ID_SIZE]; /**< application id */ char ctid[DLT_ID_SIZE]; /**< context id */ } PACKED DltExtendedHeader; """ _fields_ = [("msin", ctypes.c_uint8), ("noar", ctypes.c_uint8), ("apid", ctypes.c_char * DLT_ID_SIZE), ("ctid", ctypes.c_char * DLT_ID_SIZE)] _pack_ = 1 def __reduce__(self): return (cDltExtendedHeader, (self.msin, self.noar, self.apid, self.ctid)) class cDLTMessage(ctypes.Structure): """The structure of the DLT messages. 
typedef struct sDltMessage { /* flags */ int8_t found_serialheader; /* offsets */ int32_t resync_offset; /* size parameters */ int32_t headersize; /**< size of complete header including storage header */ int32_t datasize; /**< size of complete payload */ /* buffer for current loaded message */ uint8_t headerbuffer[sizeof(DltStorageHeader)+ sizeof(DltStandardHeader)+sizeof(DltStandardHeaderExtra)+sizeof(DltExtendedHeader)]; /**< buffer for loading complete header */ uint8_t *databuffer; /**< buffer for loading payload */ int32_t databuffersize; /* header values of current loaded message */ DltStorageHeader *storageheader; /**< pointer to storage header of current loaded header */ DltStandardHeader *standardheader; /**< pointer to standard header of current loaded header */ DltStandardHeaderExtra headerextra; /**< extra parameters of current loaded header */ DltExtendedHeader *extendedheader; /**< pointer to extended of current loaded header */ } DltMessage; """ _fields_ = [("found_serialheader", ctypes.c_int8), ("resync_offset", ctypes.c_int32), ("headersize", ctypes.c_int32), ("datasize", ctypes.c_int32), ("headerbuffer", ctypes.c_uint8 * (ctypes.sizeof(cDltStorageHeader) + ctypes.sizeof(cDltStandardHeader) + ctypes.sizeof(cDltStandardHeaderExtra) + ctypes.sizeof(cDltExtendedHeader))), ("databuffer", ctypes.POINTER(ctypes.c_uint8)), ("databuffersize", ctypes.c_uint32), ("p_storageheader", ctypes.POINTER(cDltStorageHeader)), ("p_standardheader", ctypes.POINTER(cDltStandardHeader)), ("headerextra", cDltStandardHeaderExtra), ("p_extendedheader", ctypes.POINTER(cDltExtendedHeader))] class cDltReceiver(ctypes.Structure): """The structure is used to organise the receiving of data including buffer handling. This structure is used by the corresponding functions. 
typedef struct { int32_t lastBytesRcvd; /**< bytes received in last receive call */ int32_t bytesRcvd; /**< received bytes */ int32_t totalBytesRcvd; /**< total number of received bytes */ char *buffer; /**< pointer to receiver buffer */ char *buf; /**< pointer to position within receiver buffer */ int fd; /**< connection handle */ int32_t buffersize; /**< size of receiver buffer */ } DltReceiver; """ _fields_ = [("lastBytesRcvd", ctypes.c_int32), ("bytesRcvd", ctypes.c_int32), ("totalBytesRcvd", ctypes.c_int32), ("buffer", ctypes.POINTER(ctypes.c_char)), ("buf", ctypes.POINTER(ctypes.c_char)), ("fd", ctypes.c_int), ("buffersize", ctypes.c_int32)] class cDltClient(ctypes.Structure): """ typedef struct { DltReceiver receiver; /**< receiver pointer to dlt receiver structure */ int sock; /**< sock Connection handle/socket */ char *servIP; /**< servIP IP adress/Hostname of TCP/IP interface */ char *serialDevice; /**< serialDevice Devicename of serial device */ speed_t baudrate; /**< baudrate Baudrate of serial interface, as speed_t */ int serial_mode; /**< serial_mode Serial mode enabled =1, disabled =0 */ } DltClient; """ _fields_ = [("receiver", cDltReceiver), ("sock", ctypes.c_int), ("servIP", ctypes.c_char_p), ("serialDevice", ctypes.c_char_p), ("baudrate", ctypes.c_int), ("serial_mode", ctypes.c_int)] python-dlt-2.0/dlt/dlt.py000066400000000000000000001302021354710635300154020ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. 
"""Pure Python implementation of DLT library""" # pylint: disable=too-many-lines from __future__ import absolute_import import ctypes import logging import os import re import socket import struct import time import threading import six from dlt.core import ( dltlib, DLT_ID_SIZE, DLT_HTYP_WEID, DLT_HTYP_WSID, DLT_HTYP_WTMS, DLT_HTYP_UEH, DLT_RETURN_OK, DLT_RETURN_ERROR, DLT_RETURN_TRUE, DLT_FILTER_MAX, DLT_MESSAGE_ERROR_OK, cDltExtendedHeader, cDltClient, MessageMode, cDLTMessage, cDltStorageHeader, cDltStandardHeader, DLT_TYPE_INFO_UINT, DLT_TYPE_INFO_SINT, DLT_TYPE_INFO_STRG, DLT_TYPE_INFO_SCOD, DLT_TYPE_INFO_TYLE, DLT_TYPE_INFO_VARI, DLT_TYPE_INFO_RAWD, DLT_SCOD_ASCII, DLT_SCOD_UTF8, DLT_TYLE_8BIT, DLT_TYLE_16BIT, DLT_TYLE_32BIT, DLT_TYLE_64BIT, DLT_TYLE_128BIT, DLT_DAEMON_TCP_PORT, DLT_CLIENT_RCVBUFSIZE, DLT_RECEIVE_SOCKET, ) from dlt.helpers import bytes_to_str try: # Use xrange by default on Python 2 range = xrange # pylint: disable=redefined-builtin,undefined-variable,invalid-name except Exception: # pylint: disable=broad-except pass MAX_LOG_IN_ROW = 3 # Return value for DLTFilter.add() - exceeded maximum number of filters MAX_FILTER_REACHED = 1 # Return value for DLTFilter.add() - specified filter already exists REPEATED_FILTER = 2 logger = logging.getLogger(__name__) # pylint: disable=invalid-name DLT_EMPTY_FILE_ERROR = "DLT TRACE FILE IS EMPTY" cDLT_FILE_NOT_OPEN_ERROR = "Could not open DLT Trace file (libdlt)" # pylint: disable=invalid-name class cached_property(object): # pylint: disable=invalid-name """ A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the property. 
Copyright: Marcel Hellkamp Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76 Licence: MIT """ # noqa def __init__(self, func): self.__doc__ = getattr(func, '__doc__') self.func = func def __get__(self, obj, cls): if obj is None: return self value = obj.__dict__[self.func.__name__] = self.func(obj) return value class DLTFilter(ctypes.Structure): """Structure to store filter parameters. ID are maximal four characters. Unused values are filled with zeros. If every value as filter is valid, the id should be empty by having only zero values. typedef struct { char apid[DLT_FILTER_MAX][DLT_ID_SIZE]; /**< application id */ char ctid[DLT_FILTER_MAX][DLT_ID_SIZE]; /**< context id */ int counter; /**< number of filters */ } DltFilter; """ _fields_ = [("apid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX), ("ctid", (ctypes.c_char * DLT_ID_SIZE) * DLT_FILTER_MAX), ("counter", ctypes.c_int)] verbose = 0 def __init__(self, **kwords): self.verbose = kwords.pop("verbose", 0) if dltlib.dlt_filter_init(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not initialize DLTFilter") super(DLTFilter, self).__init__(**kwords) def __del__(self): if dltlib.dlt_filter_free(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not cleanup DLTFilter") def add(self, apid, ctid): """Add new filter pair""" if six.PY3: if isinstance(apid, str): apid = bytes(apid, "ascii") if isinstance(ctid, str): ctid = bytes(ctid, "ascii") if dltlib.dlt_filter_add(ctypes.byref(self), apid or b"", ctid or b"", self.verbose) == DLT_RETURN_ERROR: if self.counter >= DLT_FILTER_MAX: logger.error("Maximum number (%d) of allowed filters reached, ignoring filter!\n", DLT_FILTER_MAX) return MAX_FILTER_REACHED logger.debug("Filter ('%s', '%s') already exists", apid, ctid) return REPEATED_FILTER return 0 def __repr__(self): """return the 'official' string representation of an object""" apids = 
[ctypes.string_at(entry[:DLT_ID_SIZE]) for entry in self.apid] ctids = [ctypes.string_at(entry[:DLT_ID_SIZE]) for entry in self.ctid] return str(list(zip(apids[:self.counter], ctids[:self.counter]))) def __nonzero__(self): """Truth value testing""" return self.counter > 0 __bool__ = __nonzero__ class Payload(object): """Payload object encapsulates the payload decoding and list-like access to arguments""" def __init__(self, message): self._params = None self._noar = message.noar self._buf = ctypes.string_at(message.databuffer, message.datasize) def __getitem__(self, index): """Accessing the payload item as a list""" if index < 0 or index > self._noar: return IndexError() # we have parsed it already - just return the item if self._params is not None: return self._params[index] self._parse_payload() return self._params[index] def _parse_payload(self): # pylint: disable=too-many-branches,too-many-statements """Parse the payload into list of arguments""" self._params = [] offset = 0 for _ in range(self._noar): type_info = struct.unpack_from("I", self._buf, offset)[0] offset += struct.calcsize("I") def get_scod(type_info): """Helper function""" return type_info & DLT_TYPE_INFO_SCOD value = None if type_info & DLT_TYPE_INFO_STRG: if (get_scod(type_info) == DLT_SCOD_ASCII) or (get_scod(type_info) == DLT_SCOD_UTF8): length = struct.unpack_from("H", self._buf, offset)[0] offset += struct.calcsize("H") value = self._buf[offset:offset + length - 1] # strip the string terminating char \x00 offset += length elif type_info & DLT_TYPE_INFO_UINT: if type_info & DLT_TYPE_INFO_VARI: pass tyle = type_info & DLT_TYPE_INFO_TYLE if tyle == DLT_TYLE_8BIT: value = struct.unpack_from("B", self._buf, offset)[0] offset += 1 elif tyle == DLT_TYLE_16BIT: value = struct.unpack_from("H", self._buf, offset)[0] offset += 2 elif tyle == DLT_TYLE_32BIT: value = struct.unpack_from("I", self._buf, offset)[0] offset += 4 elif tyle == DLT_TYLE_64BIT: value = struct.unpack_from("Q", self._buf, offset)[0] 
offset += 8 elif tyle == DLT_TYLE_128BIT: raise TypeError("reading 128BIT values not supported") elif type_info & DLT_TYPE_INFO_SINT: if type_info & DLT_TYPE_INFO_VARI: pass tyle = type_info & DLT_TYPE_INFO_TYLE if tyle == DLT_TYLE_8BIT: value = struct.unpack_from("b", self._buf, offset)[0] offset += 1 elif tyle == DLT_TYLE_16BIT: value = struct.unpack_from("h", self._buf, offset)[0] offset += 2 elif tyle == DLT_TYLE_32BIT: value = struct.unpack_from("i", self._buf, offset)[0] offset += 4 elif tyle == DLT_TYLE_64BIT: value = struct.unpack_from("q", self._buf, offset)[0] offset += 8 elif tyle == DLT_TYLE_128BIT: raise TypeError("reading 128BIT values not supported") elif type_info & DLT_TYPE_INFO_RAWD: if type_info & DLT_TYPE_INFO_VARI: pass length = struct.unpack_from("H", self._buf, offset)[0] offset += struct.calcsize("H") value = self._buf[offset:offset + length] offset += length else: value = "ERROR" self._params.append(value) def __len__(self): """Return number of parsed parameters""" if self._params is None: self._parse_payload() return len(self._params) class DLTMessage(cDLTMessage, MessageMode): """Python wrapper class for the cDLTMessage structure""" verbose = 0 # object is not initialized if the message is loaded from a file initialized_as_object = False def __init__(self, *args, **kwords): self.initialized_as_object = True self.verbose = kwords.pop("verbose", 0) if self.verbose: logger.debug("DLTMessage._init_(%s)", kwords) self.lifecycle = None if dltlib.dlt_message_init(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not initialize DLTMessage") super(DLTMessage, self).__init__(*args, **kwords) def __reduce__(self): """Pickle serialization API This method is called by the pickle module to serialize objects that it cannot automatically serialize. 
""" # copy the data from the databuffer pointer into an array databuffer = ctypes.ARRAY(ctypes.c_uint8, self.datasize)() ctypes.memmove(databuffer, self.databuffer, self.datasize) init_args = (self.found_serialheader, self.resync_offset, self.headersize, self.datasize) state_dict = {'headerbuffer': bytearray(self.headerbuffer), 'databuffer': bytearray(databuffer), 'databuffersize': self.databuffersize, 'storageheader': self.storageheader, 'standardheader': self.standardheader, 'headerextra': self.headerextra, 'extendedheader': self.extendedheader, } return (DLTMessage, init_args, state_dict) # pylint: disable=attribute-defined-outside-init def __setstate__(self, state): """Pickle deserialization API This method is called by the pickle module to populate a deserialized object's state after it has been created. """ self.databuffersize = state['databuffersize'] self.p_storageheader.contents = state['storageheader'] self.p_standardheader.contents = state['standardheader'] self.headerextra = state['headerextra'] self.p_extendedheader.contents = state['extendedheader'] # - populate databuffer databuffer = ctypes.ARRAY(ctypes.c_uint8, self.datasize)() for index, byte in enumerate(state['databuffer']): databuffer[index] = byte self.databuffer = databuffer # - populate headerbuffer for index, byte in enumerate(state['headerbuffer']): self.headerbuffer[index] = byte # - This is required because we are not calling # dlt_message_init() so we do not need to call # dlt_message_free() self.initialized_as_object = False @staticmethod def from_bytes(data): """Create a class instance from a byte string in DLT storage format""" msg = DLTMessage() storageheader, remainder = msg.extract_storageheader(data) buf = ctypes.create_string_buffer(remainder) dltlib.dlt_message_read(ctypes.byref(msg), ctypes.cast(buf, ctypes.POINTER(ctypes.c_uint8)), ctypes.c_uint(len(remainder)), 0, # resync 0) # verbose msg.p_storageheader.contents = storageheader msg.initialized_as_object = False return msg 
def to_bytes(self): """Create DLT storage format bytes from DLTMessage instance""" return ctypes.string_at(self.headerbuffer, self.headersize) + ctypes.string_at(self.databuffer, self.datasize) def __copy__(self): """Create a copy of the message""" return DLTMessage.from_bytes(self.to_bytes()) @staticmethod def extract_storageheader(data): """Split binary message data into storage header and remainder""" header = data[0:ctypes.sizeof(cDltStorageHeader)] # pylint: disable=no-member return (cDltStorageHeader.from_buffer_copy(header), data[ctypes.sizeof(cDltStorageHeader):]) @staticmethod def extract_sort_data(data): """Extract timestamp, message length, apid, ctid from a bytestring in DLT storage format (speed optimized)""" htyp_data = ord(chr(data[16])) if six.PY3 else ord(data[16]) len_data = data[19:17:-1] len_value = ctypes.cast(len_data, ctypes.POINTER(ctypes.c_ushort)).contents.value + 16 apid = b"" ctid = b"" tmsp_value = 0.0 bytes_offset = 0 # We know where data will be in the message, but ... 
if not htyp_data & DLT_HTYP_WEID: # if there is no ECU ID and/or Session ID, then it will be earlier bytes_offset -= 4 if not htyp_data & DLT_HTYP_WSID: bytes_offset -= 4 if htyp_data & DLT_HTYP_WTMS: tmsp_base = 31 + bytes_offset # Typical timestamp end offset tmsp_data = data[tmsp_base:tmsp_base - 4:-1] tmsp_value = ctypes.cast(tmsp_data, ctypes.POINTER(ctypes.c_uint32)).contents.value / 10000.0 if htyp_data & DLT_HTYP_UEH: apid_base = 38 + bytes_offset # Typical APID end offset apid = data[apid_base - 4:apid_base].rstrip(b"\x00") ctid = data[apid_base:apid_base + 4].rstrip(b"\x00") apid = bytes_to_str(apid) ctid = bytes_to_str(ctid) return tmsp_value, len_value, apid, ctid def __del__(self): if self.initialized_as_object is True: if dltlib.dlt_message_free(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not free DLTMessage") @property def storageheader(self): """Workaround to get rid of need to call .contents""" try: return self.p_storageheader.contents except ValueError: return None @property def standardheader(self): """Workaround to get rid of need to call .contents""" return self.p_standardheader.contents @property def extendedheader(self): """Workaround to get rid of need to call .contents""" try: return self.p_extendedheader.contents except ValueError: return None def __eq__(self, other): """Equal test - not comparing storage header (contains timestamps)""" header1 = ctypes.string_at(self.headerbuffer, self.headersize)[ctypes.sizeof(cDltStorageHeader):] header2 = ctypes.string_at(other.headerbuffer, other.headersize)[ctypes.sizeof(cDltStorageHeader):] data1 = ctypes.string_at(self.databuffer, self.datasize) data2 = ctypes.string_at(other.databuffer, other.datasize) return header1 == header2 and data1 == data2 def compare(self, other=None): # pylint: disable=too-many-return-statements,too-many-branches """Compare messages by given attributes :param [DLTMessage|DLTFilter|dict] other: DLTMessage object (or DLTFilter or a dict 
with selected keys) to compare with. Use DLTFilter object with APID,CTID pairs for the best performance. :returns: True if all attributes match or False if any of the given attributes differs :rtype: bool :raises TypeError: if other is neither DLTMessage nor a dictionary Example: message.compare(other=message2) message.compare(message2) message.compare(other=dict(apid="AP1", ctid="CT1")) message.compare(dict(apid="AP1", ctid="CT1")) message.compare(dict(apid=re.compile(r"^A.*")) # match all messages which apid starting with A message.compare(dict(apid="AP1", ctid="CT1", payload_decoded=re.compile(r".connected.*"))) """ if hasattr(other, "apid") and hasattr(other, "ctid") and hasattr(other, "payload_decoded"): # other is DLTMessage - full compare return self.apid == other.apid and self.ctid == other.ctid and self.__eq__(other) # pylint: disable=protected-access if hasattr(other, "_fields_") and [x[0] for x in other._fields_] == ["apid", "ctid", "counter"]: # other id DLTFilter return dltlib.dlt_message_filter_check(ctypes.byref(self), ctypes.byref(other), 0) if not isinstance(other, dict): raise TypeError("other must be instance of mgu_dlt.dlt.DLTMessage, mgu_dlt.dlt.DLTFilter or a dictionary" " found: {}".format(type(other))) re_pattern_type = type(re.compile(r"type")) other = other.copy() apid = other.get("apid", None) if apid and not isinstance(apid, re_pattern_type) and self.apid != apid: return False ctid = other.get("ctid", None) if ctid and not isinstance(ctid, re_pattern_type) and self.ctid != ctid: return False for key, val in other.items(): if val is None: continue key = key.rsplit(".", 1)[-1] # In case the obsolete "extendedheader.apid" notation is used msg_val = getattr(self, key, b"") if not msg_val: return False if isinstance(val, re_pattern_type): if not val.search(msg_val): return False elif msg_val != val: return False return True def __str__(self): """Construct DLTViewer-like string""" out = [time.asctime(time.gmtime(self.storage_timestamp))] if 
self.headerextra: out.append(self.headerextra.tmsp / 10000.0) out += [self.standardheader.mcnt, self.storageheader.ecu] if self.extendedheader: out += [self.extendedheader.apid, self.extendedheader.ctid] if self.headerextra: out.append(self.headerextra.seid) out += [self.type_string, self.subtype_string, self.mode_string, self.noar, self.payload_decoded] return " ".join(bytes_to_str(item) for item in out) # convenient access to import DLT message attributes # no need to remember in which header are those attrs defined @cached_property def ecuid(self): # pylint: disable=invalid-overridden-method """Get the ECU ID :returns: ECU ID :rtype: str """ return bytes_to_str(self.storageheader.ecu or self.headerextra.ecu) @cached_property def mcnt(self): # pylint: disable=invalid-overridden-method """Get the message counter index :returns: message index :rtype: int """ return int(self.standardheader.mcnt) @cached_property def seid(self): # pylint: disable=invalid-overridden-method """Get the Session ID if WSID is set in the message type, otherwise 0 :returns: Session ID :rtype: int """ return int(self.headerextra.seid) if (self.standardheader.htyp & DLT_HTYP_WSID) else 0 @cached_property def tmsp(self): # pylint: disable=invalid-overridden-method """Get the timestamp :returns: timestamp :rtype: float [s] """ return (self.headerextra.tmsp / 10000.0) if (self.standardheader.htyp & DLT_HTYP_WTMS) else 0 @cached_property def apid(self): # pylint: disable=invalid-overridden-method """Get the Application ID :returns: Application ID :rtype: str """ return bytes_to_str(self.extendedheader.apid if self.extendedheader else "") @cached_property def ctid(self): # pylint: disable=invalid-overridden-method """Get the Context ID :returns: Context ID :rtype: str """ return bytes_to_str(self.extendedheader.ctid if self.extendedheader else "") @cached_property def noar(self): # pylint: disable=invalid-overridden-method """Get the number of arguments :returns: Context ID :rtype: str """ if 
self.use_extended_header and self.is_mode_verbose: return self.extendedheader.noar return 0 @cached_property def payload(self): # pylint: disable=invalid-overridden-method """Get the payload object :returns: Payload object :rtype: Payload """ return Payload(self) @cached_property def payload_decoded(self): # pylint: disable=invalid-overridden-method """Get the payload string :returns: Payload string :rtype: str """ return bytes_to_str(super(DLTMessage, self).payload_decoded) @cached_property def storage_timestamp(self): # pylint: disable=invalid-overridden-method """Get the storage header timestamp in seconds :returns: storage header timestamp :rtype: float """ return float("{}.{}".format(self.storageheader.seconds, self.storageheader.microseconds)) class cDLTFile(ctypes.Structure): # pylint: disable=invalid-name """The structure to organise the access to DLT files. This structure is used by the corresponding functions. typedef struct sDltFile { /* file handle and index for fast access */ FILE *handle; /**< file handle of opened DLT file */ long *index; /**< file positions of all DLT messages for fast access to file, only filtered messages */ /* size parameters */ int32_t counter; /**< number of messages in DLT file with filter */ int32_t counter_total; /**< number of messages in DLT file without filter */ int32_t position; /**< current index to message parsed in DLT file starting at 0 */ long file_length; /**< length of the file */ long file_position; /**< current position in the file */ /* error counters */ int32_t error_messages; /**< number of incomplete DLT messages found during file parsing */ /* filter parameters */ DltFilter *filter; /**< pointer to filter list. Zero if no filter is set. 
*/ int32_t filter_counter; /**< number of filter set */ /* current loaded message */ DltMessage msg; /**< pointer to message */ } DltFile; """ _fields_ = [("handle", ctypes.POINTER(ctypes.c_int)), ("index", ctypes.POINTER(ctypes.c_long)), ("counter", ctypes.c_int32), ("counter_total", ctypes.c_int32), ("position", ctypes.c_int32), ("file_length", ctypes.c_long), ("file_position", ctypes.c_long), ("error_messages", ctypes.c_int32), ("filter", ctypes.POINTER(DLTFilter)), ("filter_counter", ctypes.c_int32), ("msg", DLTMessage)] def __init__(self, **kwords): self.verbose = kwords.pop("verbose", 0) self.filename = kwords.pop("filename", None) if six.PY3 and isinstance(self.filename, str): self.filename = bytes(self.filename, "utf-8") super(cDLTFile, self).__init__(**kwords) if dltlib.dlt_file_init(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not initialize DLTFile") self._iter_index = 0 self.corrupt_msg_count = 0 self.indexed = False self.end = False self.live_run = kwords.pop("is_live", False) self.stop_reading = threading.Event() def __repr__(self): # pylint: disable=bad-continuation return ''.format( "filename={}".format(self.filename) if self.filename else "", self.counter_total) def __del__(self): if dltlib.dlt_file_free(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not cleanup DLTFile") def _find_next_header(self): """Helper function for generate_index to skip over invalid storage headers. 
:returns: Offset to the next storage header position (after self.file_position), if it was found, or position of EOF if not :rtype: int """ with open(self.filename, "rb") as fobj: last_position = self.file_position # pylint: disable=access-member-before-definition fobj.seek(last_position) buf = fobj.read(1024) while buf: found = buf.find(b"DLT\x01") if found != -1: return last_position + found last_position = fobj.tell() buf = fobj.read(1024) return None # pylint: disable=attribute-defined-outside-init,access-member-before-definition def generate_index(self): """Generate an index for the loaded DLT file :returns: True if file had been previously read and the index is successfully generated, otherwise False :rtype: bool """ if not self.filename: return False self.indexed = False if dltlib.dlt_file_open(ctypes.byref(self), self.filename, self.verbose) >= DLT_RETURN_OK: # load, analyse data file and create index list if self.file_length == 0: raise IOError(DLT_EMPTY_FILE_ERROR) while self.file_position < self.file_length: ret = dltlib.dlt_file_read(ctypes.byref(self), self.verbose) if ret < DLT_RETURN_OK: # - This can happen if either the frame's storage # header could not be read correctly or the frame is # corrupt. If the frame's storage header could not # be read correctly we try to get the next storage # header and continue indexing next_header_position = self._find_next_header() if next_header_position: if self.file_position == next_header_position: # pylint: disable=no-else-break # - This this implies that dltlib.dlt_read_file() # returned due to an error other than invalid storage # header because we already were at the correct # header_position in the last iteration. So, we # need to break out of the read/index loop. 
break else: self.file_position = next_header_position self.corrupt_msg_count += 1 else: break self.indexed = True else: raise IOError(cDLT_FILE_NOT_OPEN_ERROR) return self.indexed def read(self, filename, filters=None): """Index the DLT trace file for optimized DLT Message access :param str filename: DLT log filename to read the messages from :param list filters: List of filters to apply [("APPID", "CTID"), ...] :returns: True if file was read and indexed successfully, otherwise False :rtype: bool """ # load the filters self.set_filters(filters) if six.PY3 and isinstance(filename, str): filename = bytes(filename, "utf-8") # read and index file self.filename = filename self.generate_index() return self.indexed def set_filters(self, filters): """Set filters to optimize access""" if filters is not None: dlt_filter = DLTFilter(verbose=self.verbose) for apid, ctid in filters: if six.PY3: if isinstance(apid, str): apid = bytes(apid, "ascii") if isinstance(ctid, str): ctid = bytes(ctid, "ascii") dlt_filter.add(apid, ctid) self.filters = dlt_filter dltlib.dlt_file_set_filter(ctypes.byref(self), ctypes.byref(dlt_filter), self.verbose) def __getitem__(self, index): """Load a DLT message from opened file :param int index: Index of a message to load :returns: Loaded DLTMessage :rtype: DLTMessage object :raises IndexError: If message index is out of boundary """ if index < 0: if self.counter == 0: self.read(self.filename) index = self.counter + index if index == 0 and self.counter == 0: self.read(self.filename) if index < 0 or index >= self.counter: raise IndexError("Index out of range (0 < %d < %d)" % (index, self.counter)) dltlib.dlt_file_message(ctypes.byref(self), index, self.verbose) # deepcopy the object msg = DLTMessage.from_buffer_copy(self.msg) # pylint: disable=no-member msg.databuffer.contents = ctypes.create_string_buffer(self.msg.datasize) ctypes.memmove(msg.databuffer, self.msg.databuffer, msg.datasize) # set the new storage header pointer offset = 0 hdr = 
cDltStorageHeader.from_address(ctypes.addressof(msg.headerbuffer) + offset) # pylint: disable=no-member msg.p_storageheader = ctypes.pointer(hdr) # set the new standard header pointer offset = ctypes.sizeof(cDltStorageHeader) hdr = cDltStandardHeader.from_address(ctypes.addressof(msg.headerbuffer) + offset) # pylint: disable=no-member msg.p_standardheader = ctypes.pointer(hdr) # set the new extended header pointer if self.msg.use_extended_header: offset = ctypes.addressof(self.msg.p_extendedheader.contents) - ctypes.addressof(self.msg.headerbuffer) # pylint: disable=no-member hdr = cDltExtendedHeader.from_address(ctypes.addressof(msg.headerbuffer) + offset) msg.p_extendedheader = ctypes.pointer(hdr) return msg def _open_file(self): """Open the configured file for processing""" file_opened = False while not self.stop_reading.isSet(): if dltlib.dlt_file_open(ctypes.byref(self), self.filename, self.verbose) >= DLT_RETURN_OK: file_opened = True break if not self.live_run: break time.sleep(0.5) if not file_opened: logger.error("DLT FILE OPEN FAILED - Analysis will not be performed") raise IOError(cDLT_FILE_NOT_OPEN_ERROR) def _log_message_progress(self): """Logs current message for progress information""" length = os.stat(self.filename).st_size logger.debug( "Processed %s messages (%s%% of %sfile), next message is apid %s, ctid %s", self.position, int(100 * self.file_position / length), "live " if self.live_run else "", self.msg.apid, self.msg.ctid, ) def __iter__(self): # pylint: disable=too-many-branches """Iterate over messages in the file""" logger.debug("Starting File Read") logger.debug("File Position: %d File Counter: %d File Name: %s", self.file_position, self.counter, self.filename) cached_mtime = 0 cached_file_pos = 0 corruption_check_try = True self._open_file() found_data = False while not self.stop_reading.isSet() or corruption_check_try: # pylint: disable=too-many-nested-blocks os_stat = os.stat(self.filename) mtime = os_stat.st_mtime if mtime != 
cached_mtime and os_stat.st_size or corruption_check_try: cached_mtime = mtime corruption_check_try = False while dltlib.dlt_file_read(ctypes.byref(self), self.verbose) >= DLT_RETURN_OK: found_data = True if self.filter and dltlib.dlt_message_filter_check( ctypes.byref(self.msg), self.filter, 0) != DLT_RETURN_TRUE: continue index = self.position msg = self[index] if not index % 100000: self._log_message_progress() yield msg if cached_file_pos != self.file_position: # We were able to read messages, don't do a corrupt message check yet. corruption_check_try = True cached_file_pos = self.file_position else: next_header_position = self._find_next_header() if next_header_position: if self.file_position == next_header_position: if not self.live_run: logger.warning("Incomplete message while parsing DLT file at %s", self.file_position) break else: logger.warning("Found a corrupt message at %s, skipping it", self.file_position) self.file_position = next_header_position self.corrupt_msg_count += 1 corruption_check_try = True # Wait for further messages to determine if corrupt, else just end of file else: if not self.live_run: logger.info("End of file reached at %s", self.file_position) break time.sleep(0.1) if not found_data: raise IOError(DLT_EMPTY_FILE_ERROR) def __len__(self): """Returns filtered file length""" return self.counter class DLTClient(cDltClient): """DLTClient class takes care about correct initialization and cleanup""" verbose = 0 def __init__(self, **kwords): self.verbose = kwords.pop("verbose", 0) if dltlib.dlt_client_init(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not initialize DLTClient") if "servIP" in kwords: serv_ip = kwords.pop("servIP") if isinstance(serv_ip, str): serv_ip = serv_ip.encode('utf8') ip_init_state = dltlib.dlt_client_set_server_ip(ctypes.byref(self), ctypes.create_string_buffer(serv_ip)) if ip_init_state == DLT_RETURN_ERROR: raise RuntimeError("Could not initialize servIP for DLTClient") # 
attribute to hold a reference to the connected socket in case # we created a connection with a timeout (via python, as opposed # to dltlib). This avoids the socket object from being garbage # collected when it goes out of the connect() method scope self._connected_socket = None super(DLTClient, self).__init__(**kwords) # (re)set self.port, even for API version <2.16.0 since we use # it ourselves elsewhere self.port = kwords.get("port", DLT_DAEMON_TCP_PORT) def __del__(self): if dltlib.dlt_client_cleanup(ctypes.byref(self), self.verbose) == DLT_RETURN_ERROR: raise RuntimeError("Could not cleanup DLTClient") self.disconnect() def connect(self, timeout=None): """Connect to the server If timeout is provided, block on connect until timeout occurs. If timeout is not provided or is None, try to connect and return immediately :param int|None timeout: Seconds to wait for connection :returns: True if connected successfully, False otherwise :rtype: bool """ connected = None error_count = 0 if timeout: end_time = time.time() + timeout while time.time() < end_time: timeout_remaining = max(end_time - time.time(), 1) if timeout else None try: self._connected_socket = socket.create_connection((ctypes.string_at(self.servIP), self.port), timeout=timeout_remaining) except IOError as exc: if error_count < MAX_LOG_IN_ROW: logger.debug("DLT client connect failed to connect to %s:%s : %s", self.servIP, self.port, exc) error_count += 1 time.sleep(1) if self._connected_socket: # pylint: disable=attribute-defined-outside-init self.sock = ctypes.c_int(self._connected_socket.fileno()) # - also init the receiver to replicate # dlt_client_connect() behavior connected = dltlib.dlt_receiver_init(ctypes.byref(self.receiver), self.sock, DLT_CLIENT_RCVBUFSIZE) break else: connected = dltlib.dlt_client_connect(ctypes.byref(self), self.verbose) # - create a python socket object so that we can detect # connection loss in the main_loop below as described at # 
http://stefan.buettcher.org/cs/conn_closed.html self._connected_socket = socket.fromfd(self.sock, socket.AF_INET6, socket.SOCK_STREAM) if error_count > MAX_LOG_IN_ROW: logger.debug("Surpressed %d messages for failed connection attempts", error_count - MAX_LOG_IN_ROW) return connected == DLT_RETURN_OK def disconnect(self): """Close all sockets""" if self._connected_socket: try: self._connected_socket.shutdown(socket.SHUT_RDWR) except IOError: pass except Exception: # pylint: disable=broad-except logger.exception("Unexpected exception while shutting down connection") try: self._connected_socket.close() except IOError: pass except Exception: # pylint: disable=broad-except logger.exception("Unexpected exception while disconnecting") def read_message(self, verbose=False): """Read new message :param bool verbose: Log every dlt_message_read(). Set True only for debugging. :returns: A new DLTMessage on successful read, None otherwise :rtype: DLTMessage|None """ msg = DLTMessage(verbose=verbose) res = dltlib.dlt_message_read(ctypes.byref(msg), ctypes.cast(self.receiver.buf, ctypes.POINTER(ctypes.c_uint8)), ctypes.c_uint(self.receiver.bytesRcvd), # length ctypes.c_int(0), # resync ctypes.c_int(verbose)) # verbose if res != DLT_MESSAGE_ERROR_OK: # - failed to read a complete message, possibly read an incomplete # message return None # prepare storage header if msg.standardheader.htyp & DLT_HTYP_WEID: dltlib.dlt_set_storageheader(msg.p_storageheader, msg.headerextra.ecu) else: dltlib.dlt_set_storageheader(msg.p_storageheader, ctypes.c_char_p("")) return msg # NEW_API - ensure backwards compatibility @property def serial_mode(self): """Get the mode""" return getattr(self, "mode", getattr(super(DLTClient, self), "serial_mode", 0)) # pylint: disable=too-many-arguments,too-many-return-statements,too-many-branches def py_dlt_client_main_loop(client, limit=None, verbose=0, dumpfile=None, callback=None): """Reimplementation of dlt_client.c:dlt_client_main_loop() in order to handle 
callback function return value""" bad_messages = 0 while True: if bad_messages > 100: # Some bad data is coming in and we can not recover - raise an error to cause a reconnect logger.warning("Dropping connection due to multiple malformed messages") return False # check connection status by peeking on the socket for data. # Note that if the remote connection is abruptly terminated, # this will raise a socket.timeout exception which the caller is # expected to handle (possibly by attempting a reconnect) # pylint: disable=protected-access try: ready_to_read = client._connected_socket.recv(1, socket.MSG_PEEK | socket.MSG_DONTWAIT) except OSError as os_exc: logger.error("[%s]: DLTLib closed connected socket", os_exc) return False if not ready_to_read: # - implies that the other end has called close()/shutdown() # (ie: clean disconnect) logger.debug("connection terminated, returning") return False # - check if stop flag has been set (end of loop) if callback and not callback(None): logger.debug("callback returned 'False'. Stopping main loop") return False # we now have data to read. Note that dlt_receiver_receive() # is a blocking call that only returns if there is data to be # read or the remote end closes connection. 
So, irrespective of # the status of the callback (in the case of dlt_broker, this is # the stop_flag Event), this loop will only proceed after the # function has returned or terminate when an exception is raised recv_size = dltlib.dlt_receiver_receive(ctypes.byref(client.receiver), DLT_RECEIVE_SOCKET) if recv_size <= 0: logger.error("Error while reading from socket") return False msg = client.read_message(verbose) while msg: try: if msg.apid == b"" and msg.ctid == b"": logger.debug("Received a corrupt message") bad_messages += 1 except AttributeError: logger.debug("Skipping a very corrupted message") bad_messages += 1 msg = client.read_message() continue bad_messages = 0 # save the message if dumpfile: dumpfile.write(msg.to_bytes()) # remove message from receiver buffer size = msg.headersize + msg.datasize - ctypes.sizeof(cDltStorageHeader) if msg.found_serialheader: size += DLT_ID_SIZE if dltlib.dlt_receiver_remove(ctypes.byref(client.receiver), size) < 0: logger.error("dlt_receiver_remove failed") return False # send the message to the callback and check whether we # need to continue if callback and not callback(msg): logger.debug("callback returned 'False'. Stopping main loop") break if limit is not None: limit -= 1 if limit == 0: break # read the next message msg = client.read_message() else: # - failed to read a complete message, rewind the client # receiver buffer pointer to start of the buffer if dltlib.dlt_receiver_move_to_begin(ctypes.byref(client.receiver)) == DLT_RETURN_ERROR: logger.error("dlt_receiver_move_to_begin failed") return False # Check if we need to keep going if callback and not callback(msg): logger.debug("callback returned 'False'. 
Stopping main loop") break return True def save(messages, filename, append=False): """Save DLT messages to a file :param list messages: List of messages to save :param str filename: Filename for the DLT log file the messages will be stored to :param bool append: New data will be appended to an existing file if set to True """ with open(filename, "ab" if append else "wb") as tracefile: for msg in messages: tracefile.write(msg.to_bytes()) def load(filename, filters=None, split=False, verbose=False, live_run=False): """Load DLT messages from a file :param str filename: Filename for the DLT log file the messages will be store to :param list filters: List of filters to apply [("APPID", "CTID"), ...] :param bool split: Ignored - compatibility option :param bool verbose: Be verbose :returns: A DLTFile object :rtype: DLTFile object """ cfile = cDLTFile(filename=filename, is_live=live_run) cfile.set_filters(filters) return cfile python-dlt-2.0/dlt/dlt_broker.py000066400000000000000000000077051354710635300167610ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. 
"""DLT Broker is running in a loop in a separate thread until stop_flag is set and adding received messages to all registered queues""" from __future__ import print_function, absolute_import import logging from multiprocessing import Event, Queue from dlt.dlt_broker_handlers import DLT_DAEMON_TCP_PORT, DLTContextHandler, DLTMessageHandler DLT_CLIENT_TIMEOUT = 5 logger = logging.getLogger(__name__) # pylint: disable=invalid-name class DLTBroker(object): """DLT Broker class manages receiving and filtering of DLT Messages """ def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False, **kwargs): """Initialize the DLT Broker :param str ip_address: IP address of the DLT Daemon :param str post: Port of the DLT Daemon :param bool use_proxy: Ignored - compatibility option :param **kwargs: All other args passed to DLTMessageHandler """ # - handlers init self.mp_stop_flag = Event() self.filter_queue = Queue() self.message_queue = Queue() kwargs["ip_address"] = ip_address kwargs["port"] = port kwargs["timeout"] = kwargs.get("timeout", DLT_CLIENT_TIMEOUT) self.msg_handler = DLTMessageHandler(self.filter_queue, self.message_queue, self.mp_stop_flag, kwargs) self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue) self._ip_address = ip_address self._port = port self._filename = kwargs.get("filename") def start(self): """DLTBroker main worker method""" logger.debug("Starting DLTBroker with parameters: use_proxy=%s, ip_address=%s, port=%s, filename=%s", False, self._ip_address, self._port, self._filename) self.msg_handler.start() self.context_handler.start() # - ensure we don't block on join_thread() in stop() # https://docs.python.org/2.7/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread self.filter_queue.cancel_join_thread() self.message_queue.cancel_join_thread() def add_context(self, context_queue, filters=None): """Register context :param Queue context_queue: The queue to which new messages will be added :param tuple 
filters: An list of tuples (eg: [(apid, ctid)]) used to filter messages that go into this queue. """ if filters is None: filters = [(None, None)] if not isinstance(filters, (tuple, list)): raise RuntimeError("Context queue filters must be a tuple." " Ex. (('SYS', 'JOUR'), ('AUDI', 'CAPI'))") self.context_handler.register(context_queue, filters) def remove_context(self, context_queue): """Unregister context :param Queue context_queue: The queue to unregister. """ self.context_handler.unregister(context_queue) def stop(self): """Stop the broker""" logger.info("Stopping DLTContextHandler and DLTMessageHandler") # - stop the DLTMessageHandler process and DLTContextHandler thread self.mp_stop_flag.set() self.context_handler.stop() logger.debug("Waiting on DLTContextHandler and DLTMessageHandler") self.context_handler.join() if self.msg_handler.is_alive(): try: self.msg_handler.terminate() except OSError: pass else: self.msg_handler.join() logger.debug("DLTBroker execution done") # pylint: disable=invalid-name def isAlive(self): """Backwards compatibility method Called from mtee.testing.connectors.tools.broker_assert. Will need to be replaced in MTEE eventually. """ return any((self.msg_handler.is_alive(), self.context_handler.is_alive())) python-dlt-2.0/dlt/dlt_broker_handlers.py000066400000000000000000000177711354710635300206450ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. 
"""Handlers are classes that assist dlt_broker in receiving and filtering DLT messages """ from __future__ import absolute_import import logging import socket import time from collections import defaultdict from threading import Thread, Event, Lock from multiprocessing import Process from multiprocessing.queues import Empty from dlt.dlt import DLTClient, DLT_DAEMON_TCP_PORT, py_dlt_client_main_loop DLT_CLIENT_TIMEOUT = 5 logger = logging.getLogger(__name__) # pylint: disable=invalid-name class DLTContextHandler(Thread): """Communication layer between the DLTContext instances and DLTMessageHandler Process. This class handles the transfer of messages between the process receiving traces from the DLT Daemon and the DLTContext queues. """ def __init__(self, filter_queue, message_queue): super(DLTContextHandler, self).__init__() self.stop_flag = Event() self.context_map = {} self.lock = Lock() self.filter_queue = filter_queue self.message_queue = message_queue def register(self, queue, filters=None): """Register a queue to collect messages matching specific filters :param Queue queue: The new queue to add :param tuple filters: An tuple with (apid, ctid) used to filter messages that go into this queue. 
""" if filters is None: filters = [(None, None)] queue_id = id(queue) # - unique identifier for this queue with self.lock: self.context_map[queue_id] = (queue, filters) # - inform the DLTMessageHandler process about this new # (queue, filter) pair self.filter_queue.put((queue_id, filters, True)) def unregister(self, queue): """Remove a queue from set of queues being handled :param Queue queue: The queue to remove """ queue_id = id(queue) _, filters = self.context_map.get(queue_id, (None, None)) if filters: with self.lock: try: del(self.context_map[queue_id]) except KeyError: pass # - inform the DLTMessageHandler process about removal of this # (queue, filter) pair self.filter_queue.put((queue_id, filters, False)) def run(self): """The thread's main loop """ while not self.stop_flag.is_set(): queue_id, message = None, None try: if self.message_queue.full(): logger.error("message_queue is full ! put() on this queue will block") queue_id, message = self.message_queue.get_nowait() except Empty: pass if message: queue, _ = self.context_map.get(queue_id, (None, None)) if queue: queue.put(message) else: time.sleep(0.01) def stop(self): """Stops thread execution""" self.stop_flag.set() self.filter_queue.close() if self.is_alive(): self.join() class DLTMessageHandler(Process): """Process receiving the DLT messages and handing them to DLTContextHandler This process instance is responsible for collecting messages from the DLT daemon, tagging them with the correct queue id and placing them on the messages queue. 
""" def __init__(self, filter_queue, message_queue, mp_stop_event, client_cfg): self.filter_queue = filter_queue self.message_queue = message_queue self.mp_stop_flag = mp_stop_event super(DLTMessageHandler, self).__init__() # - dict mapping filters to queue ids self.context_map = defaultdict(list) self._ip_address = client_cfg["ip_address"] self._port = client_cfg.get("port", DLT_DAEMON_TCP_PORT) self._filename = client_cfg.get("filename") self.verbose = client_cfg.get("verbose", 0) self.timeout = client_cfg.get("timeout", DLT_CLIENT_TIMEOUT) self._client = None self.tracefile = None def _client_connect(self): """Create a new DLTClient :param int timeout: Time in seconds to wait for connection. :returns: True if connected, False otherwise :rtype: bool """ logger.debug("Creating DLTClient (ip_address='%s', Port='%s', logfile='%s')", self._ip_address, self._port, self._filename) self._client = DLTClient(servIP=self._ip_address, port=self._port, verbose=self.verbose) connected = self._client.connect(self.timeout) if connected: logger.info("DLTClient connected to %s", self._client.servIP) return connected def _process_filter_queue(self): """Check if filters have been added or need to be removed""" while not self.filter_queue.empty(): queue_id, filters, add = self.filter_queue.get_nowait() if add: for apid_ctid in filters: self.context_map[apid_ctid].append(queue_id) else: try: for apid_ctid in filters: self.context_map[apid_ctid].remove(queue_id) if not self.context_map[apid_ctid]: del(self.context_map[apid_ctid]) except (KeyError, ValueError): # - queue_id already removed or not inserted pass def handle(self, message): """Function to be called for every message received :param DLTMessage message: received new DLTMessage instance :returns: True if the loop should continue, False to stop the loop and exit :rtype: bool """ self._process_filter_queue() if message is not None and not (message.apid == "" and message.ctid == ""): for filters, queue_ids in 
self.context_map.items(): if filters in [(message.apid, message.ctid), (None, None), (message.apid, None), (None, message.ctid)]: for queue_id in queue_ids: if self.message_queue.full(): logger.error("message_queue is full ! put() on this queue will block") self.message_queue.put((queue_id, message)) return not self.mp_stop_flag.is_set() def run(self): """DLTMessageHandler worker method""" if self._filename is not None: logger.info("Opening the DLT trace file '%s'", self._filename) self.tracefile = open(self._filename, mode="ab", buffering=False) while not self.mp_stop_flag.is_set(): exception_occured = False if not self._client_connect(): # keep trying to reconnect, until we either successfully # connect or the stop_flag is set continue try: res = py_dlt_client_main_loop(self._client, verbose=0, callback=self.handle, dumpfile=self.tracefile) if res is False and not self.mp_stop_flag.is_set(): # main loop returned False logger.error("DLT connection lost. Restarting DLT client") exception_occured = True except KeyboardInterrupt: exception_occured = True logger.debug("main loop manually interrupted") break except socket.timeout as exc: exception_occured = True logger.error("socket timeout error") logger.debug(exc) except Exception: # pylint: disable=broad-except exception_occured = True logger.exception("Exception during the DLT message receive") finally: if exception_occured: logger.debug("Closing open socket connections.") self._client.disconnect() self.message_queue.close() logger.info("DLTMessageHandler worker execution complete") python-dlt-2.0/dlt/helpers.py000066400000000000000000000036651354710635300162750ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. 
"""DLT client helpers""" import six class LimitCondition(object): """Condition object for counting messages""" def __init__(self, limit): """Constructor :param int limit: The maximum number of the messages for the condition """ self.limit = limit def __call__(self): if self.limit is None: return True self.limit = self.limit - 1 return self.limit >= 0 class ContinuousnessChecker(object): """ContinuousnessChecker class is intended to find problems in the order of DLT messages""" _ignore = ["DA1-DC1-0"] # control message will be ignored - there is no continuation def __init__(self, start=0): self._index = start self._counter = dict() def __call__(self, message): key = "{}-{}-{}".format(message.apid, message.ctid, message.seid) self._index += 1 if key in self._ignore: return if key in self._counter: # message of current type already received - check the continuousness err_msg = "Missing message detected. Message" err_msg += " #{} (apid='%s', ctid='%s', seid='%s')" % (message.apid, message.ctid, message.seid) err_msg += " should have counter '{}' instead of '{}'" if not (self._counter[key] + 1) % 256 == message.mcnt: counter = self._counter[key] self._counter[key] = message.mcnt raise RuntimeError(err_msg.format(self._index - 1, (counter + 1) % 256, message.mcnt)) self._counter[key] = message.mcnt else: # first message of current type self._counter[key] = message.mcnt def bytes_to_str(byte_or_str): """Return string from bytes""" if six.PY3: if isinstance(byte_or_str, bytes): return byte_or_str.decode('utf8') return str(byte_or_str) python-dlt-2.0/dlt/py_dlt_receive.py000066400000000000000000000031211354710635300176130ustar00rootroot00000000000000# Copyright (C) 2017. BMW Car IT GmbH. All rights reserved. 
"""DLT Receive using py_dlt""" from __future__ import absolute_import import argparse import logging import time from dlt.dlt_broker import DLTBroker logging.basicConfig(format='%(asctime)s %(name)s %(levelname)-8s %(message)s') root_logger = logging.getLogger() # pylint: disable=invalid-name logger = logging.getLogger("py-dlt-receive") # pylint: disable=invalid-name def parse_args(): """Parse command line arguments""" logger.info("Parsing arguments") parser = argparse.ArgumentParser(description="Receive DLT messages") parser.add_argument("--host", required=True, help="hostname or ip address to connect to") parser.add_argument("--file", required=True, help="The file into which the messages will be written") return parser.parse_args() def dlt_receive(options): """Receive DLT messages via DLTBroker""" logger.info("Creating DLTBroker instance") broker = DLTBroker(ip_address=options.host, filename=options.file) logger.info("Starting DLTBroker") broker.start() # start the loop try: logger.info("Receiving messages...") while True: time.sleep(.1) except KeyboardInterrupt: logger.info("Interrupted...") finally: logger.info("Stopping DLT broker") broker.stop() logger.info("Stopped DLT broker") def main(): """Main function""" root_logger.setLevel(level=logging.INFO) options = parse_args() logger.info("Parsed arguments: %s", options) dlt_receive(options) if __name__ == "__main__": main() python-dlt-2.0/requirements.txt000066400000000000000000000000571354710635300167520ustar00rootroot00000000000000coverage==4.5.4 nose==1.3.7 pycodestyle==2.5.0 python-dlt-2.0/requirements2.7.txt000066400000000000000000000000401354710635300171710ustar00rootroot00000000000000mock subprocess32 pylint==1.9.5 python-dlt-2.0/requirements3.txt000066400000000000000000000000161354710635300170300ustar00rootroot00000000000000pylint==2.4.2 python-dlt-2.0/setup.cfg000066400000000000000000000032351354710635300153100ustar00rootroot00000000000000[options] setup_requires = setuptools_scm [pycodestyle] 
max-line-length = 119 [TYPECHECK] disable=bad-option-value [MESSAGES CONTROL] # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time. See also the "--disable" option for examples. #enable= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" # Disabled messages # C0325 Unnecessary parens after 'print' keyword # I0011 Locally disabling %s Used when an inline option disable a message or a messages category # R0201 Method could be a function # R0205 Useless object inheritance # R0902 Too many instance attributes # R0903 Too few public methods # R0912 Too many branches (%s/%s) Used when a function or method has too many branches, making it hard to follow. 
# R0913 Too many arguments # R0921 Abstract class not referenced # R0922 Abstract class is only referenced 1 times # R0801 Similar code in multiple files # W0511 Fixme # W0613 Unused argument disable=I0011,R0201,R0205,R0902,R0903,R0912,R0913,R0921,R0922,R0801,C0325,W0511,W0613 python-dlt-2.0/setup.py000066400000000000000000000016551354710635300152050ustar00rootroot00000000000000import setuptools setuptools.setup( name="dlt", description="Python DLT implementation for DLT", use_scm_version=True, url="https://github.com/bmwcarit/python-dlt", author="BMW Car IT", license="MPL 2.0", classifiers=[ # See:https://pypi.org/classifiers/ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Intended Audience :: Developers", "Topic :: Software Development", "Topic :: System :: Logging", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], keywords="dlt log trace testing", packages=setuptools.find_packages(exclude=["tests", "tests.*"]), install_requires=[], zip_safe=False, test_suite="tests", entry_points={ 'console_scripts': [ 'py_dlt_receive = dlt.py_dlt_receive:main', ], }, ) python-dlt-2.0/tests/000077500000000000000000000000001354710635300146265ustar00rootroot00000000000000python-dlt-2.0/tests/__init__.py000066400000000000000000000000741354710635300167400ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. python-dlt-2.0/tests/condition_tests.py000066400000000000000000000010141354710635300204040ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
# --- tests/condition_tests.py ---
from nose.tools import assert_equals, assert_false, assert_true, raises

from dlt.helpers import LimitCondition


class TestsLimitCondition(object):

    __test__ = True

    def test_none(self):
        """A condition without a limit is always true"""
        assert_true(LimitCondition(None)())

    def test_limit_decreasing(self):
        """Each call consumes one message from the budget"""
        condition = LimitCondition(2)
        condition()
        assert_equals(condition.limit, 1)
        assert_true(condition())   # limit=0
        assert_false(condition())  # limit=-1


# --- tests/continuousness_helper_test.py ---
from dlt.helpers import ContinuousnessChecker


class Msg(object):
    """Minimal stand-in for a DLT message"""

    def __init__(self, apid, ctid, seid, mcnt):
        self.apid = apid
        self.ctid = ctid
        self.seid = seid
        self.mcnt = mcnt


def run_check(messages):
    """Feed every message through a fresh ContinuousnessChecker"""
    checker = ContinuousnessChecker()
    for message in messages:
        checker(message)


class TestsContinuousness(object):

    def test_simple(self):
        run_check([Msg("X", "Y", "99", count) for count in range(4, 9)])

    @raises(RuntimeError)
    def test_simple_missing(self):
        # counter 7 is missing from the sequence
        run_check([Msg("X", "Y", "99", count) for count in (4, 5, 6, 8, 9)])

    def test_simple_over(self):
        # message counter is a unsigned char so counts till 255 and then restarted back to 0
        run_check([Msg("X", "Y", "99", count) for count in (254, 255, 0, 1)])

    @raises(RuntimeError)
    def test_simple_reset(self):
        run_check([Msg("X", "Y", "99", count) for count in (230, 231, 0)])

    def test_ignore_control(self):
        run_check([Msg("DA1", "DC1", "0", 0),
                   Msg("X", "Y", "99", 231),
                   Msg("DA1", "DC1", "0", 0)])

    def test_zeros_da1_dc1(self):
        run_check([Msg("DA1", "DC1", "0", 0),
                   Msg("DA1", "DC1", "0", 0)])

    @raises(RuntimeError)
    def test_zeros_non_da1_dc1(self):
        run_check([Msg("X", "Y", "0", 0),
                   Msg("X", "Y", "0", 0)])


# --- tests/dlt_client_unit_tests.py ---
# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved.
"""Basic unittests for DLTClient class"""
import unittest

try:
    from mock import patch, Mock
except ImportError:
    from unittest.mock import patch, Mock

from dlt.dlt import DLTClient, DLT_RETURN_OK, DLT_RETURN_ERROR


class TestDLTClient(unittest.TestCase):

    def setUp(self):
        # - patch port so that connect fails even if dlt-daemon is running
        self.client = DLTClient(servIP='127.0.0.1', port=424242)

    def test_connect_with_timeout_failed(self):
        # - timeout error
        self.assertFalse(self.client.connect(timeout=2))
        # - dlt_receiver_init error
        fake_socket = Mock(fileno=Mock(return_value=2000000))
        with patch('socket.create_connection', return_value=fake_socket), \
                patch('dlt.dlt.dltlib.dlt_receiver_init', return_value=DLT_RETURN_ERROR):
            self.assertFalse(self.client.connect(timeout=2))

    def test_connect_with_timeout_success(self):
        fake_socket = Mock(fileno=Mock(return_value=2000000))
        with patch('socket.create_connection', return_value=fake_socket), \
                patch('dlt.dlt.dltlib.dlt_receiver_init', return_value=DLT_RETURN_OK):
            self.assertTrue(self.client.connect(timeout=2))
import time import unittest try: from Queue import Queue except ImportError: from queue import Queue import six from multiprocessing.queues import Empty if six.PY2: from multiprocessing.queues import Queue as mp_queue else: from multiprocessing import Queue as mp_queue from dlt.dlt_broker_handlers import DLTContextHandler from .utils import create_messages, stream_one, stream_multiple class TestDLTContextHandler(unittest.TestCase): def setUp(self): self.filter_queue = mp_queue() self.message_queue = mp_queue() self.handler = DLTContextHandler(self.filter_queue, self.message_queue) def test_init(self): self.assertFalse(self.handler.stop_flag.is_set()) self.assertFalse(self.handler.is_alive()) self.assertTrue(self.handler.filter_queue.empty()) self.assertTrue(self.handler.message_queue.empty()) def test_register_no_filter(self): queue = Queue() queue_id = id(queue) self.handler.register(queue) # When no filter is specified, filter (None, None) should be # added (ie: match all messages) self.assertIn(queue_id, self.handler.context_map) self.assertEqual(self.handler.context_map[queue_id], (queue, [(None, None)])) self.assertEqual(self.handler.filter_queue.get(), (queue_id, [(None, None)], True)) def test_register_single_filter(self): queue = Queue() queue_id = id(queue) filters = ("SYS", "JOUR") self.handler.register(queue, filters) # Specified, filter should be added to filter_queue self.assertIn(queue_id, self.handler.context_map) self.assertEqual(self.handler.context_map[queue_id], (queue, filters)) self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True)) def test_register_similar_filters(self): queue0 = Queue() queue_id0 = id(queue0) filters0 = ("SYS", "JOUR") queue1 = Queue() queue_id1 = id(queue1) filters1 = ("SYS", "JOUR") self.handler.register(queue0, filters0) self.handler.register(queue1, filters1) # Each queue should have a unique entry in the context_map and # filter_queue (even if they have the same filter) self.assertIn(queue_id0, 
self.handler.context_map) self.assertIn(queue_id1, self.handler.context_map) self.assertEqual(self.handler.context_map[queue_id0], (queue0, filters0)) self.assertEqual(self.handler.context_map[queue_id1], (queue1, filters1)) self.assertEqual(self.handler.filter_queue.get(), (queue_id0, filters0, True)) self.assertEqual(self.handler.filter_queue.get(), (queue_id1, filters1, True)) def test_unregister(self): queue = Queue() queue_id = id(queue) filters = ("SYS", "JOUR") self.handler.register(queue, filters) self.assertIn(queue_id, self.handler.context_map) self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True)) self.handler.unregister(queue) self.assertNotIn(queue_id, self.handler.context_map) self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, False)) def test_run_no_messages(self): try: self.handler.start() time.sleep(0.2) self.handler.stop() self.assertTrue(self.handler.stop_flag.is_set()) self.assertFalse(self.handler.is_alive()) except: self.fail() def test_run_single_context_queue(self): queue = Queue() queue_id = id(queue) filters = ("DA1", "DC1") self.handler.register(queue, filters) self.handler.start() # - simulate feeding of messages into the message_queue for _ in range(10): self.handler.message_queue.put((queue_id, create_messages(stream_one))) try: for _ in range(10): queue.get(timeout=0.01) except Empty: # - we should not get an Empty for exactly 10 messages self.fail() finally: self.handler.stop() def test_run_multiple_context_queue(self): self.handler.start() queue0 = Queue() queue_id0 = id(queue0) filters0 = ("DA1", "DC1") self.handler.register(queue0, filters0) queue1 = Queue() queue_id1 = id(queue1) filters1 = ("SYS", "JOUR") self.handler.register(queue1, filters1) # - queue with no filter queue2 = Queue() queue_id2 = id(queue2) self.handler.register(queue2) # - simulate feeding of messages into the message_queue for _ in range(10): for message in create_messages(stream_multiple, from_file=True): queue_id = 
queue_id0 if message.apid == 'DA1' else queue_id1 self.handler.message_queue.put((queue_id, message)) # - simulate feeding of all messages for the queue with # no filter. self.handler.message_queue.put((queue_id2, message)) try: da1_messages = [] sys_messages = [] all_messages = [] for _ in range(10): da1_messages.append(queue0.get(timeout=0.01)) sys_messages.append(queue1.get(timeout=0.01)) all_messages.append(queue2.get(timeout=0.01)) # these queues should not get any messages from other queues self.assertTrue(all(msg.apid == 'DA1' for msg in da1_messages)) self.assertTrue(all(msg.apid == 'SYS' for msg in sys_messages)) # this queues should get all messages self.assertFalse(all(msg.apid == 'DA1' for msg in all_messages) or all(msg.apid == 'SYS' for msg in all_messages)) except Empty: # - we should not get an Empty for at least 10 messages self.fail() finally: self.handler.stop() def test_run_unregister_with_unread_messages(self): self.handler.start() queue = Queue() queue_id = id(queue) filters = ("DA1", "DC1") self.handler.register(queue, filters) self.assertIn(queue_id, self.handler.context_map) self.handler.unregister(queue) # - simulate feeding of messages into the message_queue for _ in range(3): self.handler.message_queue.put((queue_id, create_messages(stream_one))) try: self.assertNotIn(queue_id, self.handler.context_map) # allow some time for the thread to read all messages time.sleep(0.5) self.assertTrue(self.handler.message_queue.empty()) self.assertTrue(queue.empty()) finally: self.handler.stop() python-dlt-2.0/tests/dlt_core_unit_tests.py000066400000000000000000000062241354710635300212600ustar00rootroot00000000000000# Copyright (C) 2017. BMW Car IT GmbH. All rights reserved. 
"""Basic size tests for ctype wrapper definitions, to protect against regressions""" import os import unittest import ctypes try: from mock import patch, MagicMock except ImportError: from unittest.mock import patch, MagicMock import dlt class TestCoreStructures(unittest.TestCase): def setUp(self): self.size_map = {'cDltServiceConnectionInfo': 10, 'cDltStorageHeader': 16, 'cDltStandardHeader': 4, 'cDltStandardHeaderExtra': 12, 'cDltExtendedHeader': 10, 'cDLTMessage': 120, 'cDltReceiver': 48, 'cDltClient': 104} def test_sizeof(self): for clsname, expected in self.size_map.items(): acutal = ctypes.sizeof(getattr(dlt.core, clsname)) self.assertEqual(acutal, expected, msg="v{0}, sizeof {1}: {2} != {3}".format( dlt.core.get_version(dlt.core.dltlib), clsname, acutal, expected)) class TestImportSpecificVersion(unittest.TestCase): def setUp(self): self.original_api_version = dlt.core.API_VER self.version_answer = b"2.18.4" self.version_str = (b"DLT Package Version: 2.18.4 STABLE, Package Revision: v2.18.4, " b"build on Sep 20 2019 10:03:53\n+SYSTEMD -SYSTEMD_WATCHDOG -TEST -SHM\n") self.version_filename = "core_2180.py" self.version_truncate_str = "2.18.42" dlt.core.API_VER = None def tearDown(self): dlt.core.API_VER = self.original_api_version def test_get_version(self): def mock_dlt_get_version(buf, buf_size): ver_cstr = ctypes.create_string_buffer(self.version_str) ctypes.memmove(buf, ver_cstr, len(ver_cstr)) mock_loaded_lib = MagicMock() mock_loaded_lib.dlt_get_version = MagicMock(side_effect=mock_dlt_get_version) api_version = dlt.core.get_version(mock_loaded_lib) self.assertEqual(mock_loaded_lib.dlt_get_version.call_count, 1) self.assertEqual(api_version, self.version_answer.decode()) self.assertEqual(dlt.core.API_VER, self.version_answer.decode()) def test_get_api_specific_file(self): with patch.object(os.path, "exists", return_value=True): filename = dlt.core.get_api_specific_file(self.version_answer.decode()) self.assertEqual(filename, self.version_filename) def 
test_get_api_specific_file_not_found(self): with patch.object(os.path, "exists", return_value=False): with self.assertRaises(ImportError) as err_cm: filename = dlt.core.get_api_specific_file(self.version_answer.decode()) self.assertEqual(str(err_cm.exception), "No module file: {}".format(self.version_filename)) def test_get_api_specific_file_truncate_minor_version(self): with patch.object(os.path, "exists", return_value=True): filename = dlt.core.get_api_specific_file(self.version_truncate_str) self.assertEqual(filename, self.version_filename) python-dlt-2.0/tests/dlt_filter_unit_tests.py000066400000000000000000000053261354710635300216170ustar00rootroot00000000000000 # Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. """Basic unittests for DLTFilter definition""" from __future__ import print_function import ctypes from nose.tools import * from dlt.dlt import DLTFilter, DLT_FILTER_MAX, DLT_ID_SIZE class TestDLTFilter(object): def setUp(self): self.dlt_filter = DLTFilter() def tearDown(self): del(self.dlt_filter) def test_init(self): assert_equal(len(self.dlt_filter.apid), DLT_FILTER_MAX) assert_equal(len(self.dlt_filter.ctid), DLT_FILTER_MAX) assert_equal(self.dlt_filter.counter, 0) for entry in self.dlt_filter.apid: assert_true(ctypes.string_at(entry, DLT_ID_SIZE) == b"\0\0\0\0") for entry in self.dlt_filter.ctid: assert_true(ctypes.string_at(entry, DLT_ID_SIZE) == b"\0\0\0\0") def test_add0(self): self.dlt_filter.add("AAA", "BBB") assert_equal(self.dlt_filter.counter, 1) assert_equal(len(self.dlt_filter.apid[0]), 4) assert_equal(len(self.dlt_filter.ctid[0]), 4) assert_true(ctypes.string_at(self.dlt_filter.apid[0], DLT_ID_SIZE) == b"AAA\0") assert_true(ctypes.string_at(self.dlt_filter.ctid[0], DLT_ID_SIZE) == b"BBB\0") def test_add1(self): self.dlt_filter.add("AAA", "BBB") self.dlt_filter.add("XXX", "YYY") assert_equal(self.dlt_filter.counter, 2) assert_true(ctypes.string_at(self.dlt_filter.apid[0], DLT_ID_SIZE) == b"AAA\0") 
assert_true(ctypes.string_at(self.dlt_filter.ctid[0], DLT_ID_SIZE) == b"BBB\0") assert_true(ctypes.string_at(self.dlt_filter.apid[1], DLT_ID_SIZE) == b"XXX\0") assert_true(ctypes.string_at(self.dlt_filter.ctid[1], DLT_ID_SIZE) == b"YYY\0") def test_add2(self): self.dlt_filter.add("AAAA", "BBBB") self.dlt_filter.add("XXX", "YYY") self.dlt_filter.add("CCCC", "DDDD") assert_equal(self.dlt_filter.counter, 3) assert_true(ctypes.string_at(self.dlt_filter.apid[0], DLT_ID_SIZE) == b"AAAA") assert_true(ctypes.string_at(self.dlt_filter.ctid[0], DLT_ID_SIZE) == b"BBBB") assert_true(ctypes.string_at(self.dlt_filter.apid[1], DLT_ID_SIZE) == b"XXX\0") assert_true(ctypes.string_at(self.dlt_filter.ctid[1], DLT_ID_SIZE) == b"YYY\0") assert_true(ctypes.string_at(self.dlt_filter.apid[2], DLT_ID_SIZE) == b"CCCC") assert_true(ctypes.string_at(self.dlt_filter.ctid[2], DLT_ID_SIZE) == b"DDDD") def test_repr(self): self.dlt_filter.add("AAAA", "BBBB") self.dlt_filter.add("XXX", "YYY") self.dlt_filter.add("CCCC", "DDDD") print(self.dlt_filter) assert_true(str(self.dlt_filter) == str([(b"AAAA", b"BBBB"), (b"XXX", b"YYY"), (b"CCCC", b"DDDD")])) python-dlt-2.0/tests/dlt_main_loop_unit_tests.py000066400000000000000000000066471354710635300223160ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
"""Basic unittests for the py_dlt_client_main_loop function""" import ctypes import functools import socket import unittest try: from cStringIO import StringIO except ImportError: from io import BytesIO as StringIO import six try: from mock import patch, Mock except ImportError: from unittest.mock import patch, Mock from dlt.dlt import py_dlt_client_main_loop, DLTClient, logger from dlt.core import cDltStorageHeader from .utils import stream_one def mock_dlt_receiver_receive_socket(client_receiver, partial=False, Fail=False): if Fail: return 0 stream_one.seek(0) buf = stream_one.read() if partial: buf = buf[:16] client_receiver._obj.buf = ctypes.create_string_buffer(buf) client_receiver._obj.bytesRcvd = len(buf) return len(buf) class TestMainLoop(unittest.TestCase): def setUp(self): self.client = DLTClient() self.client._connected_socket = Mock() def test_target_down(self): with patch.object(self.client._connected_socket, 'recv', side_effect=socket.timeout): callback = Mock(return_value="should not be called") if six.PY3: with self.assertLogs(logger=logger) as dlt_logger: return_value = py_dlt_client_main_loop(self.client, callback=callback) self.assertFalse(return_value) log_output = dlt_logger.output self.assertEqual(len(log_output), 1) self.assertEqual(log_output[0], 'ERROR:dlt.dlt:[]: DLTLib closed connected socket') else: self.assertRaises(socket.timeout, py_dlt_client_main_loop, self.client, callback=callback) self.assertFalse(callback.called) def test_target_up_nothing_to_read(self): with patch.object(self.client._connected_socket, 'recv', return_value=b"") as mock_recv: callback = Mock(return_value="should not be called") self.assertFalse(py_dlt_client_main_loop(self.client, callback=callback)) self.assertEqual(mock_recv.call_count, 1) self.assertFalse(callback.called) @patch('dlt.dlt.dltlib.dlt_receiver_move_to_begin', return_value=0) def test_exit_if_callback_returns_false(self, *ignored): with patch.object(self.client._connected_socket, 'recv', 
return_value=b'X'): # setup dlt_receiver_receive to return a partial message replacement = functools.partial(mock_dlt_receiver_receive_socket, partial=True) with patch('dlt.dlt.dltlib.dlt_receiver_receive', new=replacement): self.assertFalse(py_dlt_client_main_loop(self.client, callback=lambda msg: False)) def test_read_message(self, *ignored): dumpfile = StringIO() stream_one.seek(0) expected = stream_one.read() with patch.object(self.client._connected_socket, 'recv', return_value=b'X'): # setup dlt_receiver_receive to return a complete message replacement = functools.partial(mock_dlt_receiver_receive_socket) callback = Mock(side_effect=[True, False, False]) with patch('dlt.dlt.dltlib.dlt_receiver_receive', new=replacement): self.assertTrue(py_dlt_client_main_loop(self.client, dumpfile=dumpfile, callback=callback)) self.assertEqual(dumpfile.getvalue()[ctypes.sizeof(cDltStorageHeader):], expected) python-dlt-2.0/tests/dlt_message_handler_unit_tests.py000066400000000000000000000134141354710635300234500ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
import os import time import unittest from multiprocessing.queues import Empty from multiprocessing import Event import six if six.PY2: from multiprocessing.queues import Queue else: from multiprocessing import Queue from dlt.dlt_broker_handlers import DLTMessageHandler from .utils import create_messages, stream_multiple class TestDLTMessageHandler(unittest.TestCase): def setUp(self): self.filter_queue = Queue() self.message_queue = Queue() self.client_cfg = {"ip_address": b"127.0.0.1", "filename": b"/dev/null", "verbose": 0, "port": "1234", } self.stop_event = Event() self.handler = DLTMessageHandler(self.filter_queue, self.message_queue, self.stop_event, self.client_cfg) def test_init(self): self.assertFalse(self.handler.mp_stop_flag.is_set()) self.assertFalse(self.handler.is_alive()) self.assertTrue(self.handler.filter_queue.empty()) self.assertTrue(self.handler.message_queue.empty()) def test_run_basic(self): self.assertFalse(self.handler.is_alive()) self.handler.start() self.assertTrue(self.handler.is_alive()) self.assertNotEqual(self.handler.pid, os.getpid()) self.stop_event.set() self.handler.join() self.assertFalse(self.handler.is_alive()) def test_handle_add_new_filter(self): self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], True)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id"]) def test_handle_remove_filter_single_entry(self): self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], True)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id"]) self.handler.filter_queue.put(("queue_id", [("SYS", "JOUR")], False)) time.sleep(0.01) self.handler.handle(None) self.assertNotIn(("SYS", "JOUR"), self.handler.context_map) def test_handle_remove_filter_multiple_entries(self): 
self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True)) self.handler.filter_queue.put(("queue_id2", [("SYS", "JOUR")], True)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id1", "queue_id2"]) self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], False)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id2"]) def test_handle_multiple_similar_filters(self): self.handler.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True)) self.handler.filter_queue.put(("queue_id1", [("SYS", "JOUR")], True)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id0", "queue_id1"]) def test_handle_multiple_different_filters(self): self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True)) self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True)) time.sleep(0.01) self.handler.handle(None) self.assertIn(("SYS", "JOUR"), self.handler.context_map) self.assertIn(("DA1", "DC1"), self.handler.context_map) self.assertEqual(self.handler.context_map[("SYS", "JOUR")], ["queue_id0"]) self.assertEqual(self.handler.context_map[("DA1", "DC1")], ["queue_id1"]) def test_handle_message_tag_and_distribute(self): self.filter_queue.put(("queue_id0", [("SYS", "JOUR")], True)) self.filter_queue.put(("queue_id1", [("DA1", "DC1")], True)) self.filter_queue.put(("queue_id2", [("SYS", None)], True)) self.filter_queue.put(("queue_id3", [(None, "DC1")], True)) self.filter_queue.put(("queue_id4", [(None, None)], True)) time.sleep(0.01) # - simulate receiving of messages for _ in range(10): for message in create_messages(stream_multiple, from_file=True): self.handler.handle(message) self.assertIn(("SYS", "JOUR"), 
self.handler.context_map) self.assertIn(("DA1", "DC1"), self.handler.context_map) self.assertIn((None, None), self.handler.context_map) self.assertIn(("SYS", None), self.handler.context_map) self.assertIn((None, "DC1"), self.handler.context_map) try: # 60 == 10 messages of each for SYS, JOUR and None combinations + # 10 for (None,None) messages = [self.message_queue.get(timeout=0.01) for _ in range(60)] # these queues should not get any messages from other queues self.assertEqual(len([msg for qid, msg in messages if qid == 'queue_id0']), 10) self.assertEqual(len([msg for qid, msg in messages if qid == 'queue_id1']), 10) self.assertEqual(len([msg for qid, msg in messages if qid == 'queue_id2']), 10) self.assertEqual(len([msg for qid, msg in messages if qid == 'queue_id3']), 10) # this queue should get all messages self.assertEqual(len([msg for qid, msg in messages if qid == 'queue_id4']), 20) except Empty: # - we should not get an Empty for at least 40 messages self.fail() python-dlt-2.0/tests/dlt_message_performance_tests.py000066400000000000000000000023321354710635300232720ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
"""Basic unittests for DLT messages""" import io import time from nose.tools import assert_less from dlt.dlt import DLTFilter from .utils import create_messages stream_one = io.BytesIO(b'5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00') stream_two = io.BytesIO(b'5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC2\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00') LOOPS = 100000 class TestsDLTMessagePerf(object): def setUp(self): self.msgs = [create_messages(stream_one) for i in range(int(LOOPS * 0.1))] self.msgs += [create_messages(stream_two) for i in range(int(LOOPS * 0.9))] def test_compare_dict(self): # with dict as other attrs = {"apid": "DA1", "ctid": "DC1"} for msg in self.msgs: msg.compare(other=attrs) def test_compare_filter(self): # with DLTFilter as other flt = DLTFilter() flt.add("DA1", "DC1") for msg in self.msgs: msg.compare(other=flt) def test_compare_mesage(self): # with dict as other other = create_messages(stream_one) for msg in self.msgs: msg.compare(other=other) python-dlt-2.0/tests/dlt_message_unit_tests.py000066400000000000000000000234211354710635300217520ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. 
"""Basic unittests for DLT messages""" import io import pickle import re try: from mock import patch, PropertyMock except ImportError: from unittest.mock import patch, PropertyMock from nose.tools import * from dlt.dlt import DLTMessage from .utils import create_messages, stream_one, stream_with_params, stream_multiple, msg_benoit, control_one class TestsDLTMessageUnit(object): def test_compare_default_attrs(self): attrs = {"extendedheader.apid": "DA1", "extendedheader.ctid": "DC1"} msg = create_messages(stream_one) assert_true(msg.compare(other=attrs)) assert_true(msg.compare(other={"extendedheader.ctid": "DC1"})) def test_equal(self): msg1 = create_messages(stream_one) msg2 = create_messages(stream_one) assert_equal(msg1, msg2) def test_easy_attributes(self): msg = create_messages(stream_one) assert_equal(msg.ecuid, "MGHS") assert_equal(msg.seid, 0) assert_equal(msg.tmsp, 372391.26500000001) assert_equal(msg.apid, "DA1") assert_equal(msg.ctid, "DC1") def test_compare(self): msg1 = create_messages(stream_one) msg2 = create_messages(stream_one) assert_true(msg1.compare(msg2)) assert_true(msg1.compare(other=msg2)) assert_true(msg1.compare(dict(apid="DA1", ctid="DC1"))) assert_false(msg1.compare(dict(apid="DA1", ctid="XX"))) def test_compare_regexp(self): msg1 = create_messages(stream_one) assert_true(msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*")))) assert_true(msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".connection_info ok.")))) assert_true(msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".connection_info ok.")))) assert_true(msg1.compare(dict(apid="DA1", ctid=re.compile(r"D.*"), payload_decoded=re.compile(r".*info ok.")))) assert_true(msg1.compare(dict(apid="DA1", ctid="DC1", payload_decoded=re.compile(r".*info ok.")))) assert_true(msg1.compare(dict(apid=re.compile(r"D.")))) assert_true(msg1.compare(dict(apid=re.compile(r"D.+")))) 
assert_true(msg1.compare(dict(apid=re.compile(r"D.")))) assert_false(msg1.compare(dict(apid=re.compile(r"X.")))) def test_compare_regexp_nsm(self): nsm = create_messages(io.BytesIO(b'5\x00\x00 MGHS\xdd\xf6e\xca&\x01NSM\x00DC1\x00\x02\x0f\x00\x00' b'\x00\x02\x00\x00\x00\x00')) nsma = create_messages(io.BytesIO(b'5\x00\x00 MGHS\xdd\xf6e\xca&\x01NSMADC1\x00\x02\x0f\x00\x00' b'\x00\x02\x00\x00\x00\x00')) assert_true(nsm.compare(dict(apid=re.compile("^NSM$")))) assert_false(nsma.compare(dict(apid=re.compile("^NSM$")))) assert_true(nsm.compare(dict(apid="NSM"))) assert_false(nsma.compare(dict(apid="NSM"))) assert_true(nsm.compare(dict(apid=re.compile("NSM")))) assert_true(nsma.compare(dict(apid=re.compile("NSM")))) @raises(Exception) def test_compare_regexp_throw(self): assert_true(nsm.compare(dict(apid=b"NSM"), regexp=True)) def test_compare_regexp_benoit(self): msg1 = create_messages(msg_benoit, from_file=True)[0] assert_true(msg1.compare({"apid": "DEMO", "ctid": "DATA", "payload_decoded": re.compile("Logging from the constructor of a global instance")})) def test_compare_two_msgs(self): msgs = create_messages(stream_multiple, from_file=True) assert_not_equal(msgs[0], msgs[-1]) def test_compare_other_not_modified(self): msg = create_messages(stream_one) other = dict(apid='XX', ctid='DC1') assert_false(msg.compare(other)) assert_equal(other, dict(apid='XX', ctid='DC1')) def test_compare_quick_return(self): msg = create_messages(stream_one) other = dict(apid=b'DA1', ctid=b'XX', ecuid=b'FOO') with patch('dlt.dlt.DLTMessage.ecuid', new_callable=PropertyMock) as ecuid: ecuid.return_value = b'FOO' assert_false(msg.compare(other)) ecuid.assert_not_called() def test_compare_matching_apid_ctid(self): msg = create_messages(stream_one) other = dict(apid='DA1', ctid='DC1', ecuid='FOO') with patch('dlt.dlt.DLTMessage.ecuid', new_callable=PropertyMock) as ecuid: ecuid.return_value = 'BAR' assert_false(msg.compare(other)) ecuid.assert_called_once() ecuid.return_value = 'FOO' 
assert_true(msg.compare(other)) assert_equal(ecuid.call_count, 2) def test_pickle_api(self): messages = create_messages(stream_multiple, from_file=True) for msg in messages: assert_equal(msg, pickle.loads(pickle.dumps(msg))) def test_from_bytes_control(self): msg = DLTMessage.from_bytes(b"DLT\x011\xd9PY(<\x08\x00MGHS5\x00\x00 MGHS\x00\x00\x96\x85&\x01DA1\x00DC1" b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00") assert_equal(msg.apid, "DA1") assert_equal(msg.ctid, "DC1") assert_equal(msg.ecuid, "MGHS") assert_equal(msg.tmsp, 3.8533) assert_equal(msg.storage_timestamp, 1498470705.539688) assert_equal(msg.payload_decoded, "[connection_info ok] connected \x00\x00\x00\x00") def test_from_bytes_log_multipayload(self): msg = DLTMessage.from_bytes(b"DLT\x011\xd9PYfI\x08\x00MGHS=\x00\x000MGHS\x00\x00\x03\x1e\x00\x00\x94\xc8A" b"\x01MON\x00CPUS\x00\x02\x00\x00\x10\x004 online cores\n\x00") assert_equal(msg.apid, "MON") assert_equal(msg.ctid, "CPUS") assert_equal(msg.ecuid, "MGHS") assert_equal(msg.tmsp, 3.8088) assert_equal(msg.payload_decoded, "4 online cores\n") def test_sort_data_control(self): data = ( b"DLT\x011\xd9PY(<\x08\x00MGHS5\x00\x00 MGHS\x00\x00\x96\x85&\x01DA1\x00DC1" b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00" ) tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data) assert_equal(tmsp, 3.8533) assert_equal(length, len(data)) assert_equal(apid, "DA1") assert_equal(ctid, "DC1") def test_sort_data_log_multipayload(self): data = ( b"DLT\x011\xd9PYfI\x08\x00MGHS=\x00\x000MGHS\x00\x00\x03\x1e\x00\x00\x94\xc8A" b"\x01MON\x00CPUS\x00\x02\x00\x00\x10\x004 online cores\n\x00" ) tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data) assert_equal(tmsp, 3.8088) assert_equal(length, len(data)) assert_equal(apid, "MON") assert_equal(ctid, "CPUS") def test_largelog(self): data = ( b'DLT\x012\xd9PY)\x00\x01\x00MGHS=o\x02\x04MGHS\x00\x00\x03\x1e\x00\x00\x9e\xb7' b'A\x01MON\x00THRD\x00\x02\x00\x00\xe4\x01Process avb_streamhandl with pid: 307 ' 
b'"/usr/bin/avb_streamhandler_app_someip -s pluginias-media_transport-avb_config' b'uration_bmw_mgu.so --bg setup --target Harman_MGU_B1 -p MGU_ICAM -k local.alsa' b'.baseperiod=256 -k ptp.loopcount=0 -k ptp.pdelaycount=0 -k ptp.synccount=0 -k ' b'sched.priority=20 -k tspec.vlanprio.low=3 -k tspec.presentation.time.offset.lo' b'w=2200000 -k tspec.interval.low=1333000 -k debug.loglevel._RXE=4 -k alsa.group' b'name=mgu_avbsh -n socnet0 -b 2 " started 2401 msec ago\x00' ) msg = DLTMessage.from_bytes(data) assert_equal(msg.apid, "MON") assert_equal(msg.ctid, "THRD") assert_equal(msg.ecuid, "MGHS") assert_equal(msg.tmsp, 4.0631) assert_equal( msg.payload_decoded, 'Process avb_streamhandl with pid: 307 "/usr/bin/avb_streamhandler_app_someip -s ' 'pluginias-media_transport-avb_configuration_bmw_mgu.so --bg setup --target Harman_MGU_B1 -p MGU_ICAM ' '-k local.alsa.baseperiod=256 -k ptp.loopcount=0 -k ptp.pdelaycount=0 -k ptp.synccount=0 ' '-k sched.priority=20 -k tspec.vlanprio.low=3 -k tspec.presentation.time.offset.low=2200000 ' '-k tspec.interval.low=1333000 -k debug.loglevel._RXE=4 -k alsa.groupname=mgu_avbsh -n socnet0 ' '-b 2 " started 2401 msec ago' ) tmsp, length, apid, ctid = DLTMessage.extract_sort_data(data) assert_equal(msg.tmsp, tmsp) assert_equal(len(msg.to_bytes()), length) assert_equal(msg.apid, apid) assert_equal(msg.ctid, ctid) class TestsPayload(object): def test_split(self): msg = create_messages(stream_with_params, from_file=True)[0] payload = msg.payload assert_equal(len(payload), msg.noar) assert_equal(payload[0], b"CLevelMonitor::notification() => commandType") assert_equal(payload[1], 3) assert_equal(payload[2], b"deviceId") assert_equal(payload[3], 5) assert_equal(payload[4], b"value") assert_equal(payload[5], 4074) assert_equal(payload[6], b"simulation status") assert_equal(payload[7], 0) assert_raises(IndexError, payload.__getitem__, 8) class TestsControl(object): def test_load(self): msg = create_messages(control_one, from_file=True)[0] 
assert_equal(msg.apid, "DA1") assert_equal(msg.ctid, "DC1") assert_equal(msg.is_mode_verbose, 0) assert_equal(msg.payload_decoded, "[get_log_info 7] get_log_info, 07, 01 00 48 44 44 4d 01 00 43 41 50 49 ff" " ff 04 00 43 41 50 49 06 00 68 64 64 6d 67 72 72 65 6d 6f") python-dlt-2.0/tests/static_check_tests.py000066400000000000000000000026311354710635300210500ustar00rootroot00000000000000# Copyright (C) 2015. BMW Car IT GmbH. All rights reserved. """Static checks for python-dlt""" import os import unittest from nose.tools import assert_equal from dlt import run_command def search_bin_path(search_bin, search_path): for path in search_path: if all(os.path.exists(os.path.join(path, bin_name)) for bin_name in search_bin): return path raise ValueError("Could not find path for {}".format(search_bin)) class TestCodingStyleCheck(unittest.TestCase): def setUp(self): search_bin = ['pycodestyle', 'pylint'] search_path = ['/opt/nativesysroot/usr/bin', '/usr/local/bin'] tox_bin_path = os.getenv('PATH').split(':')[0] if tox_bin_path.startswith(os.path.join(os.getcwd(), '.tox')): search_path.append(tox_bin_path) self.prefix_path = search_bin_path(search_bin, search_path) def test_check_pycodestyle(self): command = [os.path.join(self.prefix_path, "pycodestyle"), "dlt"] stdout, stderr, return_code = run_command(command, shell=False) assert_equal(return_code, 0, "Stdout: {}\nStderr: {}".format(stdout, stderr)) def test_check_pylint(self): command = [os.path.join(self.prefix_path, "pylint"), "--rcfile", "setup.cfg", "dlt"] stdout, stderr, return_code = run_command(command, shell=False) assert_equal(return_code, 0, "Stdout: {}\nStderr: {}".format(stdout, stderr)) python-dlt-2.0/tests/utils.py000066400000000000000000000217751354710635300163540ustar00rootroot00000000000000# Copyright (C) 2016. BMW Car IT GmbH. All rights reserved. 
"""Test helpers and data""" import atexit import ctypes import io import tempfile import os from dlt.dlt import DLTClient, load stream_one = io.BytesIO(b'5\x00\x00 MGHS\xdd\xf6e\xca&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00') stream_with_params = (b"DLT\x01\xc2<\x85W\xc7\xc5\x02\x00MGHS=r\x00\xa0MGHS\x00\x00\x02B\x00X\xd4\xf1A\x08" b"ENV\x00LVLM\x00\x02\x00\x00-\x00CLevelMonitor::notification() => commandType\x00#" b"\x00\x00\x00\x03\x00\x00\x00\x00\x02\x00\x00\t\x00deviceId\x00#\x00\x00\x00\x05\x00" b"\x00\x00\x00\x02\x00\x00\x06\x00value\x00#\x00\x00\x00\xea\x0f\x00\x00\x00\x02\x00" b"\x00\x12\x00simulation status\x00#\x00\x00\x00\x00\x00\x00\x00") stream_multiple = (b"DLT\x01#o\xd1WD>\x0c\x00MGHS5\x00\x00YMGHS\x00\x01\x80\xd1&\x01DA1\x00DC1\x00\x03\x00\x00\x00" b"\x07\x01\x00SYS\x00\x01\x00FILE\xff\xff\x16\x00File transfer manager.\x12\x00" b"DLT System ManagerremoDLT\x01#o\xd1Wo>\x0c\x00MGHS=\x00\x01PMGHS\x00\x00\x03\xf4\x00" b"\x01i\xa6A\x05SYS\x00JOUR\x00\x02\x00\x00\x1b\x002011/11/11 11:11:18.005274\x00\x00\x02\x00\x00" b"\t\x006.005274\x00\x00\x02\x00\x00\x16\x00systemd-journal[748]:\x00\x00\x02\x00\x00\x0f\x00" b"Informational:\x00\x00\x02\x00\x00\xcf\x00Runtime journal (/run/log/journal/) is currently" b" using 8.0M.\nMaximum allowed usage is set to 385.9M.\nLeaving at least 578.8M free (of" b" currently available 3.7G of space).\nEnforced usage limit is thus 385.9M.\x00") msg_benoit = (b"DLT\x01\xa5\xd1\xceW\x90\xb9\r\x00MGHS=\x00\x00RMGHS\x00\x00\n[\x00\x0f\x9b#A\x01DEMODATA\x00" b"\x82\x00\x002\x00Logging from the constructor of a global instance\x00") control_one = (b'DLT\x01#o\xd1W\x99!\x0c\x00MGHS5\x00\x00;MGHS\x00\x01\x7f\xdb&\x01DA1\x00DC1\x00\x03' b'\x00\x00\x00\x07\x01\x00HDDM\x01\x00CAPI\xff\xff\x04\x00CAPI\x06\x00hddmgrremo') # DLT file with invalid storage header and frames file_storage_clean = (b"DLT\x01\x9a\xc6\xbfW\x020\t\x00MGHS5\x00\x00 MGHS\x00\x02\x8aC&\x01DA1\x00DC1" 
b"\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00DLT\x01\x9a\xc6\xbfWoA\t\x00MGHS=" b"\x00\x00NMGHS\x00\x00\x049\x00\x01p\n\x00MGHS5\x00\x00 MGHS" # not to buffer b"\x00\x00mj&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00" ) file_with_lifecycles_without_start = (b"DLT\x01\xc5\x82\xdaX\x19\x93\r\x00XORA'\x01\x00\x1bXORA" # trace to buffer b"\x16\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x04\x00\x00\x00\x00" b"DLT\x01\xc5\x82\xdaXQi\x0e\x00MGHS5\x00\x00 MGHS" # trace to buffer b"\x00\x03U\xe0&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00" b"DLT\x01m\xc2\x91Y\xad\xe4\x07\x00MGHS=\x01\x00zMGHS" # random trace b"\x00\x00\x02\xab\x00\x00@VA\x01DLTDINTM\x00\x02\x00\x00Z\x00" b"ApplicationID 'DBSY' registered for PID 689, Description=DBus" b" Logging|SysInfra|Log&Trace\n\x00" b"DLT\x01\xed\xc2\x91Y\x0f\xf0\x08\x00MGHS5\x00\x00 MGHS" # trace to buffer b"\x00\x00\x9dC&\x01DA1\x00DC1\x00\x02\x0f\x00\x00\x00\x02\x00\x00\x00\x00" b"DLT\x01\xed\xc2\x91Y\x17.\n\x00MGHS=\x00\x00NMGHS" # new lifecycle b"\x00\x00\x02\xae\x00\x00@/A\x01DLTDINTM\x00\x02\x00\x00.\x00" b"Daemon launched. 
Starting to output traces...\x00" ) def create_messages(stream, from_file=False): if from_file is False: stream.seek(0) buf = stream.read() client = DLTClient() client.receiver.buf = ctypes.create_string_buffer(buf) client.receiver.bytesRcvd = len(buf) return client.read_message() _, tmpname = tempfile.mkstemp(suffix=b"") tmpfile = open(tmpname, "wb") tmpfile.write(stream) tmpfile.flush() tmpfile.seek(0) tmpfile.close() atexit.register(os.remove, tmpname) msgs = load(tmpname) return msgs python-dlt-2.0/tox.ini000066400000000000000000000012601354710635300147760ustar00rootroot00000000000000[tox] envlist = py3,py27,lint skipdist = True [testenv] deps = -r{toxinidir}/requirements.txt commands = coverage erase coverage run {envbindir}/nosetests {posargs} coverage report [testenv:py27] # Python 2.7 specific requirements deps = {[testenv]deps} -r{toxinidir}/requirements.txt -r{toxinidir}/requirements2.7.txt [testenv:py3] # Python 3 specific requirements deps = {[testenv]deps} -r{toxinidir}/requirements.txt -r{toxinidir}/requirements3.txt [testenv:lint] skip_install = True skipsdist = True deps = {[testenv]deps} -r{toxinidir}/requirements3.txt commands = pycodestyle dlt pylint --rcfile=setup.cfg dlt