pax_global_header00006660000000000000000000000064146044303600014512gustar00rootroot0000000000000052 comment=15e283f0bd0d7d6ef197ca121b298dfe22ed358f acstore-20240407/000077500000000000000000000000001460443036000134165ustar00rootroot00000000000000acstore-20240407/.github/000077500000000000000000000000001460443036000147565ustar00rootroot00000000000000acstore-20240407/.github/workflows/000077500000000000000000000000001460443036000170135ustar00rootroot00000000000000acstore-20240407/.github/workflows/test_docker.yml000066400000000000000000000046111460443036000220460ustar00rootroot00000000000000# Run tests on Fedora and Ubuntu Docker images using GIFT CORP and GIFT PPA on commit name: test_docker on: [push] permissions: read-all jobs: test_fedora: runs-on: ubuntu-latest strategy: matrix: version: ['39', '40'] container: image: registry.fedoraproject.org/fedora:${{ matrix.version }} steps: - uses: actions/checkout@v3 - name: Set up container run: | dnf install -y dnf-plugins-core langpacks-en - name: Install dependencies run: | dnf copr -y enable @gift/dev dnf install -y @development-tools python3 python3-build python3-devel python3-pyyaml python3-setuptools python3-wheel - name: Run tests env: LANG: C.utf8 run: | python3 ./run_tests.py - name: Run end-to-end tests run: | if test -f tests/end-to-end.py; then PYTHONPATH=. 
python3 ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi - name: Build source distribution (sdist) run: | python3 -m build --no-isolation --sdist - name: Build binary distribution (wheel) run: | python3 -m build --no-isolation --wheel test_ubuntu: runs-on: ubuntu-latest strategy: matrix: version: ['24.04'] container: image: ubuntu:${{ matrix.version }} steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies run: | add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential python3 python3-build python3-dev python3-distutils python3-pip python3-setuptools python3-wheel python3-yaml - name: Run tests env: LANG: en_US.UTF-8 run: | python3 ./run_tests.py - name: Run end-to-end tests env: LANG: en_US.UTF-8 run: | if test -f tests/end-to-end.py; then PYTHONPATH=. 
python3 ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi - name: Build source distribution (sdist) run: | python3 -m build --no-isolation --sdist - name: Build binary distribution (wheel) run: | python3 -m build --no-isolation --wheel acstore-20240407/.github/workflows/test_docs.yml000066400000000000000000000025131460443036000215260ustar00rootroot00000000000000# Run docs tox tests on Ubuntu Docker images using GIFT PPA name: test_docs on: pull_request: branches: - main push: branches: - main permissions: read-all jobs: build: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.12' toxenv: 'docs' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} acstore-20240407/.github/workflows/test_tox.yml000066400000000000000000000077371460443036000214250ustar00rootroot00000000000000# Run tox tests on Ubuntu Docker images using GIFT PPA name: test_tox on: pull_request: branches: - main push: branches: - main permissions: read-all jobs: build: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.8' toxenv: 'py38,wheel' - python-version: '3.9' toxenv: 'py39,wheel' - python-version: '3.10' toxenv: 'py310,wheel' - python-version: '3.11' toxenv: 
'py311,wheel' - python-version: '3.12' toxenv: 'py312,wheel' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} coverage: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.10' toxenv: 'coverage' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests with coverage env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} - name: Upload coverage report to Codecov uses: codecov/codecov-action@v3 lint: runs-on: ubuntu-latest strategy: matrix: 
include: - python-version: '3.12' toxenv: 'lint' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run linter env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} acstore-20240407/.gitignore000066400000000000000000000004211460443036000154030ustar00rootroot00000000000000# Files to ignore by git. # Back-up files *~ *.swp # Generic auto-generated build files *.pyc *.pyo # Specific auto-generated build files /.tox /__pycache__ /acstore.egg-info /build /dist # Code review files /.review # Test coverage files .coverage tests-coverage.txt acstore-20240407/.pylintrc000066400000000000000000000551471460443036000152770ustar00rootroot00000000000000# Pylint 3.0.x configuration file # # This file is generated by l2tdevtools update-dependencies.py, any dependency # related changes should be made in dependencies.ini. [MAIN] # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Clear in-memory caches upon conclusion of linting. Useful if running pylint # in a server-like mode. clear-cache-post-run=no # Load and enable all available extensions. 
Use --list-extensions to see a list # all available extensions. #enable-all-extensions= # In error mode, messages with a category besides ERROR or FATAL are # suppressed, and no reports are done by default. Error mode is compatible with # disabling specific errors. #errors-only= # Always return a 0 (non-error) status code, even if lint errors are found. # This is primarily useful in continuous integration scripts. #exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) extension-pkg-whitelist= # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. fail-on= # Specify a score threshold under which the program will exit with error. fail-under=10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. #from-stdin= # Files or directories to be skipped. They should be base names, not paths. ignore=CVS # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows # format. Because '\\' represents the directory delimiter on Windows systems, # it can't be used as an escape character. ignore-paths= # Files or directories matching the regular expression patterns are skipped. # The regex matches against base names, not paths. 
The default value ignores # Emacs file locks ignore-patterns=^\.# # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins=pylint.extensions.docparams # Pickle collected data for later comparisons. persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.12 # Discover python modules and packages in the file system subtree. # recursive=no recursive=yes # Add paths to the list of the source roots. Supports globbing patterns. The # source root is an absolute path or a path relative to the current working # directory used to determine a package namespace for modules located under the # source root. source-roots= # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # In verbose mode, extra non-checker-related info will be displayed. 
#verbose= [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set # naming style. #argument-rgx= argument-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= attr-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. #class-attribute-rgx= class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$ # Naming style matching correct class constant names. class-const-naming-style=UPPER_CASE # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. #class-const-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. If left empty, class names will be checked with the set naming style. #class-rgx= class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. 
If left empty, constant names will be checked with the set naming # style. #const-rgx= const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set # naming style. #function-rgx= function-rgx=[A-Z_][a-zA-Z0-9_]*$ # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. #inlinevar-rgx= inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= method-rgx=(test|[A-Z_])[a-zA-Z0-9_]*$ # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. #module-rgx= module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. 
name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Regular expression matching correct type alias names. If left empty, type # alias names will be checked with the set naming style. #typealias-rgx= # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set # naming style. #variable-rgx= variable-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ [CLASSES] # Warn about protected attribute access inside special methods check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. # valid-metaclass-classmethod-first-arg=mcs valid-metaclass-classmethod-first-arg=cls [DESIGN] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) exclude-too-few-public-methods= # List of qualified class names to ignore when counting class parents (see # R0901) ignored-parents= # Maximum number of arguments for function / method. 
# max-args=5 max-args=10 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=12 # Maximum number of locals for function / method body. max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body. max-returns=6 # Maximum number of statements in function / method body. max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [EXCEPTIONS] # Exceptions that will emit a warning when caught. overgeneral-exceptions=builtins.BaseException,builtins.Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). # indent-string=' ' indent-string=' ' # Maximum number of characters on a single line. # max-line-length=100 max-line-length=80 # Maximum number of lines in a module. max-module-lines=1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow explicit reexports by alias from a package __init__. allow-reexport-from-package=no # Allow wildcard imports from modules that define __all__. 
allow-wildcard-with-all=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules= # Output a graph (.gv or any supported image format) of external dependencies # to the given file (report RP0402 must not be disabled). ext-import-graph= # Output a graph (.gv or any supported image format) of all (i.e. internal and # external) dependencies to the given file (report RP0402 must not be # disabled). import-graph= # Output a graph (.gv or any supported image format) of internal dependencies # to the given file (report RP0402 must not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, # UNDEFINED. confidence=HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=assignment-from-none, bad-inline-option, consider-using-f-string, deprecated-pragma, duplicate-code, file-ignored, fixme, locally-disabled, logging-format-interpolation, logging-fstring-interpolation, missing-param-doc, raise-missing-from, raw-checker-failed, super-with-arguments, suppressed-message, too-few-public-methods, too-many-ancestors, too-many-boolean-expressions, too-many-branches, too-many-instance-attributes, too-many-lines, too-many-locals, too-many-nested-blocks, too-many-public-methods, too-many-return-statements, too-many-statements, unsubscriptable-object, use-implicit-booleaness-not-comparison-to-string, use-implicit-booleaness-not-comparison-to-zero, useless-object-inheritance, useless-suppression, use-symbolic-message-instead # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [METHOD_ARGS] # List of qualified names (i.e., library.method) which require a timeout # parameter e.g. 'requests.api.get,requests.api.post' timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. 
When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit,argparse.parse_error [REPORTS] # Python expression which should return a score less than or equal to 10. You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each # category, as well as 'statement' which is the total number of statements # analyzed. This score is used by the global evaluation report (RP0004). evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. msg-template= # Set the output format. Available formats are: text, parseable, colorized, # json2 (improved json format), json (old json format) and msvs (visual # studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. # score=yes score=no [SIMILARITIES] # Comments are removed from the similarity computation ignore-comments=yes # Docstrings are removed from the similarity computation ignore-docstrings=yes # Imports are removed from the similarity computation ignore-imports=yes # Signatures are removed from the similarity computation ignore-signatures=yes # Minimum lines number of a similarity. min-similarity-lines=4 [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. 
Available dictionaries: en_AG (hunspell), en_AU # (hunspell), en_BS (hunspell), en_BW (hunspell), en_BZ (hunspell), en_CA # (hunspell), en_DK (hunspell), en_GB (hunspell), en_GH (hunspell), en_HK # (hunspell), en_IE (hunspell), en_IN (hunspell), en_JM (hunspell), en_MW # (hunspell), en_NA (hunspell), en_NG (hunspell), en_NZ (hunspell), en_PH # (hunspell), en_SG (hunspell), en_TT (hunspell), en_US (hunspell), en_ZA # (hunspell), en_ZM (hunspell), en_ZW (hunspell). spelling-dict= # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. 
ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins=no-member, not-async-context-manager, not-context-manager, attribute-defined-outside-init # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # Regex pattern to define which classes are considered mixins. mixin-class-rgx=.*[Mm]ixin # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of names allowed to shadow builtins allowed-redefined-builtins= # List of strings which can identify a callback function by name. 
A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io acstore-20240407/.readthedocs.yaml000066400000000000000000000005021460443036000166420ustar00rootroot00000000000000# Read the Docs configuration file for Sphinx projects # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 build: os: ubuntu-22.04 tools: python: "3.12" sphinx: configuration: docs/conf.py fail_on_warning: false python: install: - requirements: docs/requirements.txt acstore-20240407/.yamllint.yaml000066400000000000000000000002251460443036000162100ustar00rootroot00000000000000extends: default rules: line-length: disable indentation: spaces: consistent indent-sequences: false check-multi-line-strings: true acstore-20240407/ACKNOWLEDGEMENTS000066400000000000000000000010771460443036000157000ustar00rootroot00000000000000Acknowledgements: acstore Copyright 2017 The ACStore Project Authors. Please see the AUTHORS file for details on individual authors. ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write plaso storage files. ACStore, originates from the Plaso project [https://github.com/log2timeline/plaso]. It was largely rewritten and made into a stand-alone project to provide more flexibility and allow other projects to make use of the plaso storage objects. 
ACStore is currently developed and maintained by: * Daniel White * Joachim Metz acstore-20240407/AUTHORS000066400000000000000000000005601460443036000144670ustar00rootroot00000000000000# Names should be added to this file with this pattern: # # For individuals: # Name (email address) # # For organizations: # Organization (fnmatch pattern) # # See python fnmatch module documentation for more information. Google Inc. (*@google.com) Kristinn Gudjonsson (kiddi@kiddaland.net) Joachim Metz (joachim.metz@gmail.com) Daniel White (onager@deerpie.com) acstore-20240407/LICENSE000066400000000000000000000261361460443036000144330ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. 
Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. acstore-20240407/MANIFEST.in000066400000000000000000000007561460443036000151640ustar00rootroot00000000000000include ACKNOWLEDGEMENTS AUTHORS LICENSE README include dependencies.ini run_tests.py utils/__init__.py utils/dependencies.py include utils/check_dependencies.py include requirements.txt test_requirements.txt exclude .gitignore exclude *.pyc recursive-include config * recursive-exclude acstore *.pyc # The test scripts are not required in a binary distribution package they # are considered source distribution files and excluded in find_package() # in setup.py. recursive-include tests *.py acstore-20240407/README000066400000000000000000000003711460443036000142770ustar00rootroot00000000000000ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write Attribute Container stores, such as Plaso storage files. For more information see: * Project documentation: https://acstore.readthedocs.io/en/latest acstore-20240407/acstore.ini000066400000000000000000000006331460443036000155610ustar00rootroot00000000000000[project] name: acstore name_description: ACStore status: alpha maintainer: Log2Timeline maintainers homepage_url: https://github.com/log2timeline/acstore description_short: Attribute Container Storage (ACStore). description_long: ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write attribute container storage files. 
acstore-20240407/acstore/000077500000000000000000000000001460443036000150565ustar00rootroot00000000000000acstore-20240407/acstore/__init__.py000066400000000000000000000003271460443036000171710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Attribute Container Storage (ACStore). ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write plaso storage files. """ __version__ = '20240407' acstore-20240407/acstore/containers/000077500000000000000000000000001460443036000172235ustar00rootroot00000000000000acstore-20240407/acstore/containers/__init__.py000066400000000000000000000000301460443036000213250ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/acstore/containers/interface.py000066400000000000000000000147271460443036000215500ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The attribute container interface.""" class AttributeContainerIdentifier(object): """The attribute container identifier. The identifier is used to uniquely identify attribute containers. The value should be unique relative to an attribute container store. Attributes: name (str): name of the table (attribute container). sequence_number (int): sequence number of the attribute container. """ def __init__(self, name=None, sequence_number=None): """Initializes an attribute container identifier. Args: name (Optional[str]): name of the table (attribute container). sequence_number (Optional[int]): sequence number of the attribute container. """ super(AttributeContainerIdentifier, self).__init__() self.name = name self.sequence_number = sequence_number def CopyFromString(self, identifier_string): """Copies the identifier from a string representation. Args: identifier_string (str): string representation. """ self.name, sequence_number = identifier_string.split('.') self.sequence_number = int(sequence_number, 10) def CopyToString(self): """Copies the identifier to a string representation. Returns: str: unique identifier or None. 
""" if self.name is not None and self.sequence_number is not None: return f'{self.name:s}.{self.sequence_number:d}' return None class AttributeContainer(object): """The attribute container interface. This is the base class for those object that exists primarily as a container of attributes with basic accessors and mutators. The CONTAINER_TYPE class attribute contains a string that identifies the container type, for example the container type "event" identifiers an event object. Attributes are public class members of a serializable type. Protected and private class members are not to be serialized, with the exception of those defined in _SERIALIZABLE_PROTECTED_ATTRIBUTES. """ CONTAINER_TYPE = None # Names of protected attributes, those with a leading underscore, that # should be serialized. _SERIALIZABLE_PROTECTED_ATTRIBUTES = [] def __init__(self): """Initializes an attribute container.""" super(AttributeContainer, self).__init__() self._identifier = AttributeContainerIdentifier( name=self.CONTAINER_TYPE, sequence_number=id(self)) def CopyFromDict(self, attributes): """Copies the attribute container from a dictionary. Args: attributes (dict[str, object]): attribute values per name. """ for attribute_name, attribute_value in attributes.items(): # Not using startswith to improve performance. if (attribute_name[0] != '_' or attribute_name in self._SERIALIZABLE_PROTECTED_ATTRIBUTES): self.__dict__[attribute_name] = attribute_value def CopyToDict(self): """Copies the attribute container to a dictionary. Returns: dict[str, object]: attribute values per name. """ return dict(self.GetAttributes()) def GetAttributeNames(self): """Retrieves the names of all attributes. Returns: list[str]: attribute names. """ attribute_names = list(self._SERIALIZABLE_PROTECTED_ATTRIBUTES) for attribute_name in self.__dict__: # Not using startswith to improve performance. 
if attribute_name[0] != '_': attribute_names.append(attribute_name) return attribute_names def GetAttributes(self): """Retrieves the attribute names and values. Attributes that are set to None are ignored. Yields: tuple[str, object]: attribute name and value. """ for attribute_name, attribute_value in self.__dict__.items(): # Not using startswith to improve performance. if attribute_value is not None and ( attribute_name[0] != '_' or attribute_name in self._SERIALIZABLE_PROTECTED_ATTRIBUTES): yield attribute_name, attribute_value def GetAttributeValuesHash(self): """Retrieves a comparable string of the attribute values. Returns: int: hash of comparable string of the attribute values. """ return hash(self.GetAttributeValuesString()) def GetAttributeValuesString(self): """Retrieves a comparable string of the attribute values. Returns: str: comparable string of the attribute values. """ attributes = [] for attribute_name, attribute_value in sorted(self.__dict__.items()): # Not using startswith to improve performance. if attribute_value is not None and ( attribute_name[0] != '_' or attribute_name in self._SERIALIZABLE_PROTECTED_ATTRIBUTES): if isinstance(attribute_value, dict): attribute_value = sorted(attribute_value.items()) elif isinstance(attribute_value, bytes): attribute_value = repr(attribute_value) attributes.append(f'{attribute_name:s}: {attribute_value!s}') return ', '.join(attributes) def GetIdentifier(self): """Retrieves the identifier. The identifier is a storage specific value that should not be serialized. Returns: AttributeContainerIdentifier: a unique identifier for the container. """ return self._identifier def MatchesExpression(self, expression): """Determines if an attribute container matches the expression. Args: expression (code|str): expression. Returns: bool: True if the attribute container matches the expression, False otherwise. 
""" result = not expression if expression: namespace = {} for attribute_name, attribute_value in self.__dict__.items(): # Not using startswith to improve performance. if attribute_value is not None and ( attribute_name[0] != '_' or attribute_name in self._SERIALIZABLE_PROTECTED_ATTRIBUTES): if isinstance(attribute_value, AttributeContainerIdentifier): attribute_value = attribute_value.CopyToString() namespace[attribute_name] = attribute_value # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: result = eval(expression, namespace) # pylint: disable=eval-used except Exception: # pylint: disable=broad-except pass return result def SetIdentifier(self, identifier): """Sets the identifier. The identifier is a storage specific value that should not be serialized. Args: identifier (AttributeContainerIdentifier): identifier. """ self._identifier = identifier acstore-20240407/acstore/containers/manager.py000066400000000000000000000073501460443036000212140ustar00rootroot00000000000000# -*- coding: utf-8 -*- """This file contains the attribute container manager class.""" class AttributeContainersManager(object): """Class that implements the attribute container manager.""" _attribute_container_classes = {} @classmethod def CreateAttributeContainer(cls, container_type): """Creates an instance of a specific attribute container type. Args: container_type (str): container type. Returns: AttributeContainer: an instance of attribute container. Raises: ValueError: if the container type is not supported. """ container_class = cls._attribute_container_classes.get( container_type, None) if not container_class: raise ValueError(f'Unsupported container type: {container_type:s}') return container_class() @classmethod def DeregisterAttributeContainer(cls, attribute_container_class): """Deregisters an attribute container class. The attribute container classes are identified based on their lower case container type. 
Args: attribute_container_class (type): attribute container class. Raises: KeyError: if attribute container class is not set for the corresponding container type. """ container_type = attribute_container_class.CONTAINER_TYPE.lower() if container_type not in cls._attribute_container_classes: raise KeyError(( f'Attribute container class not set for container type: ' f'{attribute_container_class.CONTAINER_TYPE:s}.')) del cls._attribute_container_classes[container_type] @classmethod def GetContainerTypes(cls): """Retrieves the container types of the registered attribute containers. Returns: list[str]: container types. """ return list(cls._attribute_container_classes.keys()) @classmethod def GetSchema(cls, container_type): """Retrieves the schema of a registered attribute container. Args: container_type (str): attribute container type. Returns: dict[str, str]: attribute container schema or an empty dictionary if no schema available. Raises: ValueError: if the container type is not supported. """ container_class = cls._attribute_container_classes.get( container_type, None) if not container_class: raise ValueError(f'Unsupported container type: {container_type!s}') return getattr(container_class, 'SCHEMA', {}) @classmethod def RegisterAttributeContainer(cls, attribute_container_class): """Registers an attribute container class. The attribute container classes are identified based on their lower case container type. Args: attribute_container_class (type): attribute container class. Raises: KeyError: if attribute container class is already set for the corresponding container type. 
""" container_type = attribute_container_class.CONTAINER_TYPE.lower() if container_type in cls._attribute_container_classes: raise KeyError(( f'Attribute container class already set for container type: ' f'{attribute_container_class.CONTAINER_TYPE:s}.')) cls._attribute_container_classes[container_type] = attribute_container_class @classmethod def RegisterAttributeContainers(cls, attribute_container_classes): """Registers attribute container classes. The attribute container classes are identified based on their lower case container type. Args: attribute_container_classes (list[type]): attribute container classes. Raises: KeyError: if attribute container class is already set for the corresponding container type. """ for attribute_container_class in attribute_container_classes: cls.RegisterAttributeContainer(attribute_container_class) acstore-20240407/acstore/errors.py000066400000000000000000000002571460443036000167500ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The error objects.""" class Error(Exception): """The error interface.""" class ParseError(Error): """Raised when a parse error occurred.""" acstore-20240407/acstore/fake_store.py000066400000000000000000000145011460443036000175530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Fake (in-memory only) attribute container store for testing.""" import ast import collections import copy from acstore import interface from acstore.containers import interface as containers_interface class FakeAttributeContainerStore(interface.AttributeContainerStore): """Fake (in-memory only) attribute container store.""" def __init__(self): """Initializes a fake (in-memory only) store.""" super(FakeAttributeContainerStore, self).__init__() self._attribute_container_indexes = {} self._attribute_containers = {} self._is_open = False def _RaiseIfNotReadable(self): """Raises if the store is not readable. Raises: OSError: if the store cannot be read from. IOError: if the store cannot be read from. 
""" if not self._is_open: raise IOError('Unable to read from closed storage writer.') def _RaiseIfNotWritable(self): """Raises if the storage file is not writable. Raises: IOError: when the storage writer is closed. OSError: when the storage writer is closed. """ if not self._is_open: raise IOError('Unable to write to closed storage writer.') def _WriteExistingAttributeContainer(self, container): """Writes an existing attribute container to the store. Args: container (AttributeContainer): attribute container. Raises: IOError: if an unsupported identifier is provided or if the attribute container does not exist. OSError: if an unsupported identifier is provided or if the attribute container does not exist. """ identifier = container.GetIdentifier() lookup_key = identifier.CopyToString() containers = self._attribute_containers.get(container.CONTAINER_TYPE, None) if containers is None or lookup_key not in containers: raise IOError(( f'Missing attribute container: {container.CONTAINER_TYPE:s} with ' f'identifier: {lookup_key:s}')) containers[lookup_key] = container def _WriteNewAttributeContainer(self, container): """Writes a new attribute container to the store. Args: container (AttributeContainer): attribute container. 
""" containers = self._attribute_containers.get(container.CONTAINER_TYPE, None) if containers is None: containers = collections.OrderedDict() self._attribute_containers[container.CONTAINER_TYPE] = containers container_indexes = self._attribute_container_indexes.get( container.CONTAINER_TYPE, None) if container_indexes is None: container_indexes = [] self._attribute_container_indexes[container.CONTAINER_TYPE] = ( container_indexes) next_sequence_number = self._GetAttributeContainerNextSequenceNumber( container.CONTAINER_TYPE) identifier = containers_interface.AttributeContainerIdentifier( name=container.CONTAINER_TYPE, sequence_number=next_sequence_number) container.SetIdentifier(identifier) lookup_key = identifier.CopyToString() # Make sure the fake storage preserves the state of the attribute container. containers[lookup_key] = copy.deepcopy(container) container_indexes.append(lookup_key) def Close(self): """Closes the store. Raises: IOError: if the store is already closed. OSError: if the store is already closed. """ if not self._is_open: raise IOError('Store already closed.') self._is_open = False def GetAttributeContainerByIdentifier(self, container_type, identifier): """Retrieves a specific type of container with a specific identifier. Args: container_type (str): container type. identifier (AttributeContainerIdentifier): attribute container identifier. Returns: AttributeContainer: attribute container or None if not available. """ containers = self._attribute_containers.get(container_type, {}) lookup_key = identifier.CopyToString() return containers.get(lookup_key, None) def GetAttributeContainerByIndex(self, container_type, index): """Retrieves a specific attribute container. Args: container_type (str): attribute container type. index (int): attribute container index. Returns: AttributeContainer: attribute container or None if not available. 
""" containers = self._attribute_containers.get(container_type, {}) number_of_containers = len(containers) if index < 0 or index >= number_of_containers: return None container_indexes = self._attribute_container_indexes.get( container_type, None) lookup_key = container_indexes[index] return containers[lookup_key] def GetAttributeContainers(self, container_type, filter_expression=None): """Retrieves a specific type of attribute containers. Args: container_type (str): attribute container type. filter_expression (Optional[str]): expression to filter the resulting attribute containers by. Yield: AttributeContainer: attribute container. """ if filter_expression: expression_ast = ast.parse(filter_expression, mode='eval') filter_expression = compile(expression_ast, '', mode='eval') for attribute_container in self._attribute_containers.get( container_type, {}).values(): if attribute_container.MatchesExpression(filter_expression): yield attribute_container def GetNumberOfAttributeContainers(self, container_type): """Retrieves the number of a specific type of attribute containers. Args: container_type (str): attribute container type. Returns: int: the number of containers of a specified type. """ containers = self._attribute_containers.get(container_type, {}) return len(containers) def HasAttributeContainers(self, container_type): """Determines if a store contains a specific type of attribute container. Args: container_type (str): attribute container type. Returns: bool: True if the store contains the specified type of attribute containers. """ containers = self._attribute_containers.get(container_type, {}) return bool(containers) def Open(self, **kwargs): """Opens the store. Raises: IOError: if the store is already opened. OSError: if the store is already opened. 
""" if self._is_open: raise IOError('Store already opened.') self._is_open = True acstore-20240407/acstore/helpers/000077500000000000000000000000001460443036000165205ustar00rootroot00000000000000acstore-20240407/acstore/helpers/__init__.py000066400000000000000000000000301460443036000206220ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/acstore/helpers/json_serializer.py000066400000000000000000000101231460443036000222710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Attribute container JSON serializer.""" from acstore.containers import interface as containers_interface from acstore.containers import manager as containers_manager from acstore.helpers import schema as schema_helper class AttributeContainerJSONSerializer(object): """Attribute container JSON serializer.""" _CONTAINERS_MANAGER = containers_manager.AttributeContainersManager @classmethod def ConvertAttributeContainerToJSON(cls, attribute_container): """Converts an attribute container object into a JSON dictioary. The resulting dictionary of the JSON serialized objects consists of: { '__type__': 'AttributeContainer' '__container_type__': ... ... } Here '__type__' indicates the object base type. In this case 'AttributeContainer'. '__container_type__' indicates the container type and rest of the elements of the dictionary that make up the attributes of the container. Args: attribute_container (AttributeContainer): attribute container. Returns: dict[str, object]: JSON serialized objects. 
""" try: schema = cls._CONTAINERS_MANAGER.GetSchema( attribute_container.CONTAINER_TYPE) except ValueError: schema = {} json_dict = { '__type__': 'AttributeContainer', '__container_type__': attribute_container.CONTAINER_TYPE} for attribute_name, attribute_value in attribute_container.GetAttributes(): data_type = schema.get(attribute_name, None) if data_type == 'AttributeContainerIdentifier' and isinstance( attribute_value, containers_interface.AttributeContainerIdentifier): attribute_value = attribute_value.CopyToString() else: serializer = schema_helper.SchemaHelper.GetAttributeSerializer( data_type, 'json') if serializer: attribute_value = serializer.SerializeValue(attribute_value) # JSON will not serialize certain runtime types like set, therefore # these are cast to list first. if isinstance(attribute_value, set): attribute_value = list(attribute_value) json_dict[attribute_name] = attribute_value return json_dict @classmethod def ConvertJSONToAttributeContainer(cls, json_dict): """Converts a JSON dictionary into an attribute container object. The dictionary of the JSON serialized objects consists of: { '__type__': 'AttributeContainer' '__container_type__': ... ... } Here '__type__' indicates the object base type. In this case 'AttributeContainer'. '__container_type__' indicates the container type and rest of the elements of the dictionary that make up the attributes of the container. Args: json_dict (dict[str, object]): JSON serialized objects. Returns: AttributeContainer: attribute container. """ # Use __container_type__ to indicate the attribute container type. 
container_type = json_dict.get('__container_type__', None) attribute_container = cls._CONTAINERS_MANAGER.CreateAttributeContainer( container_type) try: schema = cls._CONTAINERS_MANAGER.GetSchema(container_type) except ValueError: schema = {} supported_attribute_names = attribute_container.GetAttributeNames() for attribute_name, attribute_value in json_dict.items(): if attribute_name in ('__container_type__', '__type__'): continue # Be strict about which attributes to set. if attribute_name not in supported_attribute_names: continue data_type = schema.get(attribute_name, None) if data_type == 'AttributeContainerIdentifier': identifier = containers_interface.AttributeContainerIdentifier() identifier.CopyFromString(attribute_value) attribute_value = identifier else: serializer = schema_helper.SchemaHelper.GetAttributeSerializer( data_type, 'json') if serializer: attribute_value = serializer.DeserializeValue(attribute_value) setattr(attribute_container, attribute_name, attribute_value) return attribute_container acstore-20240407/acstore/helpers/schema.py000066400000000000000000000043021460443036000203310ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Schema helper.""" class SchemaHelper(object): """Schema helper.""" # Data types and corresponding attribute serializers per method. _data_types = { 'AttributeContainerIdentifier': None, 'bool': None, 'int': None, 'str': None, 'timestamp': None} @classmethod def DeregisterDataType(cls, data_type): """Deregisters a data type. Args: data_type (str): data type. Raises: KeyError: if the data type is not set. """ if data_type not in cls._data_types: raise KeyError(f'Data type: {data_type:s} not set.') del cls._data_types[data_type] @classmethod def GetAttributeSerializer(cls, data_type, serialization_method): """Retrieves a specific attribute serializer. Args: data_type (str): data type. serialization_method (str): serialization method. Returns: AttributeSerializer: attribute serializer or None if not available. 
""" serializers = cls._data_types.get(data_type, None) or {} return serializers.get(serialization_method, None) @classmethod def HasDataType(cls, data_type): """Determines is a specific data type is supported by the schema. Args: data_type (str): data type. Returns: bool: True if the data type is supported, or False otherwise. """ return data_type in cls._data_types @classmethod def RegisterDataType(cls, data_type, serializers): """Registers a data type. Args: data_type (str): data type. serializers (dict[str, AttributeSerializer]): attribute serializers per method. Raises: KeyError: if the data type is already set. """ if data_type in cls._data_types: raise KeyError(f'Data type: {data_type:s} already set.') cls._data_types[data_type] = serializers @classmethod def RegisterDataTypes(cls, data_types): """Registers data types. Args: data_types (dict[str: dict[str, AttributeSerializer]]): attribute serializers with method per data types. Raises: KeyError: if the data type is already set. """ for data_type, serializers in data_types.items(): cls.RegisterDataType(data_type, serializers) acstore-20240407/acstore/helpers/yaml_definitions_file.py000066400000000000000000000107661460443036000234400ustar00rootroot00000000000000# -*- coding: utf-8 -*- """YAML-based attribute container definitions file.""" import yaml from acstore import errors from acstore.containers import interface from acstore.helpers import schema # TODO: merge this into interface.AttributeContainer once Plaso has been # changed to no longer support attributes containers without a schema. class AttributeContainerWithSchema(interface.AttributeContainer): """Attribute container with schema.""" SCHEMA = {} class YAMLAttributeContainerDefinitionsFile(object): """YAML-based attribute container definitions file. A YAML-based attribute container definitions file contains one or more attribute container definitions. 
An attribute container definition consists of: name: windows_eventlog_message_file attributes: - name: path type: str - name: windows_path type: str Where: * name, unique identifier of the attribute container; * attributes, defines the attributes of the container. """ _SUPPORTED_DATA_TYPES = frozenset([ 'AttributeContainerIdentifier', 'bool', 'int', 'str', 'timestamp']) _SUPPORTED_KEYS = frozenset([ 'attributes', 'name']) def _ReadDefinition(self, definition_values): """Reads a definition from a dictionary. Args: definition_values (dict[str, object]): attribute container definition values. Returns: AttributeContainer: an attribute container. Raises: ParseError: if the definition is not set or incorrect. """ if not definition_values: raise errors.ParseError('Missing attribute container definition values.') different_keys = set(definition_values) - self._SUPPORTED_KEYS if different_keys: different_keys = ', '.join(different_keys) raise errors.ParseError(f'Undefined keys: {different_keys:s}') container_name = definition_values.get('name', None) if not container_name: raise errors.ParseError( 'Invalid attribute container definition missing name.') attributes = definition_values.get('attributes', None) if not attributes: raise errors.ParseError(( f'Invalid attribute container definition: {container_name:s} ' f'missing attributes.')) class_name = ''.join([ element.title() for element in container_name.split('_')]) class_attributes = {'CONTAINER_TYPE': container_name} container_schema = {} for attribute_index, attribute_values in enumerate(attributes): attribute_name = attribute_values.get('name', None) if not attribute_name: raise errors.ParseError(( f'Invalid attribute container definition: {container_name:s} name ' f'missing of attribute: {attribute_index:d}.')) if attribute_name in class_attributes: raise errors.ParseError(( f'Invalid attribute container definition: {container_name:s} ' f'attribute: {attribute_name:s} already set.')) attribute_data_type = 
attribute_values.get('type', None) if not attribute_data_type: raise errors.ParseError(( f'Invalid attribute container definition: {container_name:s} type ' f'missing of attribute: {attribute_name:s}.')) if not schema.SchemaHelper.HasDataType(attribute_data_type): raise errors.ParseError(( f'Invalid attribute container definition: {container_name:s} type ' f'attribute: {attribute_name:s} unsupported data type: ' f'{attribute_data_type:s}.')) class_attributes[attribute_name] = None container_schema[attribute_name] = attribute_data_type class_attributes['SCHEMA'] = container_schema # TODO: add support for _SERIALIZABLE_PROTECTED_ATTRIBUTES. return type(class_name, (AttributeContainerWithSchema, ), class_attributes) def _ReadFromFileObject(self, file_object): """Reads the definitions from a file-like object. Args: file_object (file): definitions file-like object. Yields: AttributeContainer: an attribute container. """ yaml_generator = yaml.safe_load_all(file_object) for yaml_definition in yaml_generator: yield self._ReadDefinition(yaml_definition) def ReadFromFile(self, path): """Reads the definitions from a YAML file. Args: path (str): path to a definitions file. Yields: AttributeContainer: an attribute container. """ with open(path, 'r', encoding='utf-8') as file_object: for yaml_definition in self._ReadFromFileObject(file_object): yield yaml_definition acstore-20240407/acstore/interface.py000066400000000000000000000172401460443036000173740ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The attribute container store interface.""" import abc import collections from acstore.containers import manager as containers_manager class AttributeSerializer(object): """Attribute serializer.""" @abc.abstractmethod def DeserializeValue(self, value): """Deserializes a value. Args: value (object): serialized value. Returns: object: runtime value. """ @abc.abstractmethod def SerializeValue(self, value): """Serializes a value. Args: value (object): runtime value. 
class AttributeContainerStore(object):
  """Interface of an attribute container store.

  Attributes:
    format_version (int): storage format version.
  """

  def __init__(self):
    """Initializes an attribute container store."""
    super(AttributeContainerStore, self).__init__()
    self._attribute_container_sequence_numbers = collections.Counter()
    self._containers_manager = containers_manager.AttributeContainersManager
    self._storage_profiler = None
    self.format_version = None

  def _GetAttributeContainerNextSequenceNumber(self, container_type):
    """Retrieves the next sequence number of an attribute container.

    Args:
      container_type (str): attribute container type.

    Returns:
      int: next sequence number.
    """
    sequence_numbers = self._attribute_container_sequence_numbers
    sequence_numbers[container_type] += 1
    return sequence_numbers[container_type]

  def _GetAttributeContainerSchema(self, container_type):
    """Retrieves the schema of an attribute container.

    Args:
      container_type (str): attribute container type.

    Returns:
      dict[str, str]: attribute container schema or an empty dictionary if
          no schema available.
    """
    try:
      return self._containers_manager.GetSchema(container_type)
    except ValueError:
      return {}

  @abc.abstractmethod
  def _RaiseIfNotReadable(self):
    """Raises if the store is not readable.

    Raises:
      OSError: if the store cannot be read from.
      IOError: if the store cannot be read from.
    """

  @abc.abstractmethod
  def _RaiseIfNotWritable(self):
    """Raises if the store is not writable.

    Raises:
      OSError: if the store cannot be written to.
      IOError: if the store cannot be written to.
    """

  def _SetAttributeContainerNextSequenceNumber(
      self, container_type, next_sequence_number):
    """Sets the next sequence number of an attribute container.

    Args:
      container_type (str): attribute container type.
      next_sequence_number (int): next sequence number.
    """
    self._attribute_container_sequence_numbers[
        container_type] = next_sequence_number

  @abc.abstractmethod
  def _WriteExistingAttributeContainer(self, container):
    """Writes an existing attribute container to the store.

    Args:
      container (AttributeContainer): attribute container.
    """

  @abc.abstractmethod
  def _WriteNewAttributeContainer(self, container):
    """Writes a new attribute container to the store.

    Args:
      container (AttributeContainer): attribute container.
    """

  def AddAttributeContainer(self, container):
    """Adds a new attribute container.

    Args:
      container (AttributeContainer): attribute container.

    Raises:
      OSError: if the store cannot be written to.
      IOError: if the store cannot be written to.
    """
    self._RaiseIfNotWritable()
    self._WriteNewAttributeContainer(container)

  @abc.abstractmethod
  def Close(self):
    """Closes the store."""

  @abc.abstractmethod
  def GetAttributeContainerByIdentifier(self, container_type, identifier):
    """Retrieves a specific type of container with a specific identifier.

    Args:
      container_type (str): container type.
      identifier (AttributeContainerIdentifier): attribute container
          identifier.

    Returns:
      AttributeContainer: attribute container or None if not available.
    """

  @abc.abstractmethod
  def GetAttributeContainerByIndex(self, container_type, index):
    """Retrieves a specific attribute container.

    Args:
      container_type (str): attribute container type.
      index (int): attribute container index.

    Returns:
      AttributeContainer: attribute container or None if not available.
    """

  @abc.abstractmethod
  def GetAttributeContainers(self, container_type, filter_expression=None):
    """Retrieves a specific type of attribute containers.

    Args:
      container_type (str): attribute container type.
      filter_expression (Optional[str]): expression to filter the resulting
          attribute containers by.

    Returns:
      generator(AttributeContainer): attribute container generator.
    """

  @abc.abstractmethod
  def GetNumberOfAttributeContainers(self, container_type):
    """Retrieves the number of a specific type of attribute containers.

    Args:
      container_type (str): attribute container type.

    Returns:
      int: the number of containers of a specified type.
    """

  @abc.abstractmethod
  def HasAttributeContainers(self, container_type):
    """Determines if a store contains a specific type of attribute container.

    Args:
      container_type (str): attribute container type.

    Returns:
      bool: True if the store contains the specified type of attribute
          containers.
    """

  @abc.abstractmethod
  def Open(self, **kwargs):
    """Opens the store."""

  def SetStorageProfiler(self, storage_profiler):
    """Sets the storage profiler.

    Args:
      storage_profiler (StorageProfiler): storage profiler.
    """
    self._storage_profiler = storage_profiler

  def UpdateAttributeContainer(self, container):
    """Updates an existing attribute container.

    Args:
      container (AttributeContainer): attribute container.

    Raises:
      OSError: if the store cannot be written to.
      IOError: if the store cannot be written to.
    """
    self._RaiseIfNotWritable()
    self._WriteExistingAttributeContainer(container)


class AttributeContainerStoreWithReadCache(AttributeContainerStore):
  """Interface of an attribute container store with read cache.

  Attributes:
    format_version (int): storage format version.
  """

  # pylint: disable=abstract-method

  # The maximum number of cached attribute containers
  _MAXIMUM_CACHED_CONTAINERS = 32 * 1024

  def __init__(self):
    """Initializes an attribute container store with read cache."""
    super(AttributeContainerStoreWithReadCache, self).__init__()
    self._attribute_container_cache = collections.OrderedDict()

  def _CacheAttributeContainerByIndex(self, attribute_container, index):
    """Caches a specific attribute container.

    Args:
      attribute_container (AttributeContainer): attribute container.
      index (int): attribute container index.
    """
    cache = self._attribute_container_cache
    if len(cache) >= self._MAXIMUM_CACHED_CONTAINERS:
      # Recently used entries are kept at the front, hence the entry at the
      # end is the least recently used one and is evicted first.
      cache.popitem(last=True)

    lookup_key = f'{attribute_container.CONTAINER_TYPE:s}.{index:d}'
    cache[lookup_key] = attribute_container
    cache.move_to_end(lookup_key, last=False)

  def _GetCachedAttributeContainer(self, container_type, index):
    """Retrieves a specific cached attribute container.

    Args:
      container_type (str): attribute container type.
      index (int): attribute container index.

    Returns:
      AttributeContainer: attribute container or None if not available.
    """
    lookup_key = f'{container_type:s}.{index:d}'
    attribute_container = self._attribute_container_cache.get(lookup_key, None)
    if attribute_container:
      # Mark the entry as most recently used by moving it to the front.
      self._attribute_container_cache.move_to_end(lookup_key, last=False)
    return attribute_container
""" def __init__(self): """Initializes the CPU time measurement.""" super(CPUTimeMeasurement, self).__init__() self._start_cpu_time = None self.start_sample_time = None self.total_cpu_time = None def SampleStart(self): """Starts measuring the CPU time.""" self._start_cpu_time = time.perf_counter() self.start_sample_time = time.time() self.total_cpu_time = 0 def SampleStop(self): """Stops measuring the CPU time.""" if self._start_cpu_time is not None: self.total_cpu_time += time.perf_counter() - self._start_cpu_time class StorageProfiler(object): """The storage profiler.""" _FILENAME_PREFIX = 'storage' _FILE_HEADER = ( 'Time\tName\tOperation\tDescription\tProcessing time\tData size\t' 'Compressed data size\n') def __init__(self, identifier, path): """Initializes a storage profiler. Sample files are gzip compressed UTF-8 encoded CSV files. Args: identifier (str): identifier of the profiling session used to create the sample filename. path (str): path of the sample file. """ super(StorageProfiler, self).__init__() self._identifier = identifier self._path = path self._profile_measurements = {} self._sample_file = None self._start_time = None def _WritesString(self, content): """Writes a string to the sample file. Args: content (str): content to write to the sample file. """ content_bytes = codecs.encode(content, 'utf-8') self._sample_file.write(content_bytes) @classmethod def IsSupported(cls): """Determines if the profiler is supported. Returns: bool: True if the profiler is supported. """ return True def Sample( self, profile_name, operation, description, data_size, compressed_data_size): """Takes a sample of data read or written for profiling. Args: profile_name (str): name of the profile to sample. operation (str): operation, either 'read' or 'write'. description (str): description of the data read. data_size (int): size of the data read in bytes. compressed_data_size (int): size of the compressed data read in bytes. 
""" measurements = self._profile_measurements.get(profile_name) if measurements: sample_time = measurements.start_sample_time processing_time = measurements.total_cpu_time else: sample_time = time.time() processing_time = 0.0 self._WritesString(( f'{sample_time:f}\t{profile_name:s}\t{operation:s}\t{description:s}\t' f'{processing_time:f}\t{data_size:d}\t{compressed_data_size:d}\n')) def Start(self): """Starts the profiler.""" filename = f'{self._FILENAME_PREFIX:s}-{self._identifier:s}.csv.gz' if self._path: filename = os.path.join(self._path, filename) self._sample_file = gzip.open(filename, 'wb') self._WritesString(self._FILE_HEADER) self._start_time = time.time() def StartTiming(self, profile_name): """Starts timing CPU time. Args: profile_name (str): name of the profile to sample. """ if profile_name not in self._profile_measurements: self._profile_measurements[profile_name] = CPUTimeMeasurement() self._profile_measurements[profile_name].SampleStart() def Stop(self): """Stops the profiler.""" self._sample_file.close() self._sample_file = None def StopTiming(self, profile_name): """Stops timing CPU time. Args: profile_name (str): name of the profile to sample. """ measurements = self._profile_measurements.get(profile_name) if measurements: measurements.SampleStop() acstore-20240407/acstore/sqlite_store.py000066400000000000000000001044521460443036000201530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """SQLite-based attribute container store.""" import ast import itertools import json import os import pathlib import sqlite3 from acstore import interface from acstore.containers import interface as containers_interface from acstore.helpers import schema as schema_helper def PythonAST2SQL(ast_node): """Converts a Python AST to SQL. Args: ast_node (ast.Node): node of the Python AST. Returns: str: SQL statement that represents the node. Raises: TypeError: if the type of node is not supported. 
""" if isinstance(ast_node, ast.BoolOp): if isinstance(ast_node.op, ast.And): operand = ' AND ' elif isinstance(ast_node.op, ast.Or): operand = ' OR ' else: raise TypeError(ast_node) return operand.join([ PythonAST2SQL(ast_node_value) for ast_node_value in ast_node.values]) if isinstance(ast_node, ast.Compare): if len(ast_node.ops) != 1: raise TypeError(ast_node) if isinstance(ast_node.ops[0], ast.Eq): operator = ' = ' elif isinstance(ast_node.ops[0], ast.NotEq): operator = ' <> ' else: raise TypeError(ast_node) if len(ast_node.comparators) != 1: raise TypeError(ast_node) sql_left = PythonAST2SQL(ast_node.left) sql_right = PythonAST2SQL(ast_node.comparators[0]) return operator.join([sql_left, sql_right]) if isinstance(ast_node, ast.Constant): if isinstance(ast_node.value, str): return f'"{ast_node.value:s}"' return str(ast_node.value) if isinstance(ast_node, ast.Name): return ast_node.id if isinstance(ast_node, ast.Num): return str(ast_node.n) if isinstance(ast_node, ast.Str): return f'"{ast_node.s:s}"' raise TypeError(ast_node) class SQLiteSchemaHelper(object): """SQLite schema helper.""" _MAPPINGS = { 'bool': 'INTEGER', 'int': 'INTEGER', 'str': 'TEXT', 'timestamp': 'BIGINT'} def GetStorageDataType(self, data_type): """Retrieves the storage data type. Args: data_type (str): schema data type. Returns: str: corresponding SQLite data type. """ return self._MAPPINGS.get(data_type, 'TEXT') def DeserializeValue(self, data_type, value): """Deserializes a value. Args: data_type (str): schema data type. value (object): serialized value. Returns: object: runtime value. Raises: IOError: if the schema data type is not supported. OSError: if the schema data type is not supported. 
""" if not schema_helper.SchemaHelper.HasDataType(data_type): raise IOError(f'Unsupported data type: {data_type:s}') if value is not None: if data_type == 'AttributeContainerIdentifier': identifier = containers_interface.AttributeContainerIdentifier() identifier.CopyFromString(value) value = identifier elif data_type == 'bool': value = bool(value) elif data_type not in self._MAPPINGS: serializer = schema_helper.SchemaHelper.GetAttributeSerializer( data_type, 'json') json_dict = json.loads(value) value = serializer.DeserializeValue(json_dict) return value def SerializeValue(self, data_type, value): """Serializes a value. Args: data_type (str): schema data type. value (object): runtime value. Returns: object: serialized value. Raises: IOError: if the schema data type is not supported. OSError: if the schema data type is not supported. """ if not schema_helper.SchemaHelper.HasDataType(data_type): raise IOError(f'Unsupported data type: {data_type:s}') if value is not None: if data_type == 'AttributeContainerIdentifier' and isinstance( value, containers_interface.AttributeContainerIdentifier): value = value.CopyToString() elif data_type == 'bool': value = int(value) elif data_type not in self._MAPPINGS: serializer = schema_helper.SchemaHelper.GetAttributeSerializer( data_type, 'json') # JSON will not serialize certain runtime types like set, therefore # these are cast to list first. if isinstance(value, set): value = list(value) json_dict = serializer.SerializeValue(value) return json.dumps(json_dict) return value class SQLiteAttributeContainerStore( interface.AttributeContainerStoreWithReadCache): """SQLite-based attribute container store. Attributes: format_version (int): storage format version. serialization_format (str): serialization format. """ _FORMAT_VERSION = 20230312 # The earliest format version, stored in-file, that this class # is able to append (write). 
_APPEND_COMPATIBLE_FORMAT_VERSION = 20230312 # The earliest format version, stored in-file, that this class # is able to upgrade (write new format features). _UPGRADE_COMPATIBLE_FORMAT_VERSION = 20230312 # The earliest format version, stored in-file, that this class # is able to read. _READ_COMPATIBLE_FORMAT_VERSION = 20230312 _CREATE_METADATA_TABLE_QUERY = ( 'CREATE TABLE metadata (key TEXT, value TEXT);') _HAS_TABLE_QUERY = ( 'SELECT name FROM sqlite_master ' 'WHERE type = "table" AND name = "{0:s}"') _INSERT_METADATA_VALUE_QUERY = ( 'INSERT INTO metadata (key, value) VALUES (?, ?)') _MAXIMUM_WRITE_CACHE_SIZE = 50 def __init__(self): """Initializes a SQLite attribute container store.""" super(SQLiteAttributeContainerStore, self).__init__() self._connection = None self._cursor = None self._is_open = False self._read_only = True self._schema_helper = SQLiteSchemaHelper() self._write_cache = {} self.format_version = self._FORMAT_VERSION self.serialization_format = 'json' def _CacheAttributeContainerForWrite( self, container_type, column_names, values): """Caches an attribute container for writing. Args: container_type (str): attribute container type. column_names (list[str]): names of the columns. values (list[str]): values for each of the columns. """ write_cache = self._write_cache.get(container_type, [column_names]) write_cache.append(values) if len(write_cache) >= self._MAXIMUM_WRITE_CACHE_SIZE: self._FlushWriteCache(container_type, write_cache) write_cache = [column_names] self._write_cache[container_type] = write_cache def _CheckStorageMetadata(self, metadata_values, check_readable_only=False): """Checks the storage metadata. Args: metadata_values (dict[str, str]): metadata values per key. check_readable_only (Optional[bool]): whether the store should only be checked to see if it can be read. If False, the store will be checked to see if it can be read and written to. Raises: IOError: if the storage metadata is not supported. 
OSError: if the storage metadata is not supported. """ format_version = metadata_values.get('format_version', None) if not format_version: raise IOError('Missing format version.') try: format_version = int(format_version, 10) except (TypeError, ValueError): raise IOError(f'Invalid format version: {format_version!s}.') if (not check_readable_only and format_version < self._APPEND_COMPATIBLE_FORMAT_VERSION): raise IOError(( f'Format version: {format_version:d} is too old and can no longer ' f'be written, minimum supported version: ' f'{self._APPEND_COMPATIBLE_FORMAT_VERSION:d}.')) if format_version < self._READ_COMPATIBLE_FORMAT_VERSION: raise IOError(( f'Format version: {format_version:d} is too old and can no longer ' f'be read, minimum supported version: ' f'{self._READ_COMPATIBLE_FORMAT_VERSION:d}.')) if format_version > self._FORMAT_VERSION: raise IOError(( f'Format version: {format_version:d} is too new and not yet ' f'supported, minimum supported version: ' f'{self._FORMAT_VERSION:d}.')) serialization_format = metadata_values.get('serialization_format', None) if serialization_format != 'json': raise IOError( f'Unsupported serialization format: {serialization_format!s}') # Ensure format_version is an integer. metadata_values['format_version'] = format_version def _CommitWriteCache(self, container_type): """Commits the write cache for a specific type of attribute container. Args: container_type (str): attribute container type. """ write_cache = self._write_cache.get(container_type, []) if len(write_cache) > 1: self._FlushWriteCache(container_type, write_cache) del self._write_cache[container_type] def _CreateAttributeContainerTable(self, container_type): """Creates a table for a specific attribute container type. Args: container_type (str): attribute container type. Raises: IOError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. 
OSError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. """ schema = self._GetAttributeContainerSchema(container_type) if not schema: raise IOError(f'Unsupported attribute container type: {container_type:s}') column_definitions = ['_identifier INTEGER PRIMARY KEY AUTOINCREMENT'] for name, data_type in sorted(schema.items()): data_type = self._schema_helper.GetStorageDataType(data_type) column_definitions.append(f'{name:s} {data_type:s}') column_definitions = ', '.join(column_definitions) query = f'CREATE TABLE {container_type:s} ({column_definitions:s});' try: self._cursor.execute(query) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) def _CreateAttributeContainerFromRow( self, container_type, column_names, row, first_column_index): """Creates an attribute container of a row in the database. Args: container_type (str): attribute container type. column_names (list[str]): names of the columns selected. row (sqlite.Row): row as a result from a SELECT query. first_column_index (int): index of the first column in row. Returns: AttributeContainer: attribute container. Raises: IOError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. OSError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. 
""" schema = self._GetAttributeContainerSchema(container_type) if not schema: raise IOError(f'Unsupported attribute container type: {container_type:s}') container = self._containers_manager.CreateAttributeContainer( container_type) for column_index, name in enumerate(column_names): row_value = row[first_column_index + column_index] if row_value is not None: data_type = schema[name] try: attribute_value = self._schema_helper.DeserializeValue( data_type, row_value) except IOError: raise IOError(( f'Unsupported attribute container type: {container_type:s} ' f'attribute: {name:s} data type: {data_type:s}')) setattr(container, name, attribute_value) return container def _Flush(self): """Ensures cached data is written to file. Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. """ for container_type, write_cache in self._write_cache.items(): if len(write_cache) > 1: self._FlushWriteCache(container_type, write_cache) self._write_cache = {} # We need to run commit or not all data is stored in the database. self._connection.commit() def _FlushWriteCache(self, container_type, write_cache): """Flushes attribute container values cached for writing. Args: container_type (str): attribute container type. write_cache (list[tuple[str]]): cached attribute container values. Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. 
""" column_names = write_cache.pop(0) value_statement = ','.join(['?'] * len(column_names)) value_statement = f'({value_statement:s})' values_statement = ', '.join([value_statement] * len(write_cache)) column_names_string = ', '.join(column_names) query = (f'INSERT INTO {container_type:s} ({column_names_string:s}) ' f'VALUES {values_statement:s}') if self._storage_profiler: self._storage_profiler.StartTiming('write_new') try: values = list(itertools.chain(*write_cache)) self._cursor.execute(query, values) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) finally: if self._storage_profiler: self._storage_profiler.StopTiming('write_new') def _GetAttributeContainersWithFilter( self, container_type, column_names=None, filter_expression=None, order_by=None): """Retrieves a specific type of stored attribute containers. Args: container_type (str): attribute container type. column_names (Optional[list[str]]): names of the columns to retrieve. filter_expression (Optional[str]): SQL expression to filter results by. order_by (Optional[str]): name of a column to order the results by. Yields: AttributeContainer: attribute container. Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. """ self._CommitWriteCache(container_type) if self._attribute_container_sequence_numbers[container_type]: column_names_string = ', '.join(column_names) query = (f'SELECT _identifier, {column_names_string:s} ' f'FROM {container_type:s}') if filter_expression: query = ' WHERE '.join([query, filter_expression]) if order_by: query = ' ORDER BY '.join([query, order_by]) # Use a local cursor to prevent another query interrupting the generator. 
cursor = self._connection.cursor() try: cursor.execute(query) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store for container: ' f'{container_type:s} with error: {exception!s}')) if self._storage_profiler: self._storage_profiler.StartTiming('get_containers') try: row = cursor.fetchone() finally: if self._storage_profiler: self._storage_profiler.StopTiming('get_containers') while row: container = self._CreateAttributeContainerFromRow( container_type, column_names, row, 1) identifier = containers_interface.AttributeContainerIdentifier( name=container_type, sequence_number=row[0]) container.SetIdentifier(identifier) yield container if self._storage_profiler: self._storage_profiler.StartTiming('get_containers') try: row = cursor.fetchone() finally: if self._storage_profiler: self._storage_profiler.StopTiming('get_containers') def _GetNumberOfAttributeContainerRows(self, container_type): """Retrieves the number of attribute container rows. Args: container_type (str): attribute container type. Returns: int: the number of rows of a specified attribute container type. Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. """ self._CommitWriteCache(container_type) if not self._HasTable(container_type): return 0 # Note that this is SQLite specific, and will give inaccurate results if # there are DELETE commands run on the table. acstore does not run any # DELETE commands. query = f'SELECT MAX(_ROWID_) FROM {container_type:s} LIMIT 1' try: self._cursor.execute(query) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) row = self._cursor.fetchone() if not row: return 0 return row[0] or 0 def _HasTable(self, table_name): """Determines if a specific table exists. 
  def _ReadAndCheckStorageMetadata(self, check_readable_only=False):
    """Reads storage metadata and checks that the values are valid.

    Args:
      check_readable_only (Optional[bool]): whether the store should only be
          checked to see if it can be read. If False, the store will be
          checked to see if it can be read and written to.

    Raises:
      IOError: when there is an error querying the attribute container store.
      OSError: when there is an error querying the attribute container store.
    """
    metadata_values = self._ReadMetadata()

    self._CheckStorageMetadata(
        metadata_values, check_readable_only=check_readable_only)

    # _CheckStorageMetadata replaces the format version value with its
    # integer representation, hence format_version is set to an int here.
    self.format_version = metadata_values['format_version']
    self.serialization_format = metadata_values['serialization_format']
""" self._CommitWriteCache(container.CONTAINER_TYPE) identifier = container.GetIdentifier() schema = self._GetAttributeContainerSchema(container.CONTAINER_TYPE) if not schema: raise IOError( f'Unsupported attribute container type: {container.CONTAINER_TYPE:s}') column_names = [] row_values = [] for name, data_type in sorted(schema.items()): attribute_value = getattr(container, name, None) try: row_value = self._schema_helper.SerializeValue( data_type, attribute_value) except IOError: raise IOError(( f'Unsupported attribute container type: ' f'{container.CONTAINER_TYPE:s} attribute: {name:s} data type: ' f'{data_type:s}')) column_names.append(f'{name:s} = ?') row_values.append(row_value) column_names_string = ', '.join(column_names) query = (f'UPDATE {container.CONTAINER_TYPE:s} SET {column_names_string:s} ' f'WHERE _identifier = {identifier.sequence_number:d}') if self._storage_profiler: self._storage_profiler.StartTiming('write_existing') try: self._cursor.execute(query, row_values) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) finally: if self._storage_profiler: self._storage_profiler.StopTiming('write_existing') def _WriteMetadata(self): """Writes metadata. Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. """ try: self._cursor.execute(self._CREATE_METADATA_TABLE_QUERY) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) self._WriteMetadataValue('format_version', f'{self._FORMAT_VERSION:d}') self._WriteMetadataValue('serialization_format', self.serialization_format) def _WriteMetadataValue(self, key, value): """Writes a metadata value. Args: key (str): key of the storage metadata. value (str): value of the storage metadata. 
Raises: IOError: when there is an error querying the attribute container store. OSError: when there is an error querying the attribute container store. """ try: self._cursor.execute(self._INSERT_METADATA_VALUE_QUERY, (key, value)) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) def _WriteNewAttributeContainer(self, container): """Writes a new attribute container to the store. The table for the container type is created if needed. Args: container (AttributeContainer): attribute container. Raises: IOError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. OSError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. """ next_sequence_number = self._GetAttributeContainerNextSequenceNumber( container.CONTAINER_TYPE) if (next_sequence_number == 1 and not self._HasTable(container.CONTAINER_TYPE)): self._CreateAttributeContainerTable(container.CONTAINER_TYPE) identifier = containers_interface.AttributeContainerIdentifier( name=container.CONTAINER_TYPE, sequence_number=next_sequence_number) container.SetIdentifier(identifier) schema = self._GetAttributeContainerSchema(container.CONTAINER_TYPE) if not schema: raise IOError( f'Unsupported attribute container type: {container.CONTAINER_TYPE:s}') column_names = [] row_values = [] for name, data_type in sorted(schema.items()): attribute_value = getattr(container, name, None) try: row_value = self._schema_helper.SerializeValue( data_type, attribute_value) except IOError: raise IOError(( f'Unsupported attribute container type: ' f'{container.CONTAINER_TYPE:s} attribute: {name:s} data type: ' f'{data_type:s}')) column_names.append(name) row_values.append(row_value) self._CacheAttributeContainerForWrite( container.CONTAINER_TYPE, column_names, row_values) 
self._CacheAttributeContainerByIndex(container, next_sequence_number - 1) @classmethod def CheckSupportedFormat(cls, path): """Checks if the attribute container store format is supported. Args: path (str): path to the attribute container store. Returns: bool: True if the format is supported. """ # Check if the path is an existing file, to prevent sqlite3 creating # an emtpy database file. if not os.path.isfile(path): return False try: connection = sqlite3.connect( path, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) cursor = connection.cursor() query = 'SELECT * FROM metadata' cursor.execute(query) metadata_values = {row[0]: row[1] for row in cursor.fetchall()} format_version = metadata_values.get('format_version', None) if format_version: try: format_version = int(format_version, 10) result = True except (TypeError, ValueError): pass connection.close() except (IOError, TypeError, ValueError, sqlite3.DatabaseError): result = False return result def Close(self): """Closes the file. Raises: IOError: if the attribute container store is already closed. OSError: if the attribute container store is already closed. """ if not self._is_open: raise IOError('Attribute container store already closed.') if self._connection: self._Flush() self._connection.close() self._connection = None self._cursor = None self._is_open = False def GetAttributeContainerByIdentifier(self, container_type, identifier): """Retrieves a specific type of container with a specific identifier. Args: container_type (str): container type. identifier (AttributeContainerIdentifier): attribute container identifier. Returns: AttributeContainer: attribute container or None if not available. Raises: IOError: when the store is closed or if an unsupported attribute container is provided. OSError: when the store is closed or if an unsupported attribute container is provided. 
""" return self.GetAttributeContainerByIndex( container_type, identifier.sequence_number - 1) def GetAttributeContainerByIndex(self, container_type, index): """Retrieves a specific attribute container. Args: container_type (str): attribute container type. index (int): attribute container index. Returns: AttributeContainer: attribute container or None if not available. Raises: IOError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. OSError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. """ container = self._GetCachedAttributeContainer(container_type, index) if container: return container self._CommitWriteCache(container_type) if not self._attribute_container_sequence_numbers[container_type]: return None schema = self._GetAttributeContainerSchema(container_type) if not schema: raise IOError(f'Unsupported attribute container type: {container_type:s}') column_names = sorted(schema.keys()) column_names_string = ', '.join(column_names) row_number = index + 1 query = (f'SELECT {column_names_string:s} FROM {container_type:s} WHERE ' f'rowid = {row_number:d}') try: self._cursor.execute(query) except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) if self._storage_profiler: self._storage_profiler.StartTiming('get_container_by_index') try: row = self._cursor.fetchone() finally: if self._storage_profiler: self._storage_profiler.StopTiming('get_container_by_index') if not row: return None container = self._CreateAttributeContainerFromRow( container_type, column_names, row, 0) identifier = containers_interface.AttributeContainerIdentifier( name=container_type, sequence_number=row_number) container.SetIdentifier(identifier) self._CacheAttributeContainerByIndex(container, index) return container def GetAttributeContainers(self, 
container_type, filter_expression=None): """Retrieves a specific type of stored attribute containers. Args: container_type (str): attribute container type. filter_expression (Optional[str]): expression to filter the resulting attribute containers by. Returns: generator(AttributeContainer): attribute container generator. Raises: IOError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. OSError: when there is an error querying the attribute container store or if an unsupported attribute container is provided. """ schema = self._GetAttributeContainerSchema(container_type) if not schema: raise IOError(f'Unsupported attribute container type: {container_type:s}') column_names = sorted(schema.keys()) sql_filter_expression = None if filter_expression: expression_ast = ast.parse(filter_expression, mode='eval') sql_filter_expression = PythonAST2SQL(expression_ast.body) return self._GetAttributeContainersWithFilter( container_type, column_names=column_names, filter_expression=sql_filter_expression) def GetNumberOfAttributeContainers(self, container_type): """Retrieves the number of a specific type of attribute containers. Args: container_type (str): attribute container type. Returns: int: the number of containers of a specified type. """ return self._attribute_container_sequence_numbers[container_type] def HasAttributeContainers(self, container_type): """Determines if store contains a specific type of attribute containers. Args: container_type (str): attribute container type. Returns: bool: True if the store contains the specified type of attribute containers. """ return self._attribute_container_sequence_numbers[container_type] > 0 def Open(self, path=None, read_only=True, **unused_kwargs): # pylint: disable=arguments-differ """Opens the store. Args: path (Optional[str]): path to the attribute container store. read_only (Optional[bool]): True if the file should be opened in read-only mode. 
Raises: IOError: if the attribute container store is already opened or if the database cannot be connected. OSError: if the attribute container store is already opened or if the database cannot be connected. ValueError: if path is missing. """ if self._is_open: raise IOError('Attribute container store already opened.') if not path: raise ValueError('Missing path.') path = os.path.abspath(path) try: path_uri = pathlib.Path(path).as_uri() if read_only: path_uri = f'{path_uri:s}?mode=ro' except ValueError: path_uri = None detect_types = sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES if path_uri: connection = sqlite3.connect( path_uri, detect_types=detect_types, isolation_level='DEFERRED', uri=True) else: connection = sqlite3.connect( path, detect_types=detect_types, isolation_level='DEFERRED') try: # Use in-memory journaling mode to reduce IO. connection.execute('PRAGMA journal_mode=MEMORY') # Turn off insert transaction integrity since we want to do bulk insert. connection.execute('PRAGMA synchronous=OFF') except (sqlite3.InterfaceError, sqlite3.OperationalError) as exception: raise IOError(( f'Unable to query attribute container store with error: ' f'{exception!s}')) cursor = connection.cursor() if not cursor: return self._connection = connection self._cursor = cursor self._is_open = True self._read_only = read_only if read_only: self._ReadAndCheckStorageMetadata(check_readable_only=True) else: if not self._HasTable('metadata'): self._WriteMetadata() else: self._ReadAndCheckStorageMetadata() # Update the storage metadata format version in case we are adding # new format features that are not backwards compatible. self._UpdateStorageMetadataFormatVersion() self._connection.commit() # Initialize next_sequence_number based on the file contents so that # AttributeContainerIdentifier points to the correct attribute container. 
for container_type in self._containers_manager.GetContainerTypes(): next_sequence_number = self._GetNumberOfAttributeContainerRows( container_type) self._SetAttributeContainerNextSequenceNumber( container_type, next_sequence_number) acstore-20240407/appveyor.yml000066400000000000000000000033351460443036000160120ustar00rootroot00000000000000environment: matrix: - DESCRIPTION: "Run tests on Windows with 32-bit Python 3.12" MACHINE_TYPE: "x86" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: tests - DESCRIPTION: "Run tests on Windows with 64-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: tests - DESCRIPTION: "Build wheel on Windows with 32-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: wheel - DESCRIPTION: "Build wheel on Windows with 64-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: wheel - DESCRIPTION: "Run tests on Mac OS with Python 3.12" APPVEYOR_BUILD_WORKER_IMAGE: macos-monterey HOMEBREW_NO_INSTALL_CLEANUP: 1 TARGET: tests install: - cmd: "%PYTHON%\\python.exe -m pip install -U build pip setuptools twine wheel" - ps: If ($isWindows) { .\config\appveyor\install.ps1 } - sh: config/appveyor/install.sh build_script: - cmd: IF [%TARGET%]==[wheel] ( "%PYTHON%\\python.exe" -m build --wheel ) test_script: - cmd: IF [%TARGET%]==[tests] ( "%PYTHON%\\python.exe" run_tests.py && IF EXIST "tests\\end-to-end.py" ( set PYTHONPATH=. 
&& "%PYTHON%\\python.exe" "tests\\end-to-end.py" --debug -c "config\\end-to-end.ini" ) ) - sh: config/appveyor/runtests.sh artifacts: - path: dist\*.whl acstore-20240407/config/000077500000000000000000000000001460443036000146635ustar00rootroot00000000000000acstore-20240407/config/appveyor/000077500000000000000000000000001460443036000165305ustar00rootroot00000000000000acstore-20240407/config/appveyor/install.ps1000066400000000000000000000014441460443036000206260ustar00rootroot00000000000000# Script to set up tests on AppVeyor Windows. $Dependencies = "PyYAML" If ($Dependencies.Length -gt 0) { $Dependencies = ${Dependencies} -split " " $Output = Invoke-Expression -Command "git clone https://github.com/log2timeline/l2tdevtools.git ..\l2tdevtools 2>&1" | %{ "$_" } Write-Host (${Output} | Out-String) If ($env:APPVEYOR_REPO_BRANCH -eq "main") { $Track = "stable" } Else { $Track = $env:APPVEYOR_REPO_BRANCH } New-Item -ItemType "directory" -Name "dependencies" $env:PYTHONPATH = "..\l2tdevtools" $Output = Invoke-Expression -Command "& '${env:PYTHON}\python.exe' ..\l2tdevtools\tools\update.py --download-directory dependencies --machine-type ${env:MACHINE_TYPE} --track ${env:L2TBINARIES_TRACK} ${Dependencies} 2>&1" | %{ "$_" } Write-Host (${Output} | Out-String) } acstore-20240407/config/appveyor/install.sh000077500000000000000000000001751460443036000205400ustar00rootroot00000000000000# Script to set up tests on AppVeyor MacOS. set -e brew update -q brew install -q gettext gnu-sed python@3.12 tox || true acstore-20240407/config/appveyor/runtests.sh000077500000000000000000000011541460443036000207570ustar00rootroot00000000000000#!/bin/sh # Script to run tests # Set the following environment variables to build libyal with gettext. export CPPFLAGS="-I/usr/local/include -I/usr/local/opt/gettext/include ${CPPFLAGS}"; export LDFLAGS="-L/usr/local/lib -L/usr/local/opt/gettext/lib ${LDFLAGS}"; # Set the following environment variables to build pycrypto and yara-python. 
export CPPFLAGS="-I/usr/local/opt/openssl@1.1/include ${CPPFLAGS}"; export LDFLAGS="-L/usr/local/opt/openssl@1.1/lib ${LDFLAGS}"; # Set the following environment variables to ensure tox can find Python 3.12. export PATH="/usr/local/opt/python@3.12/bin:${PATH}"; tox -e py312 acstore-20240407/config/dpkg/000077500000000000000000000000001460443036000156105ustar00rootroot00000000000000acstore-20240407/config/dpkg/changelog000066400000000000000000000002521460443036000174610ustar00rootroot00000000000000acstore (20240407-1) unstable; urgency=low * Auto-generated -- Log2Timeline maintainers Sun, 07 Apr 2024 07:12:15 +0200 acstore-20240407/config/dpkg/clean000066400000000000000000000000241460443036000166110ustar00rootroot00000000000000acstore/*.pyc *.pyc acstore-20240407/config/dpkg/compat000066400000000000000000000000021460443036000170060ustar00rootroot000000000000009 acstore-20240407/config/dpkg/control000066400000000000000000000011071460443036000172120ustar00rootroot00000000000000Source: acstore Section: python Priority: extra Maintainer: Log2Timeline maintainers Build-Depends: debhelper (>= 9), dh-python, python3-all (>= 3.6~), python3-setuptools Standards-Version: 4.1.4 X-Python3-Version: >= 3.6 Homepage: https://github.com/log2timeline/acstore Package: python3-acstore Architecture: all Depends: python3-yaml (>= 3.10), ${misc:Depends} Description: Python 3 module of ACStore ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write attribute container storage files. acstore-20240407/config/dpkg/copyright000066400000000000000000000017021460443036000175430ustar00rootroot00000000000000Format: http://dep.debian.net/deps/dep5 Upstream-Name: acstore Source: https://github.com/log2timeline/acstore/ Files: * Copyright: 2017 The dfDateTime project authors. License: Apache-2.0 Files: debian/* Copyright: 2017 The dfDateTime project authors. 
License: Apache-2.0 License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. . On Debian systems, the complete text of the Apache version 2.0 license can be found in "/usr/share/common-licenses/Apache-2.0". acstore-20240407/config/dpkg/rules000077500000000000000000000001721460443036000166700ustar00rootroot00000000000000#!/usr/bin/make -f %: dh $@ --buildsystem=pybuild --with=python3 .PHONY: override_dh_auto_test override_dh_auto_test: acstore-20240407/config/dpkg/source/000077500000000000000000000000001460443036000171105ustar00rootroot00000000000000acstore-20240407/config/dpkg/source/format000066400000000000000000000000141460443036000203160ustar00rootroot000000000000003.0 (quilt) acstore-20240407/dependencies.ini000066400000000000000000000002271460443036000165460ustar00rootroot00000000000000[yaml] dpkg_name: python3-yaml l2tbinaries_name: PyYAML minimum_version: 3.10 pypi_name: PyYAML rpm_name: python3-pyyaml version_property: __version__ acstore-20240407/docs/000077500000000000000000000000001460443036000143465ustar00rootroot00000000000000acstore-20240407/docs/conf.py000066400000000000000000000117431460443036000156530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Sphinx build configuration file.""" import os import sys from sphinx.ext import apidoc from docutils import nodes from docutils import transforms # Change PYTHONPATH to include acstore module and dependencies. 
sys.path.insert(0, os.path.abspath('..')) import acstore # pylint: disable=wrong-import-position import utils.dependencies # pylint: disable=wrong-import-position # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '2.0.1' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'recommonmark', 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', 'sphinx_markdown_tables', 'sphinx_rtd_theme', ] # We cannot install architecture dependent Python modules on readthedocs, # therefore we mock most imports. pip_installed_modules = set() dependency_helper = utils.dependencies.DependencyHelper( dependencies_file=os.path.join('..', 'dependencies.ini'), test_dependencies_file=os.path.join('..', 'test_dependencies.ini')) modules_to_mock = set(dependency_helper.dependencies.keys()) modules_to_mock = modules_to_mock.difference(pip_installed_modules) autodoc_mock_imports = sorted(modules_to_mock) # Options for the Sphinx Napoleon extension, which reads Google-style # docstrings. napoleon_google_docstring = True napoleon_numpy_docstring = False napoleon_include_init_with_doc = True napoleon_include_private_with_doc = False napoleon_include_special_with_doc = True # General information about the project. # pylint: disable=redefined-builtin project = 'ACStore' copyright = 'The ACStore authors' version = acstore.__version__ release = acstore.__version__ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The master toctree document. 
master_doc = 'index' # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Output file base name for HTML help builder. htmlhelp_basename = 'acstoredoc' # -- Options linkcheck ---------------------------------------------------- linkcheck_ignore = [ ] # -- Code to rewrite links for readthedocs -------------------------------- # This function is a Sphinx core event callback, the format of which is detailed # here: https://www.sphinx-doc.org/en/master/extdev/appapi.html#events # pylint: disable=unused-argument def RunSphinxAPIDoc(app): """Runs sphinx-apidoc to auto-generate documentation. Args: app (sphinx.application.Sphinx): Sphinx application. Required by the the Sphinx event callback API. """ current_directory = os.path.abspath(os.path.dirname(__file__)) module_path = os.path.join(current_directory, '..', 'acstore') api_directory = os.path.join(current_directory, 'sources', 'api') apidoc.main(['-o', api_directory, module_path, '--force']) class MarkdownLinkFixer(transforms.Transform): """Transform definition to parse .md references to internal pages.""" default_priority = 1000 _URI_PREFIXES = [] def _FixLinks(self, node): """Corrects links to .md files not part of the documentation. Args: node (docutils.nodes.Node): docutils node. Returns: docutils.nodes.Node: docutils node, with correct URIs outside of Markdown pages outside the documentation. 
""" if isinstance(node, nodes.reference) and 'refuri' in node: reference_uri = node['refuri'] for uri_prefix in self._URI_PREFIXES: if (reference_uri.startswith(uri_prefix) and not ( reference_uri.endswith('.asciidoc') or reference_uri.endswith('.md'))): node['refuri'] = reference_uri + '.md' break return node def _Traverse(self, node): """Traverses the document tree rooted at node. Args: node (docutils.nodes.Node): docutils node. """ self._FixLinks(node) for child_node in node.children: self._Traverse(child_node) # pylint: disable=arguments-differ def apply(self): """Applies this transform on document tree.""" self._Traverse(self.document) # pylint: invalid-name def setup(app): """Called at Sphinx initialization. Args: app (sphinx.application.Sphinx): Sphinx application. """ # Triggers sphinx-apidoc to generate API documentation. app.connect('builder-inited', RunSphinxAPIDoc) app.add_config_value( 'recommonmark_config', {'enable_auto_toc_tree': True}, True) app.add_transform(MarkdownLinkFixer) acstore-20240407/docs/index.rst000066400000000000000000000007771460443036000162220ustar00rootroot00000000000000Welcome to the ACStore documentation ==================================== ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write attribute container storage files. The source code is available from the `project page `__. .. toctree:: :maxdepth: 2 sources/user/index .. 
toctree:: :maxdepth: 2 API documentation Indices and tables ================== * :ref:`genindex` * :ref:`modindex` acstore-20240407/docs/requirements.txt000066400000000000000000000001661460443036000176350ustar00rootroot00000000000000certifi >= 2023.11.17 docutils Markdown recommonmark sphinx >= 4.1.0 sphinx-markdown-tables sphinx-rtd-theme >= 0.5.1 acstore-20240407/docs/sources/000077500000000000000000000000001460443036000160315ustar00rootroot00000000000000acstore-20240407/docs/sources/api/000077500000000000000000000000001460443036000166025ustar00rootroot00000000000000acstore-20240407/docs/sources/api/acstore.containers.rst000066400000000000000000000010341460443036000231360ustar00rootroot00000000000000acstore.containers package ========================== Submodules ---------- acstore.containers.interface module ----------------------------------- .. automodule:: acstore.containers.interface :members: :undoc-members: :show-inheritance: acstore.containers.manager module --------------------------------- .. automodule:: acstore.containers.manager :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: acstore.containers :members: :undoc-members: :show-inheritance: acstore-20240407/docs/sources/api/acstore.helpers.rst000066400000000000000000000013361460443036000224400ustar00rootroot00000000000000acstore.helpers package ======================= Submodules ---------- acstore.helpers.json\_serializer module --------------------------------------- .. automodule:: acstore.helpers.json_serializer :members: :undoc-members: :show-inheritance: acstore.helpers.schema module ----------------------------- .. automodule:: acstore.helpers.schema :members: :undoc-members: :show-inheritance: acstore.helpers.yaml\_definitions\_file module ---------------------------------------------- .. automodule:: acstore.helpers.yaml_definitions_file :members: :undoc-members: :show-inheritance: Module contents --------------- .. 
automodule:: acstore.helpers :members: :undoc-members: :show-inheritance: acstore-20240407/docs/sources/api/acstore.rst000066400000000000000000000017131460443036000207760ustar00rootroot00000000000000acstore package =============== Subpackages ----------- .. toctree:: :maxdepth: 4 acstore.containers acstore.helpers Submodules ---------- acstore.errors module --------------------- .. automodule:: acstore.errors :members: :undoc-members: :show-inheritance: acstore.fake\_store module -------------------------- .. automodule:: acstore.fake_store :members: :undoc-members: :show-inheritance: acstore.interface module ------------------------ .. automodule:: acstore.interface :members: :undoc-members: :show-inheritance: acstore.profilers module ------------------------ .. automodule:: acstore.profilers :members: :undoc-members: :show-inheritance: acstore.sqlite\_store module ---------------------------- .. automodule:: acstore.sqlite_store :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: acstore :members: :undoc-members: :show-inheritance: acstore-20240407/docs/sources/api/modules.rst000066400000000000000000000000721460443036000210030ustar00rootroot00000000000000acstore ======= .. toctree:: :maxdepth: 4 acstore acstore-20240407/docs/sources/user/000077500000000000000000000000001460443036000170075ustar00rootroot00000000000000acstore-20240407/docs/sources/user/Installation-instructions.md000066400000000000000000000031321460443036000245330ustar00rootroot00000000000000# Installation instructions ## pip **Note that using pip outside virtualenv is not recommended since it ignores your systems package manager. 
If you aren't comfortable debugging package installation issues, this is not the option for you.** Create and activate a virtualenv: ```bash virtualenv acstoreenv cd acstoreenv source ./bin/activate ``` Upgrade pip and install ACStore dependencies: ```bash pip install --upgrade pip pip install acstore ``` To deactivate the virtualenv run: ```bash deactivate ``` ## Ubuntu 22.04 LTS To install ACStore from the [GIFT Personal Package Archive (PPA)](https://launchpad.net/~gift): ```bash sudo add-apt-repository ppa:gift/stable ``` Update and install ACStore: ```bash sudo apt-get update sudo apt-get install python3-acstore ``` ## Windows The [l2tbinaries](https://github.com/log2timeline/l2tbinaries) contains the necessary packages for running ACStore. l2tbinaries provides the following branches: * main; branch intended for the "packaged release" of ACStore and dependencies; * staging; branch intended for testing pre-releases of ACStore; * dev; branch intended for the "development release" of ACStore; * testing; branch intended for testing newly created packages. The l2tdevtools project provides [an update script](https://github.com/log2timeline/l2tdevtools/wiki/Update-script) to ease the process of keeping the dependencies up to date. The script requires [pywin32](https://github.com/mhammond/pywin32/releases). To install the release versions of the dependencies run: ``` set PYTHONPATH=. C:\Python3\python.exe tools\update.py --preset acstore ``` acstore-20240407/docs/sources/user/index.rst000066400000000000000000000004361460443036000206530ustar00rootroot00000000000000############### Getting started ############### To be able to use ACStore you first need to install it. There are multiple ways to install ACStore, check the following instructions for more detail. .. 
toctree:: :maxdepth: 2 Installation instructions acstore-20240407/pyproject.toml000066400000000000000000000004501460443036000163310ustar00rootroot00000000000000[build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [tool.docformatter] black = false non-cap = ["dfDateTime", "dfImageTools", "dfVFS", "dfWinReg", "dtFabric", "iMessage", "iOS", "iPod", "mDNS"] non-strict = false wrap-summaries = 80 wrap-descriptions = 80 acstore-20240407/requirements.txt000066400000000000000000000000171460443036000167000ustar00rootroot00000000000000PyYAML >= 3.10 acstore-20240407/run_tests.py000077500000000000000000000016411460443036000160230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to run the tests.""" import sys import unittest # Change PYTHONPATH to include dependencies. sys.path.insert(0, '.') import utils.dependencies # pylint: disable=wrong-import-position if __name__ == '__main__': fail_unless_has_test_file = '--fail-unless-has-test-file' in sys.argv setattr(unittest, 'fail_unless_has_test_file', fail_unless_has_test_file) if fail_unless_has_test_file: # Remove --fail-unless-has-test-file otherwise it will conflict with # the argparse tests. sys.argv.remove('--fail-unless-has-test-file') dependency_helper = utils.dependencies.DependencyHelper() if not dependency_helper.CheckTestDependencies(): sys.exit(1) test_suite = unittest.TestLoader().discover('tests', pattern='*.py') test_results = unittest.TextTestRunner(verbosity=2).run(test_suite) if not test_results.wasSuccessful(): sys.exit(1) acstore-20240407/setup.cfg000066400000000000000000000015261460443036000152430ustar00rootroot00000000000000[metadata] name = acstore version = 20240407 description = Attribute Container Storage (ACStore). long_description = ACStore, or Attribute Container Storage, provides a stand-alone implementation to read and write attribute container storage files. 
long_description_content_type = text/plain url = https://github.com/log2timeline/acstore maintainer = Log2Timeline maintainers maintainer_email = log2timeline-maintainers@googlegroups.com license = Apache License, Version 2.0 license_files = ACKNOWLEDGEMENTS AUTHORS LICENSE README classifiers = Development Status :: 3 - Alpha Programming Language :: Python [options] install_requires = file:requirements.txt package_dir = acstore = acstore packages = find: python_requires = >=3.8 [options.packages.find] exclude = docs tests tests.* utils where = . [bdist_wheel] universal = 1 acstore-20240407/setup.py000077500000000000000000000002001460443036000151230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Installation and deployment script.""" from setuptools import setup setup() acstore-20240407/test_data/000077500000000000000000000000001460443036000153665ustar00rootroot00000000000000acstore-20240407/test_data/definitions.yaml000066400000000000000000000002411460443036000205620ustar00rootroot00000000000000# YAML-based attribute container definitions file. 
--- name: windows_eventlog_message_file attributes: - name: path type: str - name: windows_path type: str acstore-20240407/test_dependencies.ini000066400000000000000000000000001460443036000175720ustar00rootroot00000000000000acstore-20240407/test_requirements.txt000066400000000000000000000000001460443036000177270ustar00rootroot00000000000000acstore-20240407/tests/000077500000000000000000000000001460443036000145605ustar00rootroot00000000000000acstore-20240407/tests/__init__.py000066400000000000000000000000301460443036000166620ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/tests/containers/000077500000000000000000000000001460443036000167255ustar00rootroot00000000000000acstore-20240407/tests/containers/__init__.py000066400000000000000000000000301460443036000210270ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/tests/containers/interface.py000066400000000000000000000131341460443036000212410ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the attribute container interface.""" import unittest from acstore.containers import interface from tests import test_lib class AttributeContainerIdentifierTest(test_lib.BaseTestCase): """Tests for the attribute container identifier.""" def testCopyToString(self): """Tests the CopyToString function.""" sequence_number = id(self) identifier = interface.AttributeContainerIdentifier( name='test_container', sequence_number=sequence_number) identifier_string = identifier.CopyToString() self.assertEqual( identifier_string, f'test_container.{sequence_number:d}') class AttributeContainerTest(test_lib.BaseTestCase): """Tests for the attribute container interface.""" # pylint: disable=protected-access def testCopyToDict(self): """Tests the CopyToDict function.""" attribute_container = interface.AttributeContainer() attribute_container.attribute_name = 'MyName' attribute_container.attribute_value = 'MyValue' expected_dict = { 'attribute_name': 'MyName', 
'attribute_value': 'MyValue'} test_dict = attribute_container.CopyToDict() self.assertEqual(test_dict, expected_dict) def testGetAttributeNames(self): """Tests the GetAttributeNames function.""" attribute_container = interface.AttributeContainer() attribute_container._protected_attribute = 'protected' attribute_container.attribute_name = 'MyName' attribute_container.attribute_value = 'MyValue' expected_attribute_names = ['attribute_name', 'attribute_value'] attribute_names = sorted(attribute_container.GetAttributeNames()) self.assertEqual(attribute_names, expected_attribute_names) setattr(attribute_container, '_SERIALIZABLE_PROTECTED_ATTRIBUTES', [ '_protected_attribute']) expected_attribute_names = [ '_protected_attribute', 'attribute_name', 'attribute_value'] attribute_names = sorted(attribute_container.GetAttributeNames()) self.assertEqual(attribute_names, expected_attribute_names) def testGetAttributes(self): """Tests the GetAttributes function.""" attribute_container = interface.AttributeContainer() attribute_container._protected_attribute = 'protected' attribute_container.attribute_name = 'MyName' attribute_container.attribute_value = 'MyValue' expected_attributes = [ ('attribute_name', 'MyName'), ('attribute_value', 'MyValue')] attributes = sorted(attribute_container.GetAttributes()) self.assertEqual(attributes, expected_attributes) setattr(attribute_container, '_SERIALIZABLE_PROTECTED_ATTRIBUTES', [ '_protected_attribute']) expected_attributes = [ ('_protected_attribute', 'protected'), ('attribute_name', 'MyName'), ('attribute_value', 'MyValue')] attributes = sorted(attribute_container.GetAttributes()) self.assertEqual(attributes, expected_attributes) def testGetAttributeValueHash(self): """Tests the GetAttributeValuesHash function.""" attribute_container = interface.AttributeContainer() attribute_container._protected_attribute = 'protected' attribute_container.attribute_name = 'MyName' attribute_container.attribute_value = 'MyValue' attribute_values_hash1 
= attribute_container.GetAttributeValuesHash() attribute_container.attribute_value = 'changes' attribute_values_hash2 = attribute_container.GetAttributeValuesHash() self.assertNotEqual(attribute_values_hash1, attribute_values_hash2) attribute_container.attribute_value = 'MyValue' setattr(attribute_container, '_SERIALIZABLE_PROTECTED_ATTRIBUTES', [ '_protected_attribute']) attribute_values_hash2 = attribute_container.GetAttributeValuesHash() self.assertNotEqual(attribute_values_hash1, attribute_values_hash2) def testGetAttributeValuesString(self): """Tests the GetAttributeValuesString function.""" attribute_container = interface.AttributeContainer() attribute_container._protected_attribute = 'protected' attribute_container.attribute_name = 'MyName' attribute_container.attribute_value = 'MyValue' attribute_values_string1 = attribute_container.GetAttributeValuesString() attribute_container.attribute_value = 'changes' attribute_values_string2 = attribute_container.GetAttributeValuesString() self.assertNotEqual(attribute_values_string1, attribute_values_string2) attribute_container.attribute_value = 'MyValue' setattr(attribute_container, '_SERIALIZABLE_PROTECTED_ATTRIBUTES', [ '_protected_attribute']) attribute_values_string2 = attribute_container.GetAttributeValuesString() self.assertNotEqual(attribute_values_string1, attribute_values_string2) def testGetIdentifier(self): """Tests the GetIdentifier function.""" attribute_container = interface.AttributeContainer() identifier = attribute_container.GetIdentifier() self.assertIsNotNone(identifier) def testMatchesExpression(self): """Tests the MatchesExpression function.""" attribute_container = interface.AttributeContainer() attribute_container.name = 'value' result = attribute_container.MatchesExpression('name == "value"') self.assertTrue(result) result = attribute_container.MatchesExpression('name == "bogus"') self.assertFalse(result) result = attribute_container.MatchesExpression('bogus') self.assertFalse(result) def 
testSetIdentifier(self): """Tests the SetIdentifier function.""" attribute_container = interface.AttributeContainer() attribute_container.SetIdentifier(None) if __name__ == '__main__': unittest.main() acstore-20240407/tests/containers/manager.py000066400000000000000000000054261460443036000207200ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the attribute container manager.""" import unittest from acstore.containers import manager from tests import test_lib as shared_test_lib class AttributeContainersManagerTest(shared_test_lib.BaseTestCase): """Tests for the attribute container manager.""" # pylint: disable=protected-access _TEST_MANAGER = manager.AttributeContainersManager def testCreateAttributeContainer(self): """Tests the CreateAttributeContainer function.""" self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) try: attribute_container = self._TEST_MANAGER.CreateAttributeContainer( 'test_container') self.assertIsNotNone(attribute_container) with self.assertRaises(ValueError): self._TEST_MANAGER.CreateAttributeContainer('bogus') finally: self._TEST_MANAGER.DeregisterAttributeContainer( shared_test_lib.TestAttributeContainer) def testGetContainerTypes(self): """Tests the GetContainerTypes function.""" self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) try: container_types = self._TEST_MANAGER.GetContainerTypes() self.assertIn('test_container', container_types) finally: self._TEST_MANAGER.DeregisterAttributeContainer( shared_test_lib.TestAttributeContainer) def testGetSchema(self): """Tests the GetSchema function.""" self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) try: schema = self._TEST_MANAGER.GetSchema('test_container') self.assertIsNotNone(schema) self.assertEqual(schema, shared_test_lib.TestAttributeContainer.SCHEMA) with self.assertRaises(ValueError): self._TEST_MANAGER.GetSchema('bogus') finally: 
self._TEST_MANAGER.DeregisterAttributeContainer( shared_test_lib.TestAttributeContainer) def testAttributeContainerRegistration(self): """Tests the Register and DeregisterAttributeContainer functions.""" number_of_classes = len(self._TEST_MANAGER._attribute_container_classes) self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) try: self.assertEqual( len(self._TEST_MANAGER._attribute_container_classes), number_of_classes + 1) with self.assertRaises(KeyError): self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) finally: self._TEST_MANAGER.DeregisterAttributeContainer( shared_test_lib.TestAttributeContainer) self.assertEqual( len(self._TEST_MANAGER._attribute_container_classes), number_of_classes) if __name__ == '__main__': unittest.main() acstore-20240407/tests/fake_store.py000066400000000000000000000203761460443036000172640ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the fake (in-memory only) store.""" import unittest from acstore import fake_store from tests import test_lib class FakeAttributeContainerStoreTest(test_lib.BaseTestCase): """Tests for the fake (in-memory only) store.""" # pylint: disable=protected-access def testRaiseIfNotReadable(self): """Tests the _RaiseIfNotReadable function.""" test_store = fake_store.FakeAttributeContainerStore() with self.assertRaises(IOError): test_store._RaiseIfNotReadable() def testRaiseIfNotWritable(self): """Tests the _RaiseIfNotWritable function.""" test_store = fake_store.FakeAttributeContainerStore() with self.assertRaises(IOError): test_store._RaiseIfNotWritable() def testWriteExistingAttributeContainer(self): """Tests the _WriteExistingAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) 
self.assertEqual(number_of_containers, 0) with self.assertRaises(IOError): test_store._WriteExistingAttributeContainer(attribute_container) test_store._WriteNewAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store._WriteExistingAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.Close() def testWriteNewAttributeContainer(self): """Tests the _WriteNewAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store._WriteNewAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.Close() def testAddAttributeContainer(self): """Tests the AddAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.Close() with self.assertRaises(IOError): test_store.AddAttributeContainer(attribute_container) def testGetAttributeContainerByIdentifier(self): """Tests the GetAttributeContainerByIdentifier function.""" attribute_container = test_lib.TestAttributeContainer() test_store 
= fake_store.FakeAttributeContainerStore() test_store.Open() test_store.AddAttributeContainer(attribute_container) identifier = attribute_container.GetIdentifier() container = test_store.GetAttributeContainerByIdentifier( attribute_container.CONTAINER_TYPE, identifier) self.assertIsNotNone(container) identifier.sequence_number = 99 container = test_store.GetAttributeContainerByIdentifier( attribute_container.CONTAINER_TYPE, identifier) self.assertIsNone(container) test_store.Close() def testGetAttributeContainerByIndex(self): """Tests the GetAttributeContainerByIndex function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() container = test_store.GetAttributeContainerByIndex( attribute_container.CONTAINER_TYPE, 0) self.assertIsNone(container) test_store.AddAttributeContainer(attribute_container) container = test_store.GetAttributeContainerByIndex( attribute_container.CONTAINER_TYPE, 0) self.assertIsNotNone(container) test_store.Close() def testGetAttributeContainers(self): """Tests the GetAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() attribute_container.attribute = '8f0bf95a7959baad9666b21a7feed79d' test_store = fake_store.FakeAttributeContainerStore() test_store.Open() containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE)) self.assertEqual(len(containers), 0) test_store.AddAttributeContainer(attribute_container) containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE)) self.assertEqual(len(containers), 1) filter_expression = 'attribute == "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE, filter_expression=filter_expression)) self.assertEqual(len(containers), 1) filter_expression = 'attribute != "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store.GetAttributeContainers( 
attribute_container.CONTAINER_TYPE, filter_expression=filter_expression)) self.assertEqual(len(containers), 0) test_store.Close() def testGetNumberOfAttributeContainers(self): """Tests the GetNumberOfAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.Close() def testHasAttributeContainers(self): """Tests the HasAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() result = test_store.HasAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertFalse(result) test_store.AddAttributeContainer(attribute_container) result = test_store.HasAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertTrue(result) test_store.Close() def testOpenClose(self): """Tests the Open and Close functions.""" test_store = fake_store.FakeAttributeContainerStore() test_store.Open() test_store.Close() test_store.Open() test_store.Close() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() test_store.Close() test_store.Open() with self.assertRaises(IOError): test_store.Open() test_store.Close() with self.assertRaises(IOError): test_store.Close() def testUpdateAttributeContainer(self): """Tests the UpdateAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() test_store = fake_store.FakeAttributeContainerStore() test_store.Open() number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) 
self.assertEqual(number_of_containers, 0) with self.assertRaises(IOError): test_store.UpdateAttributeContainer(attribute_container) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.UpdateAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.Close() if __name__ == '__main__': unittest.main() acstore-20240407/tests/helpers/000077500000000000000000000000001460443036000162225ustar00rootroot00000000000000acstore-20240407/tests/helpers/__init__.py000066400000000000000000000000301460443036000203240ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/tests/helpers/json_serializer.py000066400000000000000000000035221460443036000220000ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the attribute container JSON serializer.""" import unittest from acstore.containers import manager from acstore.helpers import json_serializer from tests import test_lib as shared_test_lib class AttributeContainerJSONSerializerTest(shared_test_lib.BaseTestCase): """Tests for the attribute container JSON serializer.""" _TEST_MANAGER = manager.AttributeContainersManager _TEST_SERIALIZER = json_serializer.AttributeContainerJSONSerializer def testConvertAttributeContainerToJSON(self): """Tests the ConvertAttributeContainerToJSON function.""" attribute_container = shared_test_lib.TestAttributeContainer() attribute_container.attribute = 'MyAttribute' expected_json_dict = { '__container_type__': 'test_container', '__type__': 'AttributeContainer', 'attribute': 'MyAttribute'} json_dict = self._TEST_SERIALIZER.ConvertAttributeContainerToJSON( attribute_container) self.assertEqual(json_dict, expected_json_dict) def testConvertJSONToAttributeContainer(self): 
"""Tests the ConvertJSONToAttributeContainer function.""" json_dict = { '__container_type__': 'test_container', '__type__': 'AttributeContainer', 'attribute': 'MyAttribute'} self._TEST_MANAGER.RegisterAttributeContainer( shared_test_lib.TestAttributeContainer) try: attribute_container = ( self._TEST_SERIALIZER.ConvertJSONToAttributeContainer(json_dict)) finally: self._TEST_MANAGER.DeregisterAttributeContainer( shared_test_lib.TestAttributeContainer) self.assertIsNotNone(attribute_container) self.assertEqual(attribute_container.CONTAINER_TYPE, 'test_container') self.assertEqual(attribute_container.attribute, 'MyAttribute') if __name__ == '__main__': unittest.main() acstore-20240407/tests/helpers/schema.py000066400000000000000000000033631460443036000200410ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the schema helper.""" import unittest from acstore.helpers import schema from tests import test_lib as shared_test_lib class SchemaHelperTest(shared_test_lib.BaseTestCase): """Tests for the schema helper.""" # pylint: disable=protected-access def testHasDataType(self): """Tests the HasDataType function.""" result = schema.SchemaHelper.HasDataType('str') self.assertTrue(result) result = schema.SchemaHelper.HasDataType('test') self.assertFalse(result) def testRegisterDataType(self): """Tests the RegisterDataType function.""" number_of_data_types = len(schema.SchemaHelper._data_types) schema.SchemaHelper.RegisterDataType('test', {'json': None}) try: self.assertEqual( len(schema.SchemaHelper._data_types), number_of_data_types + 1) with self.assertRaises(KeyError): schema.SchemaHelper.RegisterDataType('test', {'json': None}) finally: schema.SchemaHelper.DeregisterDataType('test') self.assertEqual( len(schema.SchemaHelper._data_types), number_of_data_types) def testRegisterDataTypes(self): """Tests the RegisterDataTypes function.""" number_of_data_types = len(schema.SchemaHelper._data_types) 
schema.SchemaHelper.RegisterDataTypes({'test': {'json': None}}) try: self.assertEqual( len(schema.SchemaHelper._data_types), number_of_data_types + 1) with self.assertRaises(KeyError): schema.SchemaHelper.RegisterDataTypes({'test': {'json': None}}) finally: schema.SchemaHelper.DeregisterDataType('test') self.assertEqual( len(schema.SchemaHelper._data_types), number_of_data_types) if __name__ == '__main__': unittest.main() acstore-20240407/tests/helpers/yaml_definitions_file.py000066400000000000000000000057541460443036000231430ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the YAML-based attribute container definitions file.""" import io import unittest from acstore import errors from acstore.helpers import yaml_definitions_file from tests import test_lib as shared_test_lib class YAMLAttributeContainerDefinitionsFileTest(shared_test_lib.BaseTestCase): """Tests for the YAML-based attribute container definitions file.""" # pylint: disable=protected-access _FORMATTERS_YAML = { 'name': 'windows_eventlog_message_file', 'attributes': [ {'name': 'path', 'type': 'str'}, {'name': 'windows_path', 'type': 'str'}]} def testReadDefinition(self): """Tests the _ReadDefinition function.""" test_definitions_file = ( yaml_definitions_file.YAMLAttributeContainerDefinitionsFile()) container_class = test_definitions_file._ReadDefinition( self._FORMATTERS_YAML) self.assertIsNotNone(container_class) self.assertEqual( container_class.CONTAINER_TYPE, 'windows_eventlog_message_file') self.assertEqual( container_class.SCHEMA, {'path': 'str', 'windows_path': 'str'}) with self.assertRaises(errors.ParseError): test_definitions_file._ReadDefinition({}) with self.assertRaises(errors.ParseError): test_definitions_file._ReadDefinition({ 'name': 'windows_eventlog_message_file', 'attributes': []}) with self.assertRaises(errors.ParseError): test_definitions_file._ReadDefinition({ 'name': 'windows_eventlog_message_file', 'attributes': [{'type': 'str'}]}) with 
self.assertRaises(errors.ParseError): test_definitions_file._ReadDefinition({ 'name': 'windows_eventlog_message_file', 'attributes': [{'name': 'path'}]}) with self.assertRaises(errors.ParseError): test_definitions_file._ReadDefinition({ 'name': 'windows_eventlog_message_file', 'attributes': [{'name': 'path', 'type': 'bogus'}]}) def testReadFromFileObject(self): """Tests the _ReadFromFileObject function.""" test_file_path = self._GetTestFilePath(['definitions.yaml']) self._SkipIfPathNotExists(test_file_path) test_definitions_file = ( yaml_definitions_file.YAMLAttributeContainerDefinitionsFile()) with io.open(test_file_path, 'r', encoding='utf-8') as file_object: definitions = list(test_definitions_file._ReadFromFileObject(file_object)) self.assertEqual(len(definitions), 1) def testReadFromFile(self): """Tests the ReadFromFile function.""" test_file_path = self._GetTestFilePath(['definitions.yaml']) self._SkipIfPathNotExists(test_file_path) test_definitions_file = ( yaml_definitions_file.YAMLAttributeContainerDefinitionsFile()) definitions = list(test_definitions_file.ReadFromFile(test_file_path)) self.assertEqual(len(definitions), 1) self.assertEqual( definitions[0].CONTAINER_TYPE, 'windows_eventlog_message_file') if __name__ == '__main__': unittest.main() acstore-20240407/tests/interface.py000066400000000000000000000064041460443036000170760ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the attribute container store interface.""" import unittest from acstore import interface from acstore.containers import manager from tests import test_lib class AttributeContainerStoreTest(test_lib.BaseTestCase): """Tests for the attribute container store interface.""" # pylint: disable=protected-access def testGetAttributeContainerNextSequenceNumber(self): """Tests the _GetAttributeContainerNextSequenceNumber function.""" attribute_container = test_lib.TestAttributeContainer() test_store = interface.AttributeContainerStore() sequence_number 
= test_store._GetAttributeContainerNextSequenceNumber( attribute_container.CONTAINER_TYPE) self.assertEqual(sequence_number, 1) sequence_number = test_store._GetAttributeContainerNextSequenceNumber( attribute_container.CONTAINER_TYPE) self.assertEqual(sequence_number, 2) def testGetAttributeContainerSchema(self): """Tests the _GetAttributeContainerSchema function.""" attribute_container = test_lib.TestAttributeContainer() test_store = interface.AttributeContainerStore() schema = test_store._GetAttributeContainerSchema( attribute_container.CONTAINER_TYPE) self.assertEqual(schema, {}) manager.AttributeContainersManager.RegisterAttributeContainer( test_lib.TestAttributeContainer) try: schema = test_store._GetAttributeContainerSchema( attribute_container.CONTAINER_TYPE) self.assertEqual(schema, test_lib.TestAttributeContainer.SCHEMA) finally: manager.AttributeContainersManager.DeregisterAttributeContainer( test_lib.TestAttributeContainer) # TODO: add tests for _SetAttributeContainerNextSequenceNumber def testSetStorageProfiler(self): """Tests the SetStorageProfiler function.""" test_store = interface.AttributeContainerStore() test_store.SetStorageProfiler(None) class AttributeContainerStoreWithReadCacheTest(test_lib.BaseTestCase): """Tests for the attribute container store with read cache.""" # pylint: disable=protected-access def testCacheAttributeContainerByIndex(self): """Tests the _CacheAttributeContainerByIndex function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory(): test_store = interface.AttributeContainerStoreWithReadCache() self.assertEqual(len(test_store._attribute_container_cache), 0) test_store._CacheAttributeContainerByIndex(attribute_container, 0) self.assertEqual(len(test_store._attribute_container_cache), 1) def testGetCachedAttributeContainer(self): """Tests the _GetCachedAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory(): test_store = 
interface.AttributeContainerStoreWithReadCache() cached_container = test_store._GetCachedAttributeContainer( attribute_container.CONTAINER_TYPE, 1) self.assertIsNone(cached_container) test_store._CacheAttributeContainerByIndex(attribute_container, 1) cached_container = test_store._GetCachedAttributeContainer( attribute_container.CONTAINER_TYPE, 1) self.assertIsNotNone(cached_container) if __name__ == '__main__': unittest.main() acstore-20240407/tests/profilers.py000066400000000000000000000031611460443036000171400ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the profiler classes.""" import time import unittest from acstore import profilers from tests import test_lib class CPUTimeMeasurementTest(test_lib.BaseTestCase): """Tests for the CPU time measurement.""" def testSampleStartStop(self): """Tests the SampleStart and SampleStop functions.""" cpu_measurement = profilers.CPUTimeMeasurement() cpu_measurement.SampleStart() cpu_measurement.SampleStop() class StorageProfilerTest(test_lib.BaseTestCase): """Tests for the storage profiler.""" # pylint: disable=protected-access def testIsSupported(self): """Tests the IsSupported function.""" self.assertTrue(profilers.StorageProfiler.IsSupported()) def testStartStop(self): """Tests the Start and Stop functions.""" with test_lib.TempDirectory() as temp_directory: test_profiler = profilers.StorageProfiler( 'test', temp_directory) setattr(test_profiler, '_FILENAME_PREFIX', 'test') setattr(test_profiler, '_FILE_HEADER', 'test') test_profiler.Start() test_profiler.Stop() def testSample(self): """Tests the Sample function.""" with test_lib.TempDirectory() as temp_directory: test_profiler = profilers.StorageProfiler( 'test', temp_directory) test_profiler.Start() for _ in range(5): test_profiler.StartTiming('test_profile') time.sleep(0.01) test_profiler.StopTiming('test_profile') test_profiler.Sample('test_profile', 'read', 'test', 1024, 128) test_profiler.Stop() if __name__ == '__main__': 
unittest.main() acstore-20240407/tests/sqlite_store.py000066400000000000000000000471321460443036000176560ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the SQLite-based attribute container store.""" import os import unittest from acstore import sqlite_store from acstore.containers import manager as containers_manager from tests import test_lib class _TestSQLiteAttributeContainerStoreV20220716( sqlite_store.SQLiteAttributeContainerStore): """Test class for testing format compatibility checks.""" _FORMAT_VERSION = 20220716 _APPEND_COMPATIBLE_FORMAT_VERSION = 20211121 _UPGRADE_COMPATIBLE_FORMAT_VERSION = 20211121 _READ_COMPATIBLE_FORMAT_VERSION = 20211121 class _TestSQLiteAttributeContainerStoreV20221023( sqlite_store.SQLiteAttributeContainerStore): """Test class for testing format compatibility checks.""" _FORMAT_VERSION = 20221023 _APPEND_COMPATIBLE_FORMAT_VERSION = 20221023 _UPGRADE_COMPATIBLE_FORMAT_VERSION = 20221023 _READ_COMPATIBLE_FORMAT_VERSION = 20211121 # TODO add tests for PythonAST2SQL. 
class SQLiteSchemaHelperTest(test_lib.BaseTestCase): """Tests for the SQLite schema helper.""" # pylint: disable=protected-access def testGetStorageDataType(self): """Tests the GetStorageDataType function.""" schema_helper = sqlite_store.SQLiteSchemaHelper() data_type = schema_helper.GetStorageDataType('bool') self.assertEqual(data_type, 'INTEGER') data_type = schema_helper.GetStorageDataType('int') self.assertEqual(data_type, 'INTEGER') data_type = schema_helper.GetStorageDataType('str') self.assertEqual(data_type, 'TEXT') data_type = schema_helper.GetStorageDataType('timestamp') self.assertEqual(data_type, 'BIGINT') data_type = schema_helper.GetStorageDataType('AttributeContainerIdentifier') self.assertEqual(data_type, 'TEXT') def testDeserializeValue(self): """Tests the DeserializeValue function.""" schema_helper = sqlite_store.SQLiteSchemaHelper() value = schema_helper.DeserializeValue('bool', 0) self.assertFalse(value) value = schema_helper.DeserializeValue('bool', 1) self.assertTrue(value) value = schema_helper.DeserializeValue('int', 1) self.assertEqual(value, 1) value = schema_helper.DeserializeValue('str', 'one') self.assertEqual(value, 'one') value = schema_helper.DeserializeValue('timestamp', 1) self.assertEqual(value, 1) # TODO: add test for AttributeContainerIdentifier def testSerializeValue(self): """Tests the SerializeValue function.""" schema_helper = sqlite_store.SQLiteSchemaHelper() value = schema_helper.SerializeValue('bool', False) self.assertEqual(value, 0) value = schema_helper.SerializeValue('bool', True) self.assertEqual(value, 1) value = schema_helper.SerializeValue('int', 1) self.assertEqual(value, 1) value = schema_helper.SerializeValue('str', 'one') self.assertEqual(value, 'one') value = schema_helper.SerializeValue('timestamp', 1) self.assertEqual(value, 1) # TODO: add test for AttributeContainerIdentifier class SQLiteAttributeContainerStoreTest(test_lib.BaseTestCase): """Tests for the SQLite-based storage file object.""" # pylint: 
disable=protected-access def setUp(self): """Sets up the needed objects used throughout the test.""" containers_manager.AttributeContainersManager.RegisterAttributeContainer( test_lib.TestAttributeContainer) def tearDown(self): """Cleans up the needed objects used throughout the test.""" containers_manager.AttributeContainersManager.DeregisterAttributeContainer( test_lib.TestAttributeContainer) def testCheckStorageMetadata(self): """Tests the _CheckStorageMetadata function.""" with test_lib.TempDirectory(): test_store = sqlite_store.SQLiteAttributeContainerStore() metadata_values = { 'format_version': f'{test_store._FORMAT_VERSION:d}', 'serialization_format': 'json'} test_store._CheckStorageMetadata(metadata_values) metadata_values['format_version'] = 'bogus' with self.assertRaises(IOError): test_store._CheckStorageMetadata(metadata_values) metadata_values['format_version'] = '1' with self.assertRaises(IOError): test_store._CheckStorageMetadata(metadata_values) metadata_values['format_version'] = f'{test_store._FORMAT_VERSION:d}' metadata_values['serialization_format'] = 'bogus' with self.assertRaises(IOError): test_store._CheckStorageMetadata(metadata_values) metadata_values['serialization_format'] = 'json' def testCreateAttributeContainerTable(self): """Tests the _CreateAttributeContainerTable function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: test_store._CreateAttributeContainerTable( attribute_container.CONTAINER_TYPE) with self.assertRaises(IOError): test_store._CreateAttributeContainerTable( attribute_container.CONTAINER_TYPE) finally: test_store.Close() # TODO: add tests for _CreateAttributeContainerFromRow # TODO: add tests for _Flush # TODO: add tests for _FlushWriteCache def testGetAttributeContainersWithFilter(self): 
"""Tests the _GetAttributeContainersWithFilter function.""" attribute_container = test_lib.TestAttributeContainer() attribute_container.attribute = '8f0bf95a7959baad9666b21a7feed79d' column_names = ['attribute'] with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: containers = list(test_store._GetAttributeContainersWithFilter( attribute_container.CONTAINER_TYPE, column_names=column_names)) self.assertEqual(len(containers), 0) test_store.AddAttributeContainer(attribute_container) containers = list(test_store._GetAttributeContainersWithFilter( attribute_container.CONTAINER_TYPE, column_names=column_names)) self.assertEqual(len(containers), 1) filter_expression = 'attribute == "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store._GetAttributeContainersWithFilter( attribute_container.CONTAINER_TYPE, column_names=column_names, filter_expression=filter_expression)) self.assertEqual(len(containers), 1) filter_expression = 'attribute != "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store._GetAttributeContainersWithFilter( attribute_container.CONTAINER_TYPE, column_names=column_names, filter_expression=filter_expression)) self.assertEqual(len(containers), 0) containers = list(test_store._GetAttributeContainersWithFilter( 'bogus', column_names=column_names)) self.assertEqual(len(containers), 0) finally: test_store.Close() def testGetNumberOfAttributeContainerRows(self): """Tests the _GetNumberOfAttributeContainerRows function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store._GetNumberOfAttributeContainerRows( 
attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) # Test for a supported container type that does not have a table # present in the storage file. query = f'DROP TABLE {attribute_container.CONTAINER_TYPE:s}' test_store._cursor.execute(query) number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) finally: test_store.Close() def testHasTable(self): """Tests the _HasTable function.""" with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: test_store._CreateAttributeContainerTable('test_container') result = test_store._HasTable('test_container') self.assertTrue(result) result = test_store._HasTable('bogus') self.assertFalse(result) finally: test_store.Close() def testRaiseIfNotReadable(self): """Tests the _RaiseIfNotReadable function.""" test_store = sqlite_store.SQLiteAttributeContainerStore() with self.assertRaises(IOError): test_store._RaiseIfNotReadable() def testRaiseIfNotWritable(self): """Tests the _RaiseIfNotWritable function.""" test_store = sqlite_store.SQLiteAttributeContainerStore() with self.assertRaises(IOError): test_store._RaiseIfNotWritable() # TODO: add tests for _ReadAndCheckStorageMetadata # TODO: add tests for _ReadMetadata # TODO: add tests for _UpdateStorageMetadataFormatVersion def testWriteExistingAttributeContainer(self): """Tests the _WriteExistingAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = 
sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store._WriteNewAttributeContainer(attribute_container) number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store._WriteExistingAttributeContainer(attribute_container) number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) finally: test_store.Close() # TODO: add tests for _WriteMetadata # TODO: add tests for _WriteMetadataValue def testWriteNewAttributeContainer(self): """Tests the _WriteNewAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store._WriteNewAttributeContainer(attribute_container) number_of_containers = test_store._GetNumberOfAttributeContainerRows( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) finally: test_store.Close() def testAddAttributeContainer(self): """Tests the AddAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) 
self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) finally: test_store.Close() with self.assertRaises(IOError): test_store.AddAttributeContainer(attribute_container) # TODO: add tests for CheckSupportedFormat def testGetAttributeContainerByIdentifier(self): """Tests the GetAttributeContainerByIdentifier function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: test_store.AddAttributeContainer(attribute_container) identifier = attribute_container.GetIdentifier() container = test_store.GetAttributeContainerByIdentifier( attribute_container.CONTAINER_TYPE, identifier) self.assertIsNotNone(container) identifier.sequence_number = 99 container = test_store.GetAttributeContainerByIdentifier( attribute_container.CONTAINER_TYPE, identifier) self.assertIsNone(container) finally: test_store.Close() def testGetAttributeContainerByIndex(self): """Tests the GetAttributeContainerByIndex function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: container = test_store.GetAttributeContainerByIndex( attribute_container.CONTAINER_TYPE, 0) self.assertIsNone(container) test_store.AddAttributeContainer(attribute_container) container = test_store.GetAttributeContainerByIndex( attribute_container.CONTAINER_TYPE, 0) self.assertIsNotNone(container) container = test_store.GetAttributeContainerByIndex('bogus', 0) self.assertIsNone(container) finally: 
test_store.Close() def testGetAttributeContainers(self): """Tests the GetAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() attribute_container.attribute = '8f0bf95a7959baad9666b21a7feed79d' with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE)) self.assertEqual(len(containers), 0) test_store.AddAttributeContainer(attribute_container) containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE)) self.assertEqual(len(containers), 1) filter_expression = 'attribute == "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE, filter_expression=filter_expression)) self.assertEqual(len(containers), 1) filter_expression = 'attribute != "8f0bf95a7959baad9666b21a7feed79d"' containers = list(test_store.GetAttributeContainers( attribute_container.CONTAINER_TYPE, filter_expression=filter_expression)) self.assertEqual(len(containers), 0) with self.assertRaises(IOError): list(test_store.GetAttributeContainers('bogus')) finally: test_store.Close() def testGetNumberOfAttributeContainers(self): """Tests the GetNumberOfAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( 
attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) number_of_containers = test_store.GetNumberOfAttributeContainers( 'bogus') self.assertEqual(number_of_containers, 0) finally: test_store.Close() def testHasAttributeContainers(self): """Tests the HasAttributeContainers function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: result = test_store.HasAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertFalse(result) test_store.AddAttributeContainer(attribute_container) result = test_store.HasAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertTrue(result) result = test_store.HasAttributeContainers('bogus') self.assertFalse(result) finally: test_store.Close() # TODO: add tests for Open and Close def testUpdateAttributeContainer(self): """Tests the UpdateAttributeContainer function.""" attribute_container = test_lib.TestAttributeContainer() with test_lib.TempDirectory() as temp_directory: test_path = os.path.join(temp_directory, 'acstore.sqlite') test_store = sqlite_store.SQLiteAttributeContainerStore() test_store.Open(path=test_path, read_only=False) try: number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 0) test_store.AddAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) test_store.UpdateAttributeContainer(attribute_container) number_of_containers = test_store.GetNumberOfAttributeContainers( attribute_container.CONTAINER_TYPE) self.assertEqual(number_of_containers, 1) finally: test_store.Close() def testVersionCompatibility(self): """Tests the version compatibility 
methods.""" with test_lib.TempDirectory() as temp_directory: v1_storage_path = os.path.join(temp_directory, 'v20220716.sqlite') v1_test_store = _TestSQLiteAttributeContainerStoreV20220716() v1_test_store.Open(path=v1_storage_path, read_only=False) v1_test_store.Close() v2_test_store_rw = _TestSQLiteAttributeContainerStoreV20221023() with self.assertRaises((IOError, OSError)): v2_test_store_rw.Open(path=v1_storage_path, read_only=False) v2_test_store_ro = _TestSQLiteAttributeContainerStoreV20221023() v2_test_store_ro.Open(path=v1_storage_path, read_only=True) v2_test_store_ro.Close() if __name__ == '__main__': unittest.main() acstore-20240407/tests/test_lib.py000066400000000000000000000045071460443036000167450ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Functions and classes for testing.""" import os import shutil import tempfile import unittest from acstore.containers import interface as containers_interface # The path to top of the dfWinReg source tree. PROJECT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) # The paths below are all derived from the project path directory. # They are enumerated explicitly here so that they can be overwritten for # compatibility with different build systems. TEST_DATA_PATH = os.path.join(PROJECT_PATH, 'test_data') class TestAttributeContainer(containers_interface.AttributeContainer): """Attribute container for testing purposes. Attributes: attribute (str): attribute for testing purposes. """ CONTAINER_TYPE = 'test_container' SCHEMA = {'attribute': 'str'} def __init__(self): """Initializes an attribute container.""" super(TestAttributeContainer, self).__init__() self.attribute = None class BaseTestCase(unittest.TestCase): """The base test case.""" # Show full diff results. maxDiff = None def _GetTestFilePath(self, path_segments): """Retrieves the path of a test file relative to the test data directory. Args: path_segments (list[str]): path segments inside the test data directory. 
Returns: str: path of the test file. """ # Note that we need to pass the individual path segments to os.path.join # and not a list. return os.path.join(TEST_DATA_PATH, *path_segments) def _SkipIfPathNotExists(self, path): """Skips the test if the path does not exist. Args: path (str): path of a test file. Raises: SkipTest: if the path does not exist and the test should be skipped. """ if not os.path.exists(path): filename = os.path.basename(path) raise unittest.SkipTest(f'missing test file: {filename:s}') class TempDirectory(object): """Class that implements a temporary directory.""" def __init__(self): """Initializes a temporary directory.""" super(TempDirectory, self).__init__() self.name = '' def __enter__(self): """Make this work with the 'with' statement.""" self.name = tempfile.mkdtemp() return self.name def __exit__(self, exception_type, value, traceback): """Make this work with the 'with' statement.""" shutil.rmtree(self.name, True) acstore-20240407/tox.ini000066400000000000000000000025171460443036000147360ustar00rootroot00000000000000[tox] envlist = py3{8,9,10,11,12},coverage,docformatter,docs,lint,wheel [testenv] allowlist_externals = ./run_tests.py pip_pre = True passenv = CFLAGS CPPFLAGS LDFLAGS setenv = PYTHONPATH = {toxinidir} deps = -rrequirements.txt -rtest_requirements.txt coverage: coverage wheel: build setuptools >= 65 wheel commands = py3{8,9,10,11,12}: ./run_tests.py coverage: coverage erase coverage: coverage run --source=acstore --omit="*_test*,*__init__*,*test_lib*" run_tests.py coverage: coverage xml wheel: python -m build --no-isolation --wheel [testenv:docformatter] usedevelop = True deps = docformatter commands = docformatter --in-place --recursive acstore tests [testenv:docs] usedevelop = True deps = -rdocs/requirements.txt commands = sphinx-build -b html -d build/doctrees docs dist/docs sphinx-build -b linkcheck docs dist/docs [testenv:lint] skipsdist = True pip_pre = True passenv = CFLAGS CPPFLAGS LDFLAGS setenv = PYTHONPATH = 
{toxinidir} deps = -rrequirements.txt -rtest_requirements.txt docformatter pylint >= 3.0.0, < 3.1.0 setuptools yamllint >= 1.26.0 commands = docformatter --version pylint --version yamllint -v docformatter --check --diff --recursive acstore setup.py tests pylint --rcfile=.pylintrc acstore setup.py tests yamllint -c .yamllint.yaml test_data acstore-20240407/utils/000077500000000000000000000000001460443036000145565ustar00rootroot00000000000000acstore-20240407/utils/__init__.py000066400000000000000000000000301460443036000166600ustar00rootroot00000000000000# -*- coding: utf-8 -*- acstore-20240407/utils/check_dependencies.py000077500000000000000000000006461460443036000207240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to check for the availability and version of dependencies.""" import sys # Change PYTHONPATH to include dependencies. sys.path.insert(0, '.') import utils.dependencies # pylint: disable=wrong-import-position if __name__ == '__main__': dependency_helper = utils.dependencies.DependencyHelper() if not dependency_helper.CheckDependencies(): sys.exit(1) acstore-20240407/utils/dependencies.py000066400000000000000000000264771460443036000175760ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Helper to check for availability and version of dependencies.""" import configparser import os import re class DependencyDefinition(object): """Dependency definition. Attributes: dpkg_name (str): name of the dpkg package that provides the dependency. is_optional (bool): True if the dependency is optional. l2tbinaries_name (str): name of the l2tbinaries package that provides the dependency. maximum_version (str): maximum supported version, a greater or equal version is not supported. minimum_version (str): minimum supported version, a lesser version is not supported. name (str): name of (the Python module that provides) the dependency. pypi_name (str): name of the PyPI package that provides the dependency. 
python2_only (bool): True if the dependency is only supported by Python 2. python3_only (bool): True if the dependency is only supported by Python 3. rpm_name (str): name of the rpm package that provides the dependency. skip_check (bool): True if the dependency should be skipped by the CheckDependencies or CheckTestDependencies methods of DependencyHelper. skip_requires (bool): True if the dependency should be excluded from requirements.txt or setup.py install_requires. version_property (str): name of the version attribute or function. """ def __init__(self, name): """Initializes a dependency configuration. Args: name (str): name of the dependency. """ super(DependencyDefinition, self).__init__() self.dpkg_name = None self.is_optional = False self.l2tbinaries_name = None self.maximum_version = None self.minimum_version = None self.name = name self.pypi_name = None self.python2_only = False self.python3_only = False self.rpm_name = None self.skip_check = None self.skip_requires = None self.version_property = None class DependencyDefinitionReader(object): """Dependency definition reader.""" _VALUE_NAMES = frozenset([ 'dpkg_name', 'is_optional', 'l2tbinaries_name', 'maximum_version', 'minimum_version', 'pypi_name', 'python2_only', 'python3_only', 'rpm_name', 'skip_check', 'skip_requires', 'version_property']) def _GetConfigValue(self, config_parser, section_name, value_name): """Retrieves a value from the config parser. Args: config_parser (ConfigParser): configuration parser. section_name (str): name of the section that contains the value. value_name (str): name of the value. Returns: object: configuration value or None if the value does not exists. """ try: return config_parser.get(section_name, value_name) except configparser.NoOptionError: return None def Read(self, file_object): """Reads dependency definitions. Args: file_object (file): file-like object to read from. Yields: DependencyDefinition: dependency definition. 
""" config_parser = configparser.ConfigParser(interpolation=None) config_parser.read_file(file_object) for section_name in config_parser.sections(): dependency_definition = DependencyDefinition(section_name) for value_name in self._VALUE_NAMES: value = self._GetConfigValue(config_parser, section_name, value_name) setattr(dependency_definition, value_name, value) yield dependency_definition class DependencyHelper(object): """Dependency helper. Attributes: dependencies (dict[str, DependencyDefinition]): dependencies. """ _VERSION_NUMBERS_REGEX = re.compile(r'[0-9.]+') _VERSION_SPLIT_REGEX = re.compile(r'\.|\-') def __init__( self, dependencies_file='dependencies.ini', test_dependencies_file='test_dependencies.ini'): """Initializes a dependency helper. Args: dependencies_file (Optional[str]): path to the dependencies configuration file. test_dependencies_file (Optional[str]): path to the test dependencies configuration file. """ super(DependencyHelper, self).__init__() self._test_dependencies = {} self.dependencies = {} dependency_reader = DependencyDefinitionReader() with open(dependencies_file, 'r', encoding='utf-8') as file_object: for dependency in dependency_reader.Read(file_object): self.dependencies[dependency.name] = dependency if os.path.exists(test_dependencies_file): with open(test_dependencies_file, 'r', encoding='utf-8') as file_object: for dependency in dependency_reader.Read(file_object): self._test_dependencies[dependency.name] = dependency def _CheckPythonModule(self, dependency): """Checks the availability of a Python module. Args: dependency (DependencyDefinition): dependency definition. Returns: tuple: containing: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. 
""" module_object = self._ImportPythonModule(dependency.name) if not module_object: return False, f'missing: {dependency.name:s}' if not dependency.version_property: return True, dependency.name return self._CheckPythonModuleVersion( dependency.name, module_object, dependency.version_property, dependency.minimum_version, dependency.maximum_version) def _CheckPythonModuleVersion( self, module_name, module_object, version_property, minimum_version, maximum_version): """Checks the version of a Python module. Args: module_object (module): Python module. module_name (str): name of the Python module. version_property (str): version attribute or function. minimum_version (str): minimum version. maximum_version (str): maximum version. Returns: tuple: containing: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ module_version = None if not version_property.endswith('()'): module_version = getattr(module_object, version_property, None) else: version_method = getattr( module_object, version_property[:-2], None) if version_method: module_version = version_method() if not module_version: return False, ( f'unable to determine version information for: {module_name:s}') # Make sure the module version is a string. module_version = f'{module_version!s}' # Split the version string and convert every digit into an integer. # A string compare of both version strings will yield an incorrect result. # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev. 
module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0] if module_version[-1] == '.': module_version = module_version[:-1] try: module_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(module_version))) except ValueError: return False, ( f'unable to parse module version: {module_name:s} {module_version:s}') if minimum_version: try: minimum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(minimum_version))) except ValueError: return False, ( f'unable to parse minimum version: {module_name:s} ' f'{minimum_version:s}') if module_version_map < minimum_version_map: return False, ( f'{module_name:s} version: {module_version!s} is too old, ' f'{minimum_version!s} or later required') if maximum_version: try: maximum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(maximum_version))) except ValueError: return False, ( f'unable to parse maximum version: {module_name:s} ' f'{maximum_version:s}') if module_version_map > maximum_version_map: return False, ( f'{module_name:s} version: {module_version!s} is too recent, ' f'{maximum_version!s} or earlier required') return True, f'{module_name:s} version: {module_version!s}' def _ImportPythonModule(self, module_name): """Imports a Python module. Args: module_name (str): name of the module. Returns: module: Python module or None if the module cannot be imported. """ try: module_object = list(map(__import__, [module_name]))[0] except ImportError: return None # If the module name contains dots get the upper most module object. if '.' in module_name: for submodule_name in module_name.split('.')[1:]: module_object = getattr(module_object, submodule_name, None) return module_object def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): """Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. 
result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose. """ if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print(f'{status_indicator:s}\t{status_message:s}') elif verbose_output: print(f'[OK]\t\t{status_message:s}') def CheckDependencies(self, verbose_output=True): """Checks the availability of the dependencies. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. """ print('Checking availability and versions of dependencies.') check_result = True for _, dependency in sorted(self.dependencies.items()): if dependency.skip_check: continue result, status_message = self._CheckPythonModule(dependency) if not result and not dependency.is_optional: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result def CheckTestDependencies(self, verbose_output=True): """Checks the availability of the dependencies when running tests. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. 
""" if not self.CheckDependencies(verbose_output=verbose_output): return False print('Checking availability and versions of test dependencies.') check_result = True for dependency in sorted( self._test_dependencies.values(), key=lambda dependency: dependency.name): if dependency.skip_check: continue result, status_message = self._CheckPythonModule(dependency) if not result and not dependency.is_optional: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result acstore-20240407/utils/update_release.sh000077500000000000000000000014051460443036000200770ustar00rootroot00000000000000#!/bin/bash # # Script that makes changes in preparation of a new release, such as updating # the version and documentation. EXIT_FAILURE=1; EXIT_SUCCESS=0; VERSION=`date -u +"%Y%m%d"` DPKG_DATE=`date -R` # Update the Python module version. sed "s/__version__ = '[0-9]*'/__version__ = '${VERSION}'/" -i acstore/__init__.py # Update the version in the setuptools configuration. sed "s/version = [0-9]*/version = ${VERSION}/" -i setup.cfg # Update the version in the dpkg configuration files. cat > config/dpkg/changelog << EOT acstore (${VERSION}-1) unstable; urgency=low * Auto-generated -- Log2Timeline maintainers ${DPKG_DATE} EOT # Regenerate the API documentation. tox -edocformatter,docs exit ${EXIT_SUCCESS};