pax_global_header00006660000000000000000000000064141312750050014510gustar00rootroot0000000000000052 comment=70955da02ee7a30f4c86b2f4bece0447f0ca3e89 ospd-openvas-21.4.3/000077500000000000000000000000001413127500500142155ustar00rootroot00000000000000ospd-openvas-21.4.3/.coveragerc000066400000000000000000000001061413127500500163330ustar00rootroot00000000000000[run] omit = tests/* */__init__.py source = ospd_openvas ospd-openvas-21.4.3/.github/000077500000000000000000000000001413127500500155555ustar00rootroot00000000000000ospd-openvas-21.4.3/.github/CODEOWNERS000066400000000000000000000001021413127500500171410ustar00rootroot00000000000000# default reviewers * @greenbone/ospd-maintainers ospd-openvas-21.4.3/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000014631413127500500213620ustar00rootroot00000000000000**What**: **Why**: **How**: **Checklist**: - [ ] Tests - [ ] [CHANGELOG](https://github.com/greenbone/gsa/blob/master/CHANGELOG.md) Entry ospd-openvas-21.4.3/.github/dependabot.yml000066400000000000000000000002201413127500500203770ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: weekly time: "04:00" open-pull-requests-limit: 10 ospd-openvas-21.4.3/.github/workflows/000077500000000000000000000000001413127500500176125ustar00rootroot00000000000000ospd-openvas-21.4.3/.github/workflows/ci-python.yml000066400000000000000000000034271413127500500222550ustar00rootroot00000000000000name: Build and test Python package on: push: branches: [ master, ospd-openvas-21.04, ospd-openvas-20.08 ] pull_request: branches: [ master, ospd-openvas-21.04, ospd-openvas-20.08 ] jobs: linting: name: Linting runs-on: 'ubuntu-latest' strategy: matrix: python-version: - 3.7 - 3.8 - 3.9 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install poetry and dependencies uses: greenbone/actions/poetry@v1 - 
name: Check with black, pylint and pontos.version uses: greenbone/actions/lint-python@v1 with: packages: ospd_openvas tests test: name: Run all tests runs-on: 'ubuntu-latest' strategy: matrix: python-version: - 3.7 - 3.8 - 3.9 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install poetry and dependencies uses: greenbone/actions/poetry@v1 - name: Run unit tests run: poetry run python -m unittest codecov: name: Upload coverage to codecov.io needs: test runs-on: 'ubuntu-latest' steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.8 - name: Install poetry and dependencies uses: greenbone/actions/poetry@v1 - name: Calculate and upload coverage to codecov.io uses: greenbone/actions/coverage-python@v1 ospd-openvas-21.4.3/.github/workflows/codeql-analysis-python.yml000066400000000000000000000013521413127500500247450ustar00rootroot00000000000000name: "CodeQL" on: push: branches: [ master, ospd-openvas-21.04, ospd-openvas-20.08 ] pull_request: branches: [ master, ospd-openvas-21.04, ospd-openvas-20.08 ] schedule: - cron: '30 5 * * 0' # 5:30h on Sundays jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'python' ] steps: - name: Checkout repository uses: actions/checkout@v2 - name: Initialize CodeQL uses: github/codeql-action/init@v1 with: languages: ${{ matrix.language }} - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v1 ospd-openvas-21.4.3/.github/workflows/deploy-pypi.yml000066400000000000000000000011511413127500500226060ustar00rootroot00000000000000name: Deploy on PyPI on: release: types: [created] jobs: deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.8 - name: 
Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade poetry python -m pip install --upgrade twine - name: Build and publish env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} run: | poetry build twine upload dist/* ospd-openvas-21.4.3/.github/workflows/release-pontos-manually.yml000066400000000000000000000036421413127500500251220ustar00rootroot00000000000000name: Manually release gvm-libs with pontos on: workflow_dispatch: jobs: release-patch: env: GITHUB_USER: ${{ secrets.GREENBONE_BOT }} GITHUB_MAIL: ${{ secrets.GREENBONE_BOT_MAIL }} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} GPG_KEY: ${{ secrets.GPG_KEY }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} name: Release patch with pontos runs-on: 'ubuntu-latest' steps: - uses: actions/checkout@v2 with: persist-credentials: false - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.8 - name: Install poetry and dependencies uses: greenbone/actions/poetry@v1 - name: Tell git who I am run: | git config --global user.name "${{ env.GITHUB_USER }}" git config --global user.email "${{ env.GITHUB_MAIL }}" git remote set-url origin https://${{ env.GITHUB_TOKEN }}@github.com/${{ github.repository }} - run: echo "Current Branch is $GITHUB_BASE_REF" - name: Prepare patch release with pontos run: | poetry run pontos-release prepare --patch echo "VERSION=$(poetry run pontos-version show)" >> $GITHUB_ENV - name: Release with pontos run: | poetry run pontos-release release - name: Import key from secrets run: | echo -e "${{ env.GPG_KEY }}" >> tmp.file gpg \ --pinentry-mode loopback \ --passphrase ${{ env.GPG_PASSPHRASE }} \ --import tmp.file rm tmp.file - name: Sign with pontos-release sign run: | echo "Signing assets for ${{env.VERSION}}" poetry run pontos-release sign \ --signing-key ${{ env.GPG_FINGERPRINT }} \ --passphrase ${{ env.GPG_PASSPHRASE }} \ --release-version ${{ env.VERSION }} 
ospd-openvas-21.4.3/.github/workflows/release-pontos-patch.yml000066400000000000000000000043121413127500500243720ustar00rootroot00000000000000name: Release Python package patch version with pontos on: pull_request: types: [closed] branches: [ 'ospd-openvas-21.04', 'ospd-openvas-20.08' ] jobs: release-patch: env: GITHUB_USER: ${{ secrets.GREENBONE_BOT }} GITHUB_MAIL: ${{ secrets.GREENBONE_BOT_MAIL }} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} GPG_KEY: ${{ secrets.GPG_KEY }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} name: Release patch with pontos # If the label 'make patch release' is set. If PR is closed because of an merge if: contains( github.event.pull_request.labels.*.name, 'make patch release') && github.event.pull_request.merged == true runs-on: 'ubuntu-latest' steps: - uses: actions/checkout@v2 with: persist-credentials: false - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.8 - name: Install poetry and dependencies uses: greenbone/actions/poetry@v1 - name: Tell git who I am run: | git config --global user.name "${{ env.GITHUB_USER }}" git config --global user.email "${{ env.GITHUB_MAIL }}" git remote set-url origin https://${{ env.GITHUB_TOKEN }}@github.com/${{ github.repository }} - run: echo "Current Branch is $GITHUB_BASE_REF" - name: Prepare patch release with pontos run: | poetry run pontos-release prepare --patch echo "VERSION=$(poetry run pontos-version show)" >> $GITHUB_ENV - name: Release with pontos run: | poetry run pontos-release release - name: Import key from secrets run: | echo -e "${{ env.GPG_KEY }}" >> tmp.file gpg \ --pinentry-mode loopback \ --passphrase ${{ env.GPG_PASSPHRASE }} \ --import tmp.file rm tmp.file - name: Sign with pontos-release sign run: | echo "Signing assets for ${{env.VERSION}}" poetry run pontos-release sign \ --signing-key ${{ env.GPG_FINGERPRINT }} \ --passphrase ${{ env.GPG_PASSPHRASE }} \ --release-version ${{ env.VERSION }} 
ospd-openvas-21.4.3/.gitignore000066400000000000000000000001211413127500500161770ustar00rootroot00000000000000__pycache__ *.pyc *.log .egg *.egg-info dist build .idea .vscode .coverage .venv ospd-openvas-21.4.3/.pylintrc000066400000000000000000000346651413127500500161000ustar00rootroot00000000000000[MASTER] # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist=lxml # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns=docs # Pickle collected data for later comparisons. persistent=yes # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" # # bad-continuation is disabled because of a bug in pylint. # See https://github.com/ambv/black/issues/48 and https://github.com/PyCQA/pylint/issues/289 disable=len-as-condition, attribute-defined-outside-init, missing-docstring, bad-continuation, R #disable=print-statement, # parameter-unpacking, # unpacking-in-except, # old-raise-syntax, # backtick, # long-suffix, # old-ne-operator, # old-octal-literal, # import-star-module-level, # non-ascii-bytes-literal, # raw-checker-failed, # bad-inline-option, # locally-disabled, # locally-enabled, # file-ignored, # suppressed-message, # useless-suppression, # deprecated-pragma, # apply-builtin, # basestring-builtin, # buffer-builtin, # cmp-builtin, # coerce-builtin, # execfile-builtin, # file-builtin, # long-builtin, # raw_input-builtin, # reduce-builtin, # standarderror-builtin, # unicode-builtin, # xrange-builtin, # coerce-method, # delslice-method, # getslice-method, # setslice-method, # no-absolute-import, # old-division, # dict-iter-method, # dict-view-method, # next-method-called, # metaclass-assignment, # indexing-exception, # raising-string, # reload-builtin, # oct-method, # hex-method, # nonzero-method, # cmp-method, # input-builtin, # round-builtin, # intern-builtin, # unichr-builtin, # map-builtin-not-iterating, # zip-builtin-not-iterating, # range-builtin-not-iterating, # filter-builtin-not-iterating, # using-cmp-argument, # eq-without-hash, # div-method, # idiv-method, # rdiv-method, # exception-message-attribute, # invalid-str-codec, # sys-max-int, # bad-python3-import, # deprecated-string-function, # deprecated-str-translate-call, # deprecated-itertools-function, # deprecated-types-field, # next-method-defined, # dict-items-not-iterating, # dict-keys-not-iterating, # dict-values-not-iterating # Enable the message, report, category or checker with the given id(s). 
You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [REPORTS] # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio).You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Tells whether to display a full report or only the messages reports=no # Activate the evaluation score. score=no [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=optparse.Values,sys.exit [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. 
callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expectedly # not used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO [BASIC] # Regular expression which should only match correct argument names argument-rgx=[a-z_][a-z0-9_]{1,40}$ # Regular expression which should only match correct instance attribute names attr-rgx=[a-z_][a-z0-9_]{1,40}$ # Bad variable names which should always be refused, separated by a comma bad-names=foo, bar, baz, toto, tutu, tata # Regular expression matching correct class attribute names. class-attribute-rgx=([a-z_][a-z0-9_]{1,40})|([A-Z_][A-Z0-9_]{1,30})$ # Naming style matching correct class names class-naming-style=PascalCase # Naming style matching correct constant names const-naming-style=UPPER_CASE # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=3 # Regular expression which should only match correct function names function-rgx=[a-z_][a-z0-9_]+$ # Good variable names which should always be accepted, separated by a comma good-names=e, f, i, j, k, ex, Run, logger, _ # Include a hint for the correct naming format with invalid-name include-naming-hint=yes # Regular expression matching correct inline iteration names. 
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression which should only match correct method names method-rgx=[a-z_][a-z0-9_]+$ # Regular expression which should only match correct module names module-rgx=([a-z]+)|(test_*)$ # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. property-classes=abc.abstractproperty # Regular expression which should only match correct variable names variable-rgx=[a-z_][a-z0-9_]+$ [SIMILARITIES] # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no # Minimum lines number of a similarity. min-similarity-lines=4 [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. 
In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format=LF # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=80 # Maximum number of lines in a module max-module-lines=1000 # List of optional constructs for which whitespace checking is disabled. `dict- # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. # `trailing-comma` allows a space between comma and closing bracket: (a, ). # `empty-line` allows space-only lines. 
no-space-check=trailing-comma, dict-separator # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Deprecated modules which should not be used, separated by a comma deprecated-modules=optparse,tkinter.tix # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant [DESIGN] # Maximum number of arguments for function / method max-args=15 # Maximum number of attributes for a class (see R0902). max-attributes=20 # Maximum number of boolean expressions in a if statement max-bool-expr=5 # Maximum number of branch for function / method body max-branches=12 # Maximum number of locals for function / method body max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). 
max-public-methods=30 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of statements in function / method body max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=0 [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict, _fields, _replace, _source, _make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception ospd-openvas-21.4.3/CHANGELOG.md000066400000000000000000000255401413127500500160340ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## [21.4.3] - 2021-10-12 ### Changed - Downgrade required version for psutil to 5.5.1 [#489](https://github.com/greenbone/ospd-openvas/pull/489) ### Fixed - Fix timeout preference handling. [#486](https://github.com/greenbone/ospd-openvas/pull/486) [21.4.3]: https://github.com/greenbone/ospd-openvas/compare/v21.4.2...v21.4.3 ## [21.4.2] - 2021-08-04 ### Changed - Use better defaults for for ospd-openvas settings [#454](https://github.com/greenbone/ospd-openvas/pull/454) - Improved error logging while trying to acquire a lock file [#458](https://github.com/greenbone/ospd-openvas/pull/458) - Stopping and interrupting scans. 
[#465](https://github.com/greenbone/ospd-openvas/pull/465) [21.4.2]: https://github.com/greenbone/ospd-openvas/compare/v21.4.1...v21.4.2 ## [21.4.1] - 2021-06-24 ### Added - Add new Credentials for SSH to get su privileges. Backport of [#419](https://github.com/greenbone/ospd-openvas/pull/419). [#426](https://github.com/greenbone/ospd-openvas/pull/426) ### Removed - Remove some special handling for Consider Alive alive test. [#413](https://github.com/greenbone/ospd-openvas/pull/413) [21.4.1]: https://github.com/greenbone/ospd-openvas/compare/v21.4.0...v21.4.1 ## [21.4.0] (2021-04-16) ### Added - Add dedicated port list for alive detection (Boreas only) as scanner preference if supplied via OSP. [#327](https://github.com/greenbone/ospd-openvas/pull/327) - Add methods for adding VTs to the redis cache. [#337](https://github.com/greenbone/ospd-openvas/pull/337) - Add support for supplying alive test methods via separate elements. [#331](https://github.com/greenbone/ospd-openvas/pull/331) - Add support CVSSv3 and accept new tags for severity vector, origin, date. [#346](https://github.com/greenbone/ospd-openvas/pull/346) ### Changed - Get all results from main kb. [#285](https://github.com/greenbone/ospd-openvas/pull/285) - Extend severities with origin and date. [#192](https://github.com/greenbone/ospd-openvas/pull/192) ### Deprecated ### Removed [21.4.0]: https://github.com/greenbone/ospd-openvas/compare/ospd-openvas-20.08...v21.4.0 ## [21.4.3] - 2021-10-12 ### Changed - Downgrade required version for psutil to 5.5.1 [#489](https://github.com/greenbone/ospd-openvas/pull/489) ## [21.4.3] - 2021-10-12### Fixed - Fix timeout preference handling. [#486](https://github.com/greenbone/ospd-openvas/pull/486) [21.4.3]: https://github.com/greenbone/ospd-openvas/compare/v21.4.2...v21.4.3 [21.4.3]: https://github.com/greenbone/ospd-openvas/compare/v21.4.2...v21.4.3 ## [20.8.2] () ### Added - Check for scanner error messages before leaving. 
[#395](https://github.com/greenbone/ospd-openvas/pull/395) ### Fixed - Don't crash with non-ascii chars in openvas.conf. [#381](https://github.com/greenbone/ospd-openvas/pull/381) ### Removed - Remove methods handling the nvticache name. [#318](https://github.com/greenbone/ospd-openvas/pull/318) - Remove py35 and py36 support. [#319](https://github.com/greenbone/ospd-openvas/pull/319) - Remove globalscanid. [#326](https://github.com/greenbone/ospd-openvas/pull/326) [20.8.2]: https://github.com/greenbone/ospd-openvas/compare/v20.08.1...ospd-openvas-20.08 ## (20.8.1) - 2021-02-01 ### Added - Add debug level log messages. [#373](https://github.com/greenbone/ospd-openvas/pull/373) ### Changed - Improve logging for unsatisfied vts dependencies. [#336](https://github.com/greenbone/ospd-openvas/pull/336) - Do not use busy wait when waiting for the openvas scan process to finish. [#360](https://github.com/greenbone/ospd-openvas/pull/360) - The description field of the systemd ospd-openvas.service file was updated. [#372](https://github.com/greenbone/ospd-openvas/pull/372) - Improve logging for unexpected data in script_xref tags. [#374](https://github.com/greenbone/ospd-openvas/pull/374) ### Fixed - Fix nvticache name for stable version from sources. [#317](https://github.com/greenbone/ospd-openvas/pull/317) - Fix stop scan during preferences handling, before spawning OpenVAS. [#332](https://github.com/greenbone/ospd-openvas/pull/332) - Fix alive test preferences when a non default method is selected. [#334](https://github.com/greenbone/ospd-openvas/pull/334) - Check for empty vts preferences list. [#340](https://github.com/greenbone/ospd-openvas/pull/340) - Fix progress calculation when the host count differs from the target string count. [#343](https://github.com/greenbone/ospd-openvas/pull/343) - Don't crash with non-ascii chars in openvas.conf. 
[#391](https://github.com/greenbone/ospd-openvas/pull/381) [20.8.1]: https://github.com/greenbone/ospd-openvas/compare/v20.8.0...ospd-openvas-20.08 ## [20.8.0] (2020-08-12) ### Added - Add solution method to solution of vt object. [#131](https://github.com/greenbone/ospd-openvas/pull/131) - Add typing to daemon.py, nvticache.py and db.py. [#161](https://github.com/greenbone/ospd-openvas/pull/161)[#162](https://github.com/greenbone/ospd-openvas/pull/162)[#163](https://github.com/greenbone/ospd-openvas/pull/163) - Add support for alive test settings. [#182](https://github.com/greenbone/ospd-openvas/pull/182) - Add missing scan preferences expand_vhosts and test_empty_vhost. [#184](https://github.com/greenbone/ospd-openvas/pull/184) - Set reverse lookup options. [#185](https://github.com/greenbone/ospd-openvas/pull/185) - Check if the amount of vts in redis is coherent. [#195](https://github.com/greenbone/ospd-openvas/pull/195) [#197](https://github.com/greenbone/ospd-openvas/pull/197) - Add support for test_alive_hosts_only feature of openvas. [#204](https://github.com/greenbone/ospd-openvas/pull/204) - Use lock file during feed update to avoid corrupted cache. [#207](https://github.com/greenbone/ospd-openvas/pull/207) - Add details parameter to get_vt_iterator(). [#215](https://github.com/greenbone/ospd-openvas/pull/215) - Add [pontos](https://github.com/greenbone/pontos) as dev dependency for managing the version information in ospd-openvas [#238](https://github.com/greenbone/ospd-openvas/pull/238) - Pass store directory to OSPDaemon init [#266](https://github.com/greenbone/ospd-openvas/pull/266) - Add URI field to results for file path or webservice URL [#271](https://github.com/greenbone/ospd-openvas/pull/271) - Add element to OSPD_PARAMS entries to indicate visibility for client. 
[#293](https://github.com/greenbone/ospd-openvas/pull/293) ### Changed - Less strict checks for the nvti cache version [#150](https://github.com/greenbone/ospd-openvas/pull/150) [#165](https://github.com/greenbone/ospd-openvas/pull/165) [#166](https://github.com/greenbone/ospd-openvas/pull/166) - Set self.vts to None if there is a pending feed. [#172](https://github.com/greenbone/ospd-openvas/pull/172) - Use the new method clear() from Vts class. [#193](https://github.com/greenbone/ospd-openvas/pull/193) - Start server before initialize the vts. [#196](https://github.com/greenbone/ospd-openvas/pull/196) - Get vts metadata from redis and reduce stored data in cache. [#205](https://github.com/greenbone/ospd-openvas/pull/205) - Update license to AGPL-3.0+ [#228](https://github.com/greenbone/ospd-openvas/pull/228) - Replaced pipenv with poetry for dependency management. `poetry install` works a bit different then `pipenv install`. It installs dev packages by default and also ospd in editable mode. This means after running poetry install ospd will directly be importable in the virtual python environment. [#235](https://github.com/greenbone/ospd-openvas/pull/235) - Don't send host details and log messages to the client when Boreas is enabled. [#252](https://github.com/greenbone/ospd-openvas/pull/252) - Progress bar calculation do not takes in account dead hosts. [#252](https://github.com/greenbone/ospd-openvas/pull/252) - Host progress is stored as integer. [#256](https://github.com/greenbone/ospd-openvas/pull/256) - Use flock for the feed lock file. [#257](https://github.com/greenbone/ospd-openvas/pull/257) - Improvements for fetching results from redis. [#282](https://github.com/greenbone/ospd-openvas/pull/282) - Add RW permission to the group on the feed lock file. [#300](https://github.com/greenbone/ospd-openvas/pull/300) [#301](https://github.com/greenbone/ospd-openvas/pull/301) ### Fixed - Check vt_aux for None before trying to access it. 
[#177](https://github.com/greenbone/ospd-openvas/pull/177) - Fix snmp credentials. [#186](https://github.com/greenbone/ospd-openvas/pull/186) - Escape script name before adding the result in an xml entity. [#188](https://github.com/greenbone/ospd-openvas/pull/188) - Fix handling of denied hosts. [#263](https://github.com/greenbone/ospd-openvas/pull/263) - Fix handling of special chars in credentials. [#294](https://github.com/greenbone/ospd-openvas/pull/294) - Fix type and default value of optimize_test preference. [#302](https://github.com/greenbone/ospd-openvas/pull/302) - Fix deploy and upload to pypi. [#315](https://github.com/greenbone/ospd-openvas/pull/315) - Fix ospd version dependency. [#316](https://github.com/greenbone/ospd-openvas/pull/316) ### Removed - Remove use_mac_addr, vhost_ip and vhost scan preferences. [#184](https://github.com/greenbone/ospd-openvas/pull/184) - Handling of finished host for resume task. [#252](https://github.com/greenbone/ospd-openvas/pull/252) - Don't release vts explicitly. [#261](https://github.com/greenbone/ospd-openvas/pull/261) - Drop handling of network_scan. [#265](https://github.com/greenbone/ospd-openvas/pull/265) [20.8.0]: https://github.com/greenbone/ospd-openvas/compare/ospd-openvas-1.0...ospd-openvas-20.08 ## [1.0.1] ### Added - Check the vt's preference value for type 'file'. [#130](https://github.com/greenbone/ospd-openvas/pull/130). - Check for malformed credentials. [#160](https://github.com/greenbone/ospd-openvas/pull/160). - Send messages generated by the scannner main process. [#171](https://github.com/greenbone/ospd-openvas/pull/171). ### Changed - Exit with exit code 1 if it was not possible to connect to redis. [#133](https://github.com/greenbone/ospd-openvas/pull/133) - Return None if the scan finished successfully. [#137](https://github.com/greenbone/ospd-openvas/pull/137) ### Fixed - Improve redis clean out when stopping a scan. 
[#128](https://github.com/greenbone/ospd-openvas/pull/128) - Improve error handling when creating vts xml elements. [#139](https://github.com/greenbone/ospd-openvas/pull/139) - Init the superclass with kwargs. [#141](https://github.com/greenbone/ospd-openvas/pull/141) - Avoid ospd-openvas to crash if redis is flushed during vt dictionary creation. [#146](https://github.com/greenbone/ospd-openvas/pull/146) [1.0.1]: https://github.com/greenbone/ospd-openvas/compare/v1.0.0...ospd-openvas-1.0 ## [1.0.0] (2019-10-11) This is the first release of the ospd-openvas module for the Greenbone Vulnerability Management (GVM) framework. [1.0.0]: https://github.com/greenbone/ospd-openvas/compare/v1.0.0 ospd-openvas-21.4.3/COPYING000066400000000000000000001023301413127500500152470ustar00rootroot00000000000000 GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. 
Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". 
"Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. 
You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. 
d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Remote Network Interaction; Use with the GNU General Public License. Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . 
ospd-openvas-21.4.3/MANIFEST.in000066400000000000000000000002221413127500500157470ustar00rootroot00000000000000include CHANGELOG.md COPYING poetry.toml poetry.lock README.md setup.py docs/ospd-openvas.8 pyproject.toml recursive-include tests *.py *.csv ospd-openvas-21.4.3/README.md000066400000000000000000000114551413127500500155020ustar00rootroot00000000000000![Greenbone Logo](https://www.greenbone.net/wp-content/uploads/gb_logo_resilience_horizontal.png) # ospd-openvas [![GitHub releases](https://img.shields.io/github/release/greenbone/ospd-openvas.svg)](https://github.com/greenbone/ospd-openvas/releases) [![PyPI](https://img.shields.io/pypi/v/ospd-openvas.svg)](https://pypi.org/project/ospd-openvas/) [![code test coverage](https://codecov.io/gh/greenbone/ospd/branch/ospd-openvas-21.04/graphs/badge.svg)](https://codecov.io/gh/greenbone/ospd-openvas) [![Build and test](https://github.com/greenbone/ospd-openvas/actions/workflows/ci-python.yml/badge.svg?branch=ospd-openvas-21.04)](https://github.com/greenbone/ospd-openvas/actions/workflows/ci-python.yml?query=branch%3Aospd-openvas-21.04++) This is an OSP server implementation to allow GVM to remotely control OpenVAS, see . Once running, you need to configure OpenVAS for the Greenbone Vulnerability Manager, for example via the web interface Greenbone Security Assistant. Then you can create scan tasks to use OpenVAS. ## Installation ### Requirements Python 3.7 and later is supported. Beyond the [ospd base library](https://github.com/greenbone/ospd), `ospd-openvas` has dependencies on the following Python packages: - `redis` - `psutil` - `packaging` There are no special installation aspects for this module beyond the general installation guide for ospd-based scanners. 
Please follow the general installation guide for ospd-based scanners: ### Mandatory configuration The `ospd-openvas` startup parameter `--lock-file-dir` or the `lock_file_dir` config parameter of the `ospd.conf` config file needs to point to the same location / path of the `gvmd` daemon and the `openvas` command line tool (Default: `/var/run`). Examples for both are shipped within the `config` sub-folder of this project. Please see the `Details` section of the [GVM release notes](https://community.greenbone.net/t/gvm-20-08-stable-initial-release-2020-08-12/6312) for more details. ### Optional configuration Please note that although you can run `openvas` (launched from an `ospd-openvas` process) as a user without elevated privileges, it is recommended that you start `openvas` as `root` since a number of Network Vulnerability Tests (NVTs) require root privileges to perform certain operations like packet forgery. If you run `openvas` as a user without permission to perform these operations, your scan results are likely to be incomplete. As `openvas` will be launched from an `ospd-openvas` process with sudo, the next configuration is required in the sudoers file: sudo visudo add this line to allow the user running `ospd-openvas`, to launch `openvas` with root permissions ALL = NOPASSWD: /sbin/openvas If you set an install prefix, you have to update the path in the sudoers file too: Defaults secure_path=:/sbin ## Usage There are no special usage aspects for this module beyond the generic usage guide. Please follow the general usage guide for ospd-based scanners: ## Support For any question on the usage of ospd-openvas please use the [Greenbone Community Portal](https://community.greenbone.net/c/gse). If you found a problem with the software, please [create an issue](https://github.com/greenbone/ospd-openvas/issues) on GitHub. If you are a Greenbone customer you may alternatively or additionally forward your issue to the Greenbone Support Portal. 
## Maintainer This project is maintained by [Greenbone Networks GmbH](https://www.greenbone.net/). ## Contributing Your contributions are highly appreciated. Please [create a pull request](https://github.com/greenbone/ospd-openvas/pulls) on GitHub. Bigger changes need to be discussed with the development team via the [issues section at GitHub](https://github.com/greenbone/ospd-openvas/issues) first. For development you should use [poetry](https://python-poetry.org) to keep you python packages separated in different environments. First install poetry via pip python3 -m pip install --user poetry Afterwards run poetry install in the checkout directory of ospd-openvas (the directory containing the `pyproject.toml` file) to install all dependencies including the packages only required for development. The ospd-openvas repository uses [autohooks](https://github.com/greenbone/autohooks) to apply linting and auto formatting via git hooks. Please ensure the git hooks are active. poetry install poetry run autohooks activate --force ## License Copyright (C) 2018-2021 [Greenbone Networks GmbH](https://www.greenbone.net/) Licensed under the [GNU Affero General Public License v3.0 or later](COPYING). 
ospd-openvas-21.4.3/config/000077500000000000000000000000001413127500500154625ustar00rootroot00000000000000ospd-openvas-21.4.3/config/ospd-openvas.conf000066400000000000000000000003201413127500500207420ustar00rootroot00000000000000[OSPD - openvas] log_level = INFO socket_mode = 0o770 unix_socket = /run/ospd/ospd-openvas.sock pid_file = /run/ospd/ospd-openvas.pid log_file = /var/log/gvm/ospd-openvas.log lock_file_dir = /var/lib/openvas ospd-openvas-21.4.3/config/ospd-openvas.service000066400000000000000000000011221413127500500214560ustar00rootroot00000000000000[Unit] Description=OSPd Wrapper for the OpenVAS Scanner (ospd-openvas) Documentation=man:ospd-openvas(8) man:openvas(8) After=network.target networking.service redis-server@openvas.service Wants=redis-server@openvas.service ConditionKernelCommandLine=!recovery [Service] Type=forking User=gvm Group=gvm RuntimeDirectory=ospd RuntimeDirectoryMode=2775 PIDFile=/run/ospd/ospd-openvas.pid ExecStart=/usr/local/bin/ospd-openvas --config /etc/gvm/ospd-openvas.conf --log-config /etc/gvm/ospd-logging.conf SuccessExitStatus=SIGKILL Restart=always RestartSec=60 [Install] WantedBy=multi-user.target ospd-openvas-21.4.3/docs/000077500000000000000000000000001413127500500151455ustar00rootroot00000000000000ospd-openvas-21.4.3/docs/ospd-openvas.8000066400000000000000000000115671413127500500176660ustar00rootroot00000000000000.TH OSPD-OpenVAS 8 "August 2019" "Greenbone Vulnerability Management" "User Manuals" .SH NAME ospd-openvas \- The OpenVAS Wrapper of the Greenbone Vulnerability Management .SH SYNOPSIS .BI "ospd-openvas [\|-v\|] [\|-h\|] [\|-c " config-file\| "] [\|--log-file " log-file\| "] .SH DESCRIPTION .B Greenbone Vulnerability Management (GVM) is a vulnerability auditing and management framework made up of several modules. The OSPD OpenVAS Wrapper, .BR ospd-openvas is in charge of the communication between the scanner OpenVAS and the clients (GVMd and gvm-tools). 
.BI "-c " ", --config-file" Use the alternate configuration file instead of .I ~/.config/ospd.conf
.IP scaninfo_store_time Time in hours a scan is stored before being considered forgotten and being deleted from the scan table. Default 0, disabled. .IP max_scans Max. amount of parallel tasks that can be started. Default 0, disabled. .IP min_free_mem_scan_queue Minimum free memory in MB required to run the scan. If not enough free memory is available, the scan is queued. Default 0, disabled.
.SH SEE ALSO \fBopenvas(8)\f1, \fBgsad(8)\f1, \fBgvmd(8)\f1, \fBgreenbone-nvt-sync(8)\f1, .SH MORE INFORMATION The canonical places where you will find more information about OSPD-OpenVAS are: .RS .UR https://community.greenbone.net Community Portal .UE .br .UR https://github.com/greenbone Development Platform .UE .br .UR https://www.openvas.org Traditional home site .UE .RE .SH AUTHORS ospd-openvas code is developed by Greenbone Networks GmbH. ospd-openvas-21.4.3/ospd_openvas/000077500000000000000000000000001413127500500167155ustar00rootroot00000000000000ospd-openvas-21.4.3/ospd_openvas/__init__.py000066400000000000000000000014571413127500500210350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . from .__version__ import __version__ ospd-openvas-21.4.3/ospd_openvas/__version__.py000066400000000000000000000001471413127500500215520ustar00rootroot00000000000000# pylint: disable=invalid-name # THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH! 
__version__ = "21.4.3" ospd-openvas-21.4.3/ospd_openvas/daemon.py000066400000000000000000001366131413127500500205440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # pylint: disable=too-many-lines """ Setup for the OSP OpenVAS Server. """ import logging import time import copy from typing import Optional, Dict, List, Tuple, Iterator from datetime import datetime from pathlib import Path from os import geteuid from lxml.etree import tostring, SubElement, Element import psutil from ospd.ospd import OSPDaemon from ospd.scan import ScanProgress, ScanStatus from ospd.server import BaseServer from ospd.main import main as daemon_main from ospd.cvss import CVSS from ospd.vtfilter import VtsFilter from ospd.resultlist import ResultList from ospd_openvas import __version__ from ospd_openvas.errors import OspdOpenvasError from ospd_openvas.nvticache import NVTICache from ospd_openvas.db import MainDB, BaseDB from ospd_openvas.lock import LockFile from ospd_openvas.preferencehandler import PreferenceHandler from ospd_openvas.openvas import Openvas from ospd_openvas.vthelper import VtHelper logger = logging.getLogger(__name__) OSPD_DESC = """ This scanner runs OpenVAS to scan the target hosts. 
OpenVAS (Open Vulnerability Assessment Scanner) is a powerful scanner for vulnerabilities in IT infrastrucutres. The capabilities include unauthenticated scanning as well as authenticated scanning for various types of systems and services. For more details about OpenVAS see: http://www.openvas.org/ The current version of ospd-openvas is a simple frame, which sends the server parameters to the Greenbone Vulnerability Manager daemon (GVMd) and checks the existence of OpenVAS binary. But it can not run scans yet. """ OSPD_PARAMS = { 'auto_enable_dependencies': { 'type': 'boolean', 'name': 'auto_enable_dependencies', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': 'Automatically enable the plugins that are depended on', }, 'cgi_path': { 'type': 'string', 'name': 'cgi_path', 'default': '/cgi-bin:/scripts', 'mandatory': 1, 'visible_for_client': True, 'description': 'Look for default CGIs in /cgi-bin and /scripts', }, 'checks_read_timeout': { 'type': 'integer', 'name': 'checks_read_timeout', 'default': 5, 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Number of seconds that the security checks will ' + 'wait for when doing a recv()' ), }, 'non_simult_ports': { 'type': 'string', 'name': 'non_simult_ports', 'default': '139, 445, 3389, Services/irc', 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Prevent to make two connections on the same given ' + 'ports at the same time.' ), }, 'open_sock_max_attempts': { 'type': 'integer', 'name': 'open_sock_max_attempts', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Number of unsuccessful retries to open the socket ' + 'before to set the port as closed.' ), }, 'timeout_retry': { 'type': 'integer', 'name': 'timeout_retry', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Number of retries when a socket connection attempt ' + 'timesout.' 
), }, 'optimize_test': { 'type': 'boolean', 'name': 'optimize_test', 'default': 1, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'By default, optimize_test is enabled which means openvas does ' + 'trust the remote host banners and is only launching plugins ' + 'against the services they have been designed to check. ' + 'For example it will check a web server claiming to be IIS only ' + 'for IIS related flaws but will skip plugins testing for Apache ' + 'flaws, and so on. This default behavior is used to optimize ' + 'the scanning performance and to avoid false positives. ' + 'If you are not sure that the banners of the remote host ' + 'have been tampered with, you can disable this option.' ), }, 'plugins_timeout': { 'type': 'integer', 'name': 'plugins_timeout', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': 'This is the maximum lifetime, in seconds of a plugin.', }, 'report_host_details': { 'type': 'boolean', 'name': 'report_host_details', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'safe_checks': { 'type': 'boolean', 'name': 'safe_checks', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Disable the plugins with potential to crash ' + 'the remote services' ), }, 'scanner_plugins_timeout': { 'type': 'integer', 'name': 'scanner_plugins_timeout', 'default': 36000, 'mandatory': 1, 'visible_for_client': True, 'description': 'Like plugins_timeout, but for ACT_SCANNER plugins.', }, 'time_between_request': { 'type': 'integer', 'name': 'time_between_request', 'default': 0, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Allow to set a wait time between two actions ' + '(open, send, close).' 
), }, 'unscanned_closed': { 'type': 'boolean', 'name': 'unscanned_closed', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'unscanned_closed_udp': { 'type': 'boolean', 'name': 'unscanned_closed_udp', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'expand_vhosts': { 'type': 'boolean', 'name': 'expand_vhosts', 'default': 1, 'mandatory': 0, 'visible_for_client': True, 'description': 'Whether to expand the target hosts ' + 'list of vhosts with values gathered from sources ' + 'such as reverse-lookup queries and VT checks ' + 'for SSL/TLS certificates.', }, 'test_empty_vhost': { 'type': 'boolean', 'name': 'test_empty_vhost', 'default': 0, 'mandatory': 0, 'visible_for_client': True, 'description': 'If set to yes, the scanner will ' + 'also test the target by using empty vhost value ' + 'in addition to the targets associated vhost values.', }, 'max_hosts': { 'type': 'integer', 'name': 'max_hosts', 'default': 30, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'The maximum number of hosts to test at the same time which ' + 'should be given to the client (which can override it). ' + 'This value must be computed given your bandwidth, ' + 'the number of hosts you want to test, your amount of ' + 'memory and the performance of your processor(s).' ), }, 'max_checks': { 'type': 'integer', 'name': 'max_checks', 'default': 10, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'The number of plugins that will run against each host being ' + 'tested. Note that the total number of process will be max ' + 'checks x max_hosts so you need to find a balance between ' + 'these two options. Note that launching too many plugins at ' + 'the same time may disable the remote host, either temporarily ' + '(ie: inetd closes its ports) or definitely (the remote host ' + 'crash because it is asked to do too many things at the ' + 'same time), so be careful.' 
), }, 'port_range': { 'type': 'string', 'name': 'port_range', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'This is the default range of ports that the scanner plugins will ' + 'probe. The syntax of this option is flexible, it can be a ' + 'single range ("1-1500"), several ports ("21,23,80"), several ' + 'ranges of ports ("1-1500,32000-33000"). Note that you can ' + 'specify UDP and TCP ports by prefixing each range by T or U. ' + 'For instance, the following range will make openvas scan UDP ' + 'ports 1 to 1024 and TCP ports 1 to 65535 : ' + '"T:1-65535,U:1-1024".' ), }, 'test_alive_hosts_only': { 'type': 'boolean', 'name': 'test_alive_hosts_only', 'default': 0, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'If this option is set, openvas will scan the target list for ' + 'alive hosts in a separate process while only testing those ' + 'hosts which are identified as alive. This boosts the scan ' + 'speed of target ranges with a high amount of dead hosts ' + 'significantly.' ), }, 'source_iface': { 'type': 'string', 'name': 'source_iface', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Name of the network interface that will be used as the source ' + 'of connections established by openvas. The scan won\'t be ' + 'launched if the value isn\'t authorized according to ' + '(sys_)ifaces_allow / (sys_)ifaces_deny if present.' ), }, 'ifaces_allow': { 'type': 'string', 'name': 'ifaces_allow', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of interfaces names that are authorized ' + 'as source_iface values.' ), }, 'ifaces_deny': { 'type': 'string', 'name': 'ifaces_deny', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of interfaces names that are not ' + 'authorized as source_iface values.' 
def safe_int(value: str) -> Optional[int]:
    """Try to convert a value into an integer.

    Returns:
        The integer representation of ``value``, or None when the
        conversion raises a ValueError or TypeError (e.g. for malformed
        strings or None input).
    """
    try:
        converted = int(value)
    except (ValueError, TypeError):
        return None
    return converted
""" filters = self.parse_filters(vt_filter) if not filters: return None if not self.nvti: return None vt_oid_list = [vtlist[1] for vtlist in self.nvti.get_oids()] vt_oid_list_temp = copy.copy(vt_oid_list) vthelper = VtHelper(self.nvti) for element, oper, filter_val in filters: for vt_oid in vt_oid_list_temp: if vt_oid not in vt_oid_list: continue vt = vthelper.get_single_vt(vt_oid) if vt is None or not vt.get(element): vt_oid_list.remove(vt_oid) continue elem_val = vt.get(element) val = self.format_filter_value(element, elem_val) if self.filter_operator[oper](val, filter_val): continue else: vt_oid_list.remove(vt_oid) return vt_oid_list class OSPDopenvas(OSPDaemon): """Class for ospd-openvas daemon.""" def __init__( self, *, niceness=None, lock_file_dir='/var/lib/openvas', **kwargs ): """Initializes the ospd-openvas daemon's internal data.""" self.main_db = MainDB() self.nvti = NVTICache(self.main_db) super().__init__( customvtfilter=OpenVasVtsFilter(self.nvti), storage=dict, file_storage_dir=lock_file_dir, **kwargs, ) self.server_version = __version__ self._niceness = str(niceness) self.feed_lock = LockFile(Path(lock_file_dir) / 'feed-update.lock') self.daemon_info['name'] = 'OSPd OpenVAS' self.scanner_info['name'] = 'openvas' self.scanner_info['version'] = '' # achieved during self.init() self.scanner_info['description'] = OSPD_DESC for name, param in OSPD_PARAMS.items(): self.set_scanner_param(name, param) self._sudo_available = None self._is_running_as_root = None self.scan_only_params = dict() def init(self, server: BaseServer) -> None: self.scan_collection.init() server.start(self.handle_client_stream) self.scanner_info['version'] = Openvas.get_version() self.set_params_from_openvas_settings() with self.feed_lock.wait_for_lock(): Openvas.load_vts_into_redis() current_feed = self.nvti.get_feed_version() self.set_vts_version(vts_version=current_feed) logger.debug("Calculating vts integrity check hash...") vthelper = VtHelper(self.nvti) self.vts.sha256_hash = 
vthelper.calculate_vts_collection_hash() self.initialized = True def set_params_from_openvas_settings(self): """Set OSPD_PARAMS with the params taken from the openvas executable.""" param_list = Openvas.get_settings() for elem in param_list: # pylint: disable=consider-using-dict-items if elem not in OSPD_PARAMS: self.scan_only_params[elem] = param_list[elem] else: OSPD_PARAMS[elem]['default'] = param_list[elem] def feed_is_outdated(self, current_feed: str) -> Optional[bool]: """Compare the current feed with the one in the disk. Return: False if there is no new feed. True if the feed version in disk is newer than the feed in redis cache. None if there is no feed on the disk. """ plugins_folder = self.scan_only_params.get('plugins_folder') if not plugins_folder: raise OspdOpenvasError("Error: Path to plugins folder not found.") feed_info_file = Path(plugins_folder) / 'plugin_feed_info.inc' if not feed_info_file.exists(): self.set_params_from_openvas_settings() logger.debug('Plugins feed file %s not found.', feed_info_file) return None current_feed = safe_int(current_feed) if current_feed is None: logger.debug( "Wrong PLUGIN_SET format in plugins feed file %s. Format has to" " be yyyymmddhhmm. For example 'PLUGIN_SET = \"201910251033\"'", feed_info_file, ) feed_date = None with feed_info_file.open() as fcontent: for line in fcontent: if "PLUGIN_SET" in line: feed_date = line.split('=', 1)[1] feed_date = feed_date.strip() feed_date = feed_date.replace(';', '') feed_date = feed_date.replace('"', '') feed_date = safe_int(feed_date) break logger.debug("Current feed version: %s", current_feed) logger.debug("Plugin feed version: %s", feed_date) return ( (not feed_date) or (not current_feed) or (current_feed < feed_date) ) def check_feed(self): """Check if there is a feed update. Wait until all the running scans finished. Set a flag to announce there is a pending feed update, which avoids to start a new scan. 
""" if not self.vts.is_cache_available: return current_feed = self.nvti.get_feed_version() is_outdated = self.feed_is_outdated(current_feed) # Check if the nvticache in redis is outdated if not current_feed or is_outdated: with self.feed_lock as fl: if fl.has_lock(): self.initialized = False Openvas.load_vts_into_redis() current_feed = self.nvti.get_feed_version() self.set_vts_version(vts_version=current_feed) vthelper = VtHelper(self.nvti) self.vts.sha256_hash = ( vthelper.calculate_vts_collection_hash() ) self.initialized = True else: logger.debug( "The feed was not upload or it is outdated, " "but other process is locking the update. " "Trying again later..." ) return def scheduler(self): """This method is called periodically to run tasks.""" self.check_feed() def get_vt_iterator( self, vt_selection: List[str] = None, details: bool = True ) -> Iterator[Tuple[str, Dict]]: vthelper = VtHelper(self.nvti) return vthelper.get_vt_iterator(vt_selection, details) @staticmethod def get_custom_vt_as_xml_str(vt_id: str, custom: Dict) -> str: """Return an xml element with custom metadata formatted as string. Arguments: vt_id: VT OID. Only used for logging in error case. custom: Dictionary with the custom metadata. Return: Xml element as string. """ _custom = Element('custom') for key, val in custom.items(): xml_key = SubElement(_custom, key) try: xml_key.text = val except ValueError as e: logger.warning( "Not possible to parse custom tag for VT %s: %s", vt_id, e ) return tostring(_custom).decode('utf-8') @staticmethod def get_severities_vt_as_xml_str(vt_id: str, severities: Dict) -> str: """Return an xml element with severities as string. Arguments: vt_id: VT OID. Only used for logging in error case. severities: Dictionary with the severities. Return: Xml element as string. 
""" _severities = Element('severities') _severity = SubElement(_severities, 'severity') if 'severity_base_vector' in severities: try: _value = SubElement(_severity, 'value') _value.text = severities.get('severity_base_vector') except ValueError as e: logger.warning( "Not possible to parse severity tag for vt %s: %s", vt_id, e ) if 'severity_origin' in severities: _origin = SubElement(_severity, 'origin') _origin.text = severities.get('severity_origin') if 'severity_date' in severities: _date = SubElement(_severity, 'date') _date.text = severities.get('severity_date') if 'severity_type' in severities: _severity.set('type', severities.get('severity_type')) return tostring(_severities).decode('utf-8') @staticmethod def get_params_vt_as_xml_str(vt_id: str, vt_params: Dict) -> str: """Return an xml element with params formatted as string. Arguments: vt_id: VT OID. Only used for logging in error case. vt_params: Dictionary with the VT parameters. Return: Xml element as string. """ vt_params_xml = Element('params') for _pref_id, prefs in vt_params.items(): vt_param = Element('param') vt_param.set('type', prefs['type']) vt_param.set('id', _pref_id) xml_name = SubElement(vt_param, 'name') try: xml_name.text = prefs['name'] except ValueError as e: logger.warning( "Not possible to parse parameter for VT %s: %s", vt_id, e ) if prefs['default']: xml_def = SubElement(vt_param, 'default') try: xml_def.text = prefs['default'] except ValueError as e: logger.warning( "Not possible to parse default parameter for VT %s: %s", vt_id, e, ) vt_params_xml.append(vt_param) return tostring(vt_params_xml).decode('utf-8') @staticmethod def get_refs_vt_as_xml_str(vt_id: str, vt_refs: Dict) -> str: """Return an xml element with references formatted as string. Arguments: vt_id: VT OID. Only used for logging in error case. vt_refs: Dictionary with the VT references. Return: Xml element as string. 
""" vt_refs_xml = Element('refs') for ref_type, ref_values in vt_refs.items(): for value in ref_values: vt_ref = Element('ref') if ref_type == "xref" and value: for xref in value.split(', '): try: _type, _id = xref.split(':', 1) except ValueError as e: logger.error( 'Not possible to parse xref "%s" for VT %s: %s', xref, vt_id, e, ) continue vt_ref.set('type', _type.lower()) vt_ref.set('id', _id) elif value: vt_ref.set('type', ref_type.lower()) vt_ref.set('id', value) else: continue vt_refs_xml.append(vt_ref) return tostring(vt_refs_xml).decode('utf-8') @staticmethod def get_dependencies_vt_as_xml_str( vt_id: str, vt_dependencies: List ) -> str: """Return an xml element with dependencies as string. Arguments: vt_id: VT OID. Only used for logging in error case. vt_dependencies: List with the VT dependencies. Return: Xml element as string. """ vt_deps_xml = Element('dependencies') for dep in vt_dependencies: _vt_dep = Element('dependency') if VT_BASE_OID in dep: _vt_dep.set('vt_id', dep) else: logger.error( 'Not possible to add dependency %s for VT %s', dep, vt_id ) continue vt_deps_xml.append(_vt_dep) return tostring(vt_deps_xml).decode('utf-8') @staticmethod def get_creation_time_vt_as_xml_str( vt_id: str, vt_creation_time: str ) -> str: """Return creation time as string. Arguments: vt_id: VT OID. Only used for logging in error case. vt_creation_time: String with the VT creation time. Return: Xml element as string. """ _time = Element('creation_time') try: _time.text = vt_creation_time except ValueError as e: logger.warning( "Not possible to parse creation time for VT %s: %s", vt_id, e ) return tostring(_time).decode('utf-8') @staticmethod def get_modification_time_vt_as_xml_str( vt_id: str, vt_modification_time: str ) -> str: """Return modification time as string. Arguments: vt_id: VT OID. Only used for logging in error case. vt_modification_time: String with the VT modification time. Return: Xml element as string. 
""" _time = Element('modification_time') try: _time.text = vt_modification_time except ValueError as e: logger.warning( "Not possible to parse modification time for VT %s: %s", vt_id, e, ) return tostring(_time).decode('utf-8') @staticmethod def get_summary_vt_as_xml_str(vt_id: str, summary: str) -> str: """Return summary as string. Arguments: vt_id: VT OID. Only used for logging in error case. summary: String with a VT summary. Return: Xml element as string. """ _summary = Element('summary') try: _summary.text = summary except ValueError as e: logger.warning( "Not possible to parse summary tag for VT %s: %s", vt_id, e ) return tostring(_summary).decode('utf-8') @staticmethod def get_impact_vt_as_xml_str(vt_id: str, impact) -> str: """Return impact as string. Arguments: vt_id (str): VT OID. Only used for logging in error case. impact (str): String which explain the vulneravility impact. Return: string: xml element as string. """ _impact = Element('impact') try: _impact.text = impact except ValueError as e: logger.warning( "Not possible to parse impact tag for VT %s: %s", vt_id, e ) return tostring(_impact).decode('utf-8') @staticmethod def get_affected_vt_as_xml_str(vt_id: str, affected: str) -> str: """Return affected as string. Arguments: vt_id: VT OID. Only used for logging in error case. affected: String which explain what is affected. Return: Xml element as string. """ _affected = Element('affected') try: _affected.text = affected except ValueError as e: logger.warning( "Not possible to parse affected tag for VT %s: %s", vt_id, e ) return tostring(_affected).decode('utf-8') @staticmethod def get_insight_vt_as_xml_str(vt_id: str, insight: str) -> str: """Return insight as string. Arguments: vt_id: VT OID. Only used for logging in error case. insight: String giving an insight of the vulnerability. Return: Xml element as string. 
""" _insight = Element('insight') try: _insight.text = insight except ValueError as e: logger.warning( "Not possible to parse insight tag for VT %s: %s", vt_id, e ) return tostring(_insight).decode('utf-8') @staticmethod def get_solution_vt_as_xml_str( vt_id: str, solution: str, solution_type: Optional[str] = None, solution_method: Optional[str] = None, ) -> str: """Return solution as string. Arguments: vt_id: VT OID. Only used for logging in error case. solution: String giving a possible solution. solution_type: A solution type solution_method: A solution method Return: Xml element as string. """ _solution = Element('solution') try: _solution.text = solution except ValueError as e: logger.warning( "Not possible to parse solution tag for VT %s: %s", vt_id, e ) if solution_type: _solution.set('type', solution_type) if solution_method: _solution.set('method', solution_method) return tostring(_solution).decode('utf-8') @staticmethod def get_detection_vt_as_xml_str( vt_id: str, detection: Optional[str] = None, qod_type: Optional[str] = None, qod: Optional[str] = None, ) -> str: """Return detection as string. Arguments: vt_id: VT OID. Only used for logging in error case. detection: String which explain how the vulnerability was detected. qod_type: qod type. qod: qod value. Return: Xml element as string. 
""" _detection = Element('detection') if detection: try: _detection.text = detection except ValueError as e: logger.warning( "Not possible to parse detection tag for VT %s: %s", vt_id, e, ) if qod_type: _detection.set('qod_type', qod_type) elif qod: _detection.set('qod', qod) return tostring(_detection).decode('utf-8') @property def is_running_as_root(self) -> bool: """Check if it is running as root user.""" if self._is_running_as_root is not None: return self._is_running_as_root self._is_running_as_root = False if geteuid() == 0: self._is_running_as_root = True return self._is_running_as_root @property def sudo_available(self) -> bool: """Checks that sudo is available""" if self._sudo_available is not None: return self._sudo_available if self.is_running_as_root: self._sudo_available = False return self._sudo_available self._sudo_available = Openvas.check_sudo() return self._sudo_available def check(self) -> bool: """Checks that openvas command line tool is found and is executable.""" has_openvas = Openvas.check() if not has_openvas: logger.error( 'openvas executable not available. Please install openvas' ' into your PATH.' ) return has_openvas def report_openvas_scan_status(self, kbdb: BaseDB, scan_id: str): """Get all status entries from redis kb. Arguments: kbdb: KB context where to get the status from. scan_id: Scan ID to identify the current scan. 
""" all_status = kbdb.get_scan_status() all_hosts = dict() finished_hosts = list() for res in all_status: try: current_host, launched, total = res.split('/') except ValueError: continue try: if float(total) == 0: continue elif float(total) == ScanProgress.DEAD_HOST: host_prog = ScanProgress.DEAD_HOST else: host_prog = int((float(launched) / float(total)) * 100) except TypeError: continue all_hosts[current_host] = host_prog if ( host_prog == ScanProgress.DEAD_HOST or host_prog == ScanProgress.FINISHED ): finished_hosts.append(current_host) logger.debug( '%s: Host %s has progress: %d', scan_id, current_host, host_prog ) self.set_scan_progress_batch(scan_id, host_progress=all_hosts) self.sort_host_finished(scan_id, finished_hosts) def get_severity_score(self, vt_aux: dict) -> Optional[float]: """Return the severity score for the given oid. Arguments: vt_aux: VT element from which to get the severity vector Returns: The calculated cvss base value. None if there is no severity vector or severity type is not cvss base version 2. 
""" if vt_aux: severity_type = vt_aux['severities'].get('severity_type') severity_vector = vt_aux['severities'].get('severity_base_vector') if severity_type == "cvss_base_v2" and severity_vector: return CVSS.cvss_base_v2_value(severity_vector) elif severity_type == "cvss_base_v3" and severity_vector: return CVSS.cvss_base_v3_value(severity_vector) return None def report_openvas_results(self, db: BaseDB, scan_id: str) -> bool: """Get all result entries from redis kb.""" vthelper = VtHelper(self.nvti) # Result messages come in the next form, with optional uri field # type ||| host ip ||| hostname ||| port ||| OID ||| value [|||uri] all_results = db.get_result() res_list = ResultList() total_dead = 0 for res in all_results: if not res: continue msg = res.split('|||') roid = msg[4].strip() rqod = '' rname = '' current_host = msg[1].strip() if msg[1] else '' rhostname = msg[2].strip() if msg[2] else '' host_is_dead = "Host dead" in msg[5] or msg[0] == "DEADHOST" host_deny = "Host access denied" in msg[5] start_end_msg = msg[0] == "HOST_START" or msg[0] == "HOST_END" host_count = msg[0] == "HOSTS_COUNT" vt_aux = None # URI is optional and msg list length must be checked ruri = '' if len(msg) > 6: ruri = msg[6] if ( roid and not host_is_dead and not host_deny and not start_end_msg and not host_count ): vt_aux = vthelper.get_single_vt(roid) if ( not vt_aux and not host_is_dead and not host_deny and not start_end_msg and not host_count ): logger.warning('Invalid VT oid %s for a result', roid) if vt_aux: if vt_aux.get('qod_type'): qod_t = vt_aux.get('qod_type') rqod = self.nvti.QOD_TYPES[qod_t] elif vt_aux.get('qod'): rqod = vt_aux.get('qod') rname = vt_aux.get('name') if msg[0] == 'ERRMSG': res_list.add_scan_error_to_list( host=current_host, hostname=rhostname, name=rname, value=msg[5], port=msg[3], test_id=roid, uri=ruri, ) elif msg[0] == 'HOST_START' or msg[0] == 'HOST_END': res_list.add_scan_log_to_list( host=current_host, name=msg[0], value=msg[5], ) elif msg[0] == 
'LOG': res_list.add_scan_log_to_list( host=current_host, hostname=rhostname, name=rname, value=msg[5], port=msg[3], qod=rqod, test_id=roid, uri=ruri, ) elif msg[0] == 'HOST_DETAIL': res_list.add_scan_host_detail_to_list( host=current_host, hostname=rhostname, name=rname, value=msg[5], uri=ruri, ) elif msg[0] == 'ALARM': rseverity = self.get_severity_score(vt_aux) res_list.add_scan_alarm_to_list( host=current_host, hostname=rhostname, name=rname, value=msg[5], port=msg[3], test_id=roid, severity=rseverity, qod=rqod, uri=ruri, ) # To process non-scanned dead hosts when # test_alive_host_only in openvas is enable elif msg[0] == 'DEADHOST': try: total_dead = int(msg[5]) except TypeError: logger.debug('Error processing dead host count') # To update total host count if msg[0] == 'HOSTS_COUNT': try: count_total = int(msg[5]) logger.debug( '%s: Set total hosts counted by OpenVAS: %d', scan_id, count_total, ) self.set_scan_total_hosts(scan_id, count_total) except TypeError: logger.debug('Error processing total host count') # Insert result batch into the scan collection table. if len(res_list): self.scan_collection.add_result_list(scan_id, res_list) logger.debug( '%s: Inserting %d results into scan collection table', scan_id, len(res_list), ) if total_dead: logger.debug( '%s: Set dead hosts counted by OpenVAS: %d', scan_id, total_dead, ) self.scan_collection.set_amount_dead_hosts( scan_id, total_dead=total_dead ) return len(res_list) > 0 @staticmethod def is_openvas_process_alive(openvas_process: psutil.Popen) -> bool: if openvas_process.status() == psutil.STATUS_ZOMBIE: logger.debug("Process is a Zombie, waiting for it to clean up") openvas_process.wait() return openvas_process.is_running() def stop_scan_cleanup( self, kbdb: BaseDB, scan_id: str, ovas_process: psutil.Popen, # pylint: disable=arguments-differ ): """Set a key in redis to indicate the wrapper is stopped. 
It is done through redis because it is a new multiprocess instance and it is not possible to reach the variables of the grandchild process. Indirectly sends SIGUSR1 to the running openvas scan process via an invocation of openvas with the --scan-stop option to stop it.""" if kbdb: # Set stop flag in redis kbdb.stop_scan(scan_id) # Check if openvas is running if ovas_process.is_running(): # Cleaning in case of Zombie Process if ovas_process.status() == psutil.STATUS_ZOMBIE: logger.debug( '%s: Process with PID %s is a Zombie process.' ' Cleaning up...', scan_id, ovas_process.pid, ) ovas_process.wait() # Stop openvas process and wait until it stopped else: can_stop_scan = Openvas.stop_scan( scan_id, not self.is_running_as_root and self.sudo_available, ) if not can_stop_scan: logger.debug( 'Not possible to stop scan process: %s.', ovas_process, ) return logger.debug('Stopping process: %s', ovas_process) while ovas_process.is_running(): if ovas_process.status() == psutil.STATUS_ZOMBIE: ovas_process.wait() else: time.sleep(0.1) else: logger.debug( "%s: Process with PID %s already stopped", scan_id, ovas_process.pid, ) # Clean redis db for scan_db in kbdb.get_scan_databases(): self.main_db.release_database(scan_db) def exec_scan(self, scan_id: str): """Starts the OpenVAS scanner for scan_id scan.""" do_not_launch = False kbdb = self.main_db.get_new_kb_database() scan_prefs = PreferenceHandler( scan_id, kbdb, self.scan_collection, self.nvti ) kbdb.add_scan_id(scan_id) scan_prefs.prepare_target_for_openvas() if not scan_prefs.prepare_ports_for_openvas(): self.add_scan_error( scan_id, name='', host='', value='No port list defined.' ) do_not_launch = True # Set credentials if not scan_prefs.prepare_credentials_for_openvas(): self.add_scan_error( scan_id, name='', host='', value='Malformed credential.' ) do_not_launch = True if not scan_prefs.prepare_plugins_for_openvas(): self.add_scan_error( scan_id, name='', host='', value='No VTS to run.' 
) do_not_launch = True scan_prefs.prepare_main_kbindex_for_openvas() scan_prefs.prepare_host_options_for_openvas() scan_prefs.prepare_scan_params_for_openvas(OSPD_PARAMS) scan_prefs.prepare_reverse_lookup_opt_for_openvas() scan_prefs.prepare_alive_test_option_for_openvas() # VT preferences are stored after all preferences have been processed, # since alive tests preferences have to be able to overwrite default # preferences of ping_host.nasl for the classic method. scan_prefs.prepare_nvt_preferences() scan_prefs.prepare_boreas_alive_test() # Release memory used for scan preferences. del scan_prefs if do_not_launch or kbdb.scan_is_stopped(scan_id): self.main_db.release_database(kbdb) return openvas_process = Openvas.start_scan( scan_id, not self.is_running_as_root and self.sudo_available, self._niceness, ) if openvas_process is None: self.main_db.release_database(kbdb) return kbdb.add_scan_process_id(openvas_process.pid) logger.debug('pid = %s', openvas_process.pid) # Wait until the scanner starts and loads all the preferences. while kbdb.get_status(scan_id) == 'new': res = openvas_process.poll() if res and res < 0: self.stop_scan_cleanup(kbdb, scan_id, openvas_process) logger.error( 'It was not possible run the task %s, since openvas ended ' 'unexpectedly with errors during launching.', scan_id, ) return time.sleep(1) got_results = False while True: openvas_process_is_alive = self.is_openvas_process_alive( openvas_process ) target_is_finished = kbdb.target_is_finished(scan_id) scan_stopped = self.get_scan_status(scan_id) == ScanStatus.STOPPED # Report new Results and update status got_results = self.report_openvas_results(kbdb, scan_id) self.report_openvas_scan_status(kbdb, scan_id) # Check if the client stopped the whole scan if scan_stopped: logger.debug('%s: Scan stopped by the client', scan_id) self.stop_scan_cleanup(kbdb, scan_id, openvas_process) # clean main_db, but wait for scanner to finish. 
while not kbdb.target_is_finished(scan_id): logger.debug('%s: Waiting for openvas to finish', scan_id) time.sleep(1) self.main_db.release_database(kbdb) return # Scan end. No kb in use for this scan id if target_is_finished: logger.debug('%s: Target is finished', scan_id) break if not openvas_process_is_alive: logger.error( 'Task %s was unexpectedly stopped or killed.', scan_id, ) self.add_scan_error( scan_id, name='', host='', value='Task was unexpectedly stopped or killed.', ) # check for scanner error messages before leaving. self.report_openvas_results(kbdb, scan_id) kbdb.stop_scan(scan_id) for scan_db in kbdb.get_scan_databases(): self.main_db.release_database(scan_db) self.main_db.release_database(kbdb) return # Wait a second before trying to get result from redis if there # was no results before. # Otherwise, wait 50 msec to give access other process to redis. if not got_results: time.sleep(1) else: time.sleep(0.05) got_results = False # Delete keys from KB related to this scan task. logger.debug('%s: End Target. Release main database', scan_id) self.main_db.release_database(kbdb) def main(): """OSP openvas main function.""" daemon_main('OSPD - openvas', OSPDopenvas) if __name__ == '__main__': main() ospd-openvas-21.4.3/ospd_openvas/db.py000066400000000000000000000475451413127500500176730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. 
# # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . """ Access management for redis-based OpenVAS Scanner Database.""" import logging import sys import time from typing import List, NewType, Optional, Iterable, Iterator, Tuple import redis from ospd.errors import RequiredArgument from ospd_openvas.errors import OspdOpenvasError from ospd_openvas.openvas import Openvas SOCKET_TIMEOUT = 60 # in seconds LIST_FIRST_POS = 0 LIST_LAST_POS = -1 LIST_ALL = 0 # Possible positions of nvt values in cache list. NVT_META_FIELDS = [ "NVT_FILENAME_POS", "NVT_REQUIRED_KEYS_POS", "NVT_MANDATORY_KEYS_POS", "NVT_EXCLUDED_KEYS_POS", "NVT_REQUIRED_UDP_PORTS_POS", "NVT_REQUIRED_PORTS_POS", "NVT_DEPENDENCIES_POS", "NVT_TAGS_POS", "NVT_CVES_POS", "NVT_BIDS_POS", "NVT_XREFS_POS", "NVT_CATEGORY_POS", "NVT_TIMEOUT_POS", "NVT_FAMILY_POS", "NVT_NAME_POS", ] # Name of the namespace usage bitmap in redis. DBINDEX_NAME = "GVM.__GlobalDBIndex" logger = logging.getLogger(__name__) # Types RedisCtx = NewType('RedisCtx', redis.Redis) class OpenvasDB: """Class to connect to redis, to perform queries, and to move from a KB to another.""" _db_address = None @classmethod def get_database_address(cls) -> Optional[str]: if not cls._db_address: settings = Openvas.get_settings() cls._db_address = settings.get('db_address') return cls._db_address @classmethod def create_context( cls, dbnum: Optional[int] = 0, encoding: Optional[str] = 'latin-1' ) -> RedisCtx: """Connect to redis to the given database or to the default db 0 . Arguments: dbnum: The db number to connect to. encoding: The encoding to be used to read and write. Return a new redis context on success. 
""" tries = 5 while tries: try: ctx = redis.Redis( unix_socket_path=cls.get_database_address(), db=dbnum, socket_timeout=SOCKET_TIMEOUT, encoding=encoding, decode_responses=True, ) ctx.keys("test") except (redis.exceptions.ConnectionError, FileNotFoundError) as err: logger.debug( 'Redis connection lost: %s. Trying again in 5 seconds.', err ) tries = tries - 1 time.sleep(5) continue break if not tries: logger.error('Redis Error: Not possible to connect to the kb.') sys.exit(1) return ctx @classmethod def find_database_by_pattern( cls, pattern: str, max_database_index: int ) -> Tuple[Optional[RedisCtx], Optional[int]]: """Search a pattern inside all kbs up to max_database_index. Returns the redis context for the db and its index as a tuple or None, None if the db with the pattern couldn't be found. """ for i in range(0, max_database_index): ctx = cls.create_context(i) if ctx.keys(pattern): return (ctx, i) return (None, None) @staticmethod def select_database(ctx: RedisCtx, kbindex: str): """Use an existent redis connection and select a redis kb. Arguments: ctx: Redis context to use. kbindex: The new kb to select """ if not ctx: raise RequiredArgument('select_database', 'ctx') if not kbindex: raise RequiredArgument('select_database', 'kbindex') ctx.execute_command('SELECT ' + str(kbindex)) @staticmethod def get_list_item( ctx: RedisCtx, name: str, start: Optional[int] = LIST_FIRST_POS, end: Optional[int] = LIST_LAST_POS, ) -> Optional[list]: """Returns the specified elements from `start` to `end` of the list stored as `name`. Arguments: ctx: Redis context to use. name: key name of a list. start: first range element to get. end: last range element to get. Return List specified elements in the key. 
""" if not ctx: raise RequiredArgument('get_list_item', 'ctx') if not name: raise RequiredArgument('get_list_item', 'name') return ctx.lrange(name, start, end) @staticmethod def get_last_list_item(ctx: RedisCtx, name: str) -> str: if not ctx: raise RequiredArgument('get_last_list_item', 'ctx') if not name: raise RequiredArgument('get_last_list_item', 'name') return ctx.rpop(name) @staticmethod def pop_list_items(ctx: RedisCtx, name: str) -> List[str]: if not ctx: raise RequiredArgument('pop_list_items', 'ctx') if not name: raise RequiredArgument('pop_list_items', 'name') pipe = ctx.pipeline() pipe.lrange(name, LIST_FIRST_POS, LIST_LAST_POS) pipe.delete(name) results, redis_return_code = pipe.execute() # The results are left-pushed. To preserver the order # the result list must be reversed. if redis_return_code: results.reverse() else: results = [] return results @staticmethod def get_key_count(ctx: RedisCtx, pattern: Optional[str] = None) -> int: """Get the number of keys matching with the pattern. Arguments: ctx: Redis context to use. pattern: pattern used as filter. """ if not pattern: pattern = "*" if not ctx: raise RequiredArgument('get_key_count', 'ctx') return len(ctx.keys(pattern)) @staticmethod def remove_list_item(ctx: RedisCtx, key: str, value: str): """Remove item from the key list. Arguments: ctx: Redis context to use. key: key name of a list. value: Value to be removed from the key. """ if not ctx: raise RequiredArgument('remove_list_item ', 'ctx') if not key: raise RequiredArgument('remove_list_item', 'key') if not value: raise RequiredArgument('remove_list_item ', 'value') ctx.lrem(key, count=LIST_ALL, value=value) @staticmethod def get_single_item( ctx: RedisCtx, name: str, index: Optional[int] = LIST_FIRST_POS, ) -> Optional[str]: """Get a single KB element. Arguments: ctx: Redis context to use. name: key name of a list. index: index of the element to be return. Defaults to the first element in the list. 
Return the first element of the list or None if the name couldn't be found. """ if not ctx: raise RequiredArgument('get_single_item', 'ctx') if not name: raise RequiredArgument('get_single_item', 'name') return ctx.lindex(name, index) @staticmethod def add_single_list(ctx: RedisCtx, name: str, values: Iterable): """Add a single KB element with one or more values. The values can be repeated. If the key already exists will be removed an completely replaced. Arguments: ctx: Redis context to use. name: key name of a list. value: Elements to add to the key. """ if not ctx: raise RequiredArgument('add_single_list', 'ctx') if not name: raise RequiredArgument('add_single_list', 'name') if not values: raise RequiredArgument('add_single_list', 'value') pipe = ctx.pipeline() pipe.delete(name) pipe.rpush(name, *values) pipe.execute() @staticmethod def add_single_item(ctx: RedisCtx, name: str, values: Iterable): """Add a single KB element with one or more values. Don't add duplicated values during this operation, but if the the same values already exists under the key, this will not be overwritten. Arguments: ctx: Redis context to use. name: key name of a list. value: Elements to add to the key. """ if not ctx: raise RequiredArgument('add_single_item', 'ctx') if not name: raise RequiredArgument('add_single_item', 'name') if not values: raise RequiredArgument('add_single_item', 'value') ctx.rpush(name, *set(values)) @staticmethod def set_single_item(ctx: RedisCtx, name: str, value: Iterable): """Set (replace) a single KB element. If the same key exists in the kb, it is completed removed. Values added are unique. Arguments: ctx: Redis context to use. name: key name of a list. value: New elements to add to the key. 
""" if not ctx: raise RequiredArgument('set_single_item', 'ctx') if not name: raise RequiredArgument('set_single_item', 'name') if not value: raise RequiredArgument('set_single_item', 'value') pipe = ctx.pipeline() pipe.delete(name) pipe.rpush(name, *set(value)) pipe.execute() @staticmethod def get_pattern(ctx: RedisCtx, pattern: str) -> List: """Get all items stored under a given pattern. Arguments: ctx: Redis context to use. pattern: key pattern to match. Return a list with the elements under the matched key. """ if not ctx: raise RequiredArgument('get_pattern', 'ctx') if not pattern: raise RequiredArgument('get_pattern', 'pattern') items = ctx.keys(pattern) elem_list = [] for item in items: elem_list.append( [ item, ctx.lrange(item, start=LIST_FIRST_POS, end=LIST_LAST_POS), ] ) return elem_list @classmethod def get_keys_by_pattern(cls, ctx: RedisCtx, pattern: str) -> List[str]: """Get all items with index 'index', stored under a given pattern. Arguments: ctx: Redis context to use. pattern: key pattern to match. Return a sorted list with the elements under the matched key """ if not ctx: raise RequiredArgument('get_elem_pattern_by_index', 'ctx') if not pattern: raise RequiredArgument('get_elem_pattern_by_index', 'pattern') return sorted(ctx.keys(pattern)) @classmethod def get_filenames_and_oids( cls, ctx: RedisCtx, ) -> Iterable[Tuple[str, str]]: """Get all items with index 'index', stored under a given pattern. Arguments: ctx: Redis context to use. Return an iterable where each single tuple contains the filename as first element and the oid as the second one. 
""" if not ctx: raise RequiredArgument('get_filenames_and_oids', 'ctx') items = cls.get_keys_by_pattern(ctx, 'nvt:*') return ((ctx.lindex(item, 0), item[4:]) for item in items) class BaseDB: def __init__(self, kbindex: int, ctx: Optional[RedisCtx] = None): if ctx is None: self.ctx = OpenvasDB.create_context(kbindex) else: self.ctx = ctx self.index = kbindex def flush(self): """ Flush the database """ self.ctx.flushdb() class BaseKbDB(BaseDB): def _add_single_item( self, name: str, values: Iterable, utf8_enc: Optional[bool] = False ): """Changing the encoding format of an existing redis context is not possible. Therefore a new temporary redis context is created to store key-values encoded with utf-8.""" if utf8_enc: ctx = OpenvasDB.create_context(self.index, encoding='utf-8') OpenvasDB.add_single_item(ctx, name, values) else: OpenvasDB.add_single_item(self.ctx, name, values) def _set_single_item(self, name: str, value: Iterable): """Set (replace) a single KB element. Arguments: name: key name of a list. value: New elements to add to the key. """ OpenvasDB.set_single_item(self.ctx, name, value) def _get_single_item(self, name: str) -> Optional[str]: """Get a single KB element. Arguments: name: key name of a list. """ return OpenvasDB.get_single_item(self.ctx, name) def _get_list_item( self, name: str, ) -> Optional[List]: """Returns the specified elements from `start` to `end` of the list stored as `name`. Arguments: name: key name of a list. Return List specified elements in the key. """ return OpenvasDB.get_list_item(self.ctx, name) def _pop_list_items(self, name: str) -> List: return OpenvasDB.pop_list_items(self.ctx, name) def _remove_list_item(self, key: str, value: str): """Remove item from the key list. Arguments: key: key name of a list. value: Value to be removed from the key. """ OpenvasDB.remove_list_item(self.ctx, key, value) def get_result(self) -> Optional[str]: """Get and remove the oldest result from the list. 
Return the oldest scan results """ return self._pop_list_items("internal/results") def get_status(self, openvas_scan_id: str) -> Optional[str]: """ Return the status of the host scan """ return self._get_single_item('internal/{}'.format(openvas_scan_id)) def __repr__(self): return '<{} index={}>'.format(self.__class__.__name__, self.index) class ScanDB(BaseKbDB): """ Database for a scanning a single host """ def select(self, kbindex: int) -> "ScanDB": """Select a redis kb. Arguments: kbindex: The new kb to select """ OpenvasDB.select_database(self.ctx, kbindex) self.index = kbindex return self class KbDB(BaseKbDB): def get_scan_databases(self) -> Iterator[ScanDB]: """Returns an iterator yielding corresponding ScanDBs The returned Iterator can't be converted to an Iterable like a List. Each yielded ScanDB must be used independently in a for loop. If the Iterator gets converted into an Iterable all returned ScanDBs will use the same redis context pointing to the same redis database. """ dbs = self._get_list_item('internal/dbindex') scan_db = ScanDB(self.index) for kbindex in dbs: if kbindex == self.index: continue yield scan_db.select(kbindex) def add_scan_id(self, scan_id: str): self._add_single_item('internal/{}'.format(scan_id), ['new']) self._add_single_item('internal/scanid', [scan_id]) def add_scan_preferences(self, openvas_scan_id: str, preferences: Iterable): self._add_single_item( 'internal/{}/scanprefs'.format(openvas_scan_id), preferences ) def add_credentials_to_scan_preferences( self, openvas_scan_id: str, preferences: Iterable ): """Force the usage of the utf-8 encoding, since some credentials contain special chars not supported by latin-1 encoding.""" self._add_single_item( 'internal/{}/scanprefs'.format(openvas_scan_id), preferences, utf8_enc=True, ) def add_scan_process_id(self, pid: int): self._add_single_item('internal/ovas_pid', [pid]) def get_scan_process_id(self) -> Optional[str]: return self._get_single_item('internal/ovas_pid') def 
remove_scan_database(self, scan_db: ScanDB): self._remove_list_item('internal/dbindex', scan_db.index) def target_is_finished(self, scan_id: str) -> bool: """ Check if a target has finished. """ status = self._get_single_item('internal/{}'.format(scan_id)) if status is None: logger.error( "%s: Target set as finished because redis returned None as " "scanner status.", scan_id, ) return status == 'finished' or status is None def stop_scan(self, openvas_scan_id: str): self._set_single_item( 'internal/{}'.format(openvas_scan_id), ['stop_all'] ) def scan_is_stopped(self, scan_id: str) -> bool: """Check if the scan should be stopped""" status = self._get_single_item('internal/%s' % scan_id) return status == 'stop_all' def get_scan_status(self) -> List: """Get and remove the oldest host scan status from the list. Return a string which represents the host scan status. """ return self._pop_list_items("internal/status") class MainDB(BaseDB): """ Main Database """ DEFAULT_INDEX = 0 def __init__(self, ctx=None): super().__init__(self.DEFAULT_INDEX, ctx) self._max_dbindex = None @property def max_database_index(self): """Set the number of databases have been configured into kbr struct.""" if self._max_dbindex is None: resp = self.ctx.config_get('databases') if len(resp) == 1: self._max_dbindex = int(resp.get('databases')) else: raise OspdOpenvasError( 'Redis Error: Not possible to get max_dbindex.' ) from None return self._max_dbindex def try_database(self, index: int) -> bool: """Check if a redis db is already in use. If not, set it as in use and return. Arguments: ctx: Redis object connected to the kb with the DBINDEX_NAME key. index: Number intended to be used. Return True if it is possible to use the db. False if the given db number is already in use. """ _in_use = 1 try: resp = self.ctx.hsetnx(DBINDEX_NAME, index, _in_use) except: raise OspdOpenvasError( 'Redis Error: Not possible to set %s.' 
% DBINDEX_NAME ) from None return resp == 1 def get_new_kb_database(self) -> Optional[KbDB]: """Return a new kb db to an empty kb.""" for index in range(1, self.max_database_index): if self.try_database(index): kbdb = KbDB(index) kbdb.flush() return kbdb return None def find_kb_database_by_scan_id( self, scan_id: str ) -> Tuple[Optional[str], Optional["KbDB"]]: """Find a kb db by via a scan id""" for index in range(1, self.max_database_index): ctx = OpenvasDB.create_context(index) if OpenvasDB.get_key_count(ctx, 'internal/{}'.format(scan_id)): return KbDB(index, ctx) return None def release_database(self, database: BaseDB): self.release_database_by_index(database.index) database.flush() def release_database_by_index(self, index: int): self.ctx.hdel(DBINDEX_NAME, index) def release(self): self.release_database(self) ospd-openvas-21.4.3/ospd_openvas/errors.py000066400000000000000000000017431413127500500206100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . """ Module for OSPD OpenVAS errors """ from ospd.errors import OspdError class OspdOpenvasError(OspdError): """An exception for gvm errors Base class for all exceptions originated in ospd-openvas. 
""" ospd-openvas-21.4.3/ospd_openvas/lock.py000066400000000000000000000075561413127500500202340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . import logging import time import fcntl from pathlib import Path logger = logging.getLogger(__name__) class LockFile: def __init__(self, path: Path): self._lock_file_path = path self._has_lock = False self._fd = None def has_lock(self) -> bool: return self._has_lock def _acquire_lock(self) -> "LockFile": """Acquire a lock by creating a lock file.""" if self.has_lock(): return self parent_dir = self._lock_file_path.parent try: # create parent directories recursively parent_dir.mkdir(parents=True, mode=0o770, exist_ok=True) except OSError as e: logger.error( "Could not create parent dir %s for lock file. %s", str(parent_dir), e, ) return self try: # Open the fd with append flag to create the file # if not exists and to avoid deleting the content # something else wrote in it. self._fd = self._lock_file_path.open('a') except Exception as e: # pylint: disable=broad-except logger.error( "Failed to open lock file %s. 
%s", str(self._lock_file_path), e, ) try: self._fd.close() self._fd = None except Exception: # pylint: disable=broad-except pass return self try: self._lock_file_path.chmod(0o660) except OSError as e: # ignore error because it is very likely that the file exists, has # the correct permissions but we are not the owner logger.debug( "Could not change permissions of lock file %s", str(self._lock_file_path), ) # Try to acquire the lock. try: fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB) self._has_lock = True logger.debug("Created lock file %s.", str(self._lock_file_path)) except BlockingIOError as e: logger.debug( "Failed to lock the file %s. %s", str(self._lock_file_path), e, ) try: self._fd.close() self._fd = None except Exception: # pylint: disable=broad-except pass return self def wait_for_lock(self): while not self.has_lock(): self._acquire_lock() time.sleep(10) return self def _release_lock(self) -> None: """Release the lock by deleting the lock file""" if self.has_lock() and self._fd: fcntl.flock(self._fd, fcntl.LOCK_UN) self._fd.close() self._fd = None self._has_lock = False logger.debug( "Removed lock from file %s.", str(self._lock_file_path) ) def __enter__(self): self._acquire_lock() return self def __exit__(self, exc_type, exc_value, exc_tb): self._release_lock() ospd-openvas-21.4.3/ospd_openvas/nvticache.py000066400000000000000000000247001413127500500212360ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . """ Provide functions to handle NVT Info Cache. """ import logging from typing import List, Dict, Optional, Iterator, Tuple from pathlib import Path from time import time from ospd.errors import RequiredArgument from ospd_openvas.errors import OspdOpenvasError from ospd_openvas.db import NVT_META_FIELDS, OpenvasDB, MainDB, BaseDB, RedisCtx NVTI_CACHE_NAME = "nvticache" logger = logging.getLogger(__name__) LIST_FIRST_POS = 0 LIST_LAST_POS = -1 class NVTICache(BaseDB): QOD_TYPES = { 'exploit': '100', 'remote_vul': '99', 'remote_app': '98', 'package': '97', 'registry': '97', 'remote_active': '95', 'remote_banner': '80', 'executable_version': '80', 'remote_analysis': '70', 'remote_probe': '50', 'remote_banner_unreliable': '30', 'executable_version_unreliable': '30', 'general_note': '1', 'default': '70', } def __init__( # pylint: disable=super-init-not-called self, main_db: MainDB ): self._ctx = None self.index = None self._main_db = main_db @property def ctx(self) -> Optional[RedisCtx]: if self._ctx is None: self._ctx, self.index = OpenvasDB.find_database_by_pattern( NVTI_CACHE_NAME, self._main_db.max_database_index ) return self._ctx def get_feed_version(self) -> Optional[str]: """Get feed version of the nvti cache db. Returns the feed version or None if the nvt feed isn't available. """ if not self.ctx: # no nvti cache db available yet return None return OpenvasDB.get_single_item(self.ctx, NVTI_CACHE_NAME) def get_oids(self) -> Iterator[Tuple[str, str]]: """Get the list of NVT file names and OIDs. Returns: A i. Each single list contains the filename as first element and the oid as second one. """ return OpenvasDB.get_filenames_and_oids(self.ctx) def get_nvt_params(self, oid: str) -> Optional[Dict[str, str]]: """Get NVT's preferences. 
Arguments: oid: OID of VT from which to get the parameters. Returns: A dictionary with preferences and timeout. """ prefs = self.get_nvt_prefs(oid) vt_params = {} if prefs: for nvt_pref in prefs: elem = nvt_pref.split('|||') param_id = elem[0] param_name = elem[1] param_type = elem[2] vt_params[param_id] = dict() vt_params[param_id]['id'] = param_id vt_params[param_id]['type'] = param_type vt_params[param_id]['name'] = param_name.strip() vt_params[param_id]['description'] = 'Description' if len(elem) > 3: param_default = elem[3] vt_params[param_id]['default'] = param_default else: vt_params[param_id]['default'] = '' return vt_params @staticmethod def _parse_metadata_tags(tags_str: str, oid: str) -> Dict[str, str]: """Parse a string with multiple tags. Arguments: tags_str: String with tags separated by `|`. oid: VT OID. Only used for logging in error case. Returns: A dictionary with the tags. """ tags_dict = dict() tags = tags_str.split('|') for tag in tags: try: _tag, _value = tag.split('=', 1) except ValueError: logger.error('Tag %s in %s has no value.', tag, oid) continue tags_dict[_tag] = _value return tags_dict def get_nvt_metadata(self, oid: str) -> Optional[Dict[str, str]]: """Get a full NVT. Returns an XML tree with the NVT metadata. Arguments: oid: OID of VT from which to get the metadata. Returns: A dictionary with the VT metadata. 
""" resp = OpenvasDB.get_list_item( self.ctx, "nvt:%s" % oid, start=NVT_META_FIELDS.index("NVT_FILENAME_POS"), end=NVT_META_FIELDS.index("NVT_NAME_POS"), ) if not isinstance(resp, list) or len(resp) == 0: return None subelem = [ 'filename', 'required_keys', 'mandatory_keys', 'excluded_keys', 'required_udp_ports', 'required_ports', 'dependencies', 'tag', 'cve', 'bid', 'xref', 'category', 'timeout', 'family', 'name', ] custom = dict() custom['refs'] = dict() custom['vt_params'] = dict() for child, res in zip(subelem, resp): if child not in ['cve', 'bid', 'xref', 'tag', 'timeout'] and res: custom[child] = res elif child == 'tag': custom.update(self._parse_metadata_tags(res, oid)) elif child in ['cve', 'bid', 'xref'] and res: custom['refs'][child] = res.split(", ") elif child == 'timeout': if res is None: continue vt_params = {} if int(res) > 0: _param_id = '0' vt_params[_param_id] = dict() vt_params[_param_id]['id'] = _param_id vt_params[_param_id]['type'] = 'entry' vt_params[_param_id]['name'] = 'timeout' vt_params[_param_id]['description'] = 'Script Timeout' vt_params[_param_id]['default'] = res custom['vt_params'] = vt_params custom['vt_params'].update(self.get_nvt_params(oid)) return custom def get_nvt_refs(self, oid: str) -> Optional[Dict[str, str]]: """Get a full NVT. Arguments: oid: OID of VT from which to get the VT references. Returns: A dictionary with the VT references. """ resp = OpenvasDB.get_list_item( self.ctx, "nvt:%s" % oid, start=NVT_META_FIELDS.index("NVT_CVES_POS"), end=NVT_META_FIELDS.index("NVT_XREFS_POS"), ) if not isinstance(resp, list) or len(resp) == 0: return None subelem = ['cve', 'bid', 'xref'] refs = dict() for child, res in zip(subelem, resp): refs[child] = res.split(", ") return refs def get_nvt_family(self, oid: str) -> str: """Get NVT family Arguments: oid: OID of VT from which to get the VT family. Returns: A str with the VT family. 
""" return OpenvasDB.get_single_item( self.ctx, 'nvt:%s' % oid, index=NVT_META_FIELDS.index("NVT_FAMILY_POS"), ) def get_nvt_prefs(self, oid: str) -> Optional[List[str]]: """Get NVT preferences. Arguments: ctx: Redis context to be used. oid: OID of VT from which to get the VT preferences. Returns: A list with the VT preferences. """ key = 'oid:%s:prefs' % oid return OpenvasDB.get_list_item(self.ctx, key) def get_nvt_timeout(self, oid: str) -> Optional[str]: """Get NVT timeout Arguments: ctx: Redis context to be used. oid: OID of VT from which to get the script timeout. Returns: The timeout. """ return OpenvasDB.get_single_item( self.ctx, 'nvt:%s' % oid, index=NVT_META_FIELDS.index("NVT_TIMEOUT_POS"), ) def get_nvt_tags(self, oid: str) -> Optional[Dict[str, str]]: """Get Tags of the given OID. Arguments: ctx: Redis context to be used. oid: OID of VT from which to get the VT tags. Returns: A dictionary with the VT tags. """ tag = OpenvasDB.get_single_item( self.ctx, 'nvt:%s' % oid, index=NVT_META_FIELDS.index('NVT_TAGS_POS'), ) tags = tag.split('|') return dict([item.split('=', 1) for item in tags]) def get_nvt_files_count(self) -> int: return OpenvasDB.get_key_count(self.ctx, "filename:*") def get_nvt_count(self) -> int: return OpenvasDB.get_key_count(self.ctx, "nvt:*") def force_reload(self): self._main_db.release_database(self) def add_vt_to_cache(self, vt_id: str, vt: List[str]): if not vt_id: raise RequiredArgument('add_vt_to_cache', 'vt_id') if not vt: raise RequiredArgument('add_vt_to_cache', 'vt') if not isinstance(vt, list) or len(vt) != 15: raise OspdOpenvasError( 'Error trying to load the VT' ' {} in cache'.format(vt) ) OpenvasDB.add_single_list(self.ctx, vt_id, vt) OpenvasDB.add_single_item(self.ctx, f'filename:{vt[0]}', [int(time())]) def get_file_checksum(self, file_abs_path: Path) -> str: """Get file sha256 checksum or md5 checksum Arguments: file_abs_path: File to get the checksum Returns: The checksum """ # Try to get first sha256 checksum sha256sum 
= OpenvasDB.get_single_item( self.ctx, f'sha256sums:{file_abs_path}', ) if sha256sum: return sha256sum # Search for md5 checksum md5sum = OpenvasDB.get_single_item( self.ctx, f'md5sums:{file_abs_path}', ) if md5sum: return md5sum ospd-openvas-21.4.3/ospd_openvas/openvas.py000066400000000000000000000130431413127500500207430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . from typing import Optional, Dict, Any import logging import subprocess import psutil logger = logging.getLogger(__name__) _BOOL_DICT = {'no': 0, 'yes': 1} class Openvas: """Class for calling the openvas executable""" @staticmethod def _get_version_output() -> Optional[str]: try: result = subprocess.check_output( ['openvas', '-V'], stderr=subprocess.STDOUT ) return result.decode('ascii') except (subprocess.SubprocessError, OSError) as e: logger.debug( 'Is was not possible to call openvas to get the version ' 'information. Reason %s', e, ) return None @staticmethod def check() -> bool: """Checks that openvas command line tool is found and is executable. """ try: subprocess.check_call(['openvas', '-V'], stdout=subprocess.DEVNULL) return True except (subprocess.SubprocessError, OSError) as e: logger.debug( 'It was not possible to call the openvas executable. 
Reason %s', e, ) return False @staticmethod def check_sudo() -> bool: """Checks if openvas can be run with sudo""" try: subprocess.check_call( ['sudo', '-n', 'openvas', '-s'], stdout=subprocess.DEVNULL ) return True except (subprocess.SubprocessError, OSError) as e: logger.debug( 'It was not possible to call openvas with sudo. ' 'The scanner will run as non-root user. Reason %s', e, ) return False @classmethod def get_version(cls) -> Optional[str]: """Returns the version string of the openvas executable""" result = cls._get_version_output() if result is None: return None version = result.split('\n') if version[0].find('OpenVAS') < 0: return None return version[0] @staticmethod def get_settings() -> Dict[str, Any]: """Parses the current settings of the openvas executable""" param_list = dict() try: result = subprocess.check_output(['openvas', '-s']) result = result.decode('ascii') except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: logger.warning('Could not gather openvas settings. Reason %s', e) return param_list for conf in result.split('\n'): if not conf: continue try: key, value = conf.split('=', 1) except ValueError: logger.warning("Could not parse openvas setting '%s'", conf) continue key = key.strip() value = value.strip() if value: value = _BOOL_DICT.get(value, value) param_list[key] = value return param_list @staticmethod def load_vts_into_redis(): """Loads all VTs into the redis database""" logger.debug('Loading VTs into Redis DB...') try: subprocess.check_call( ['openvas', '--update-vt-info'], stdout=subprocess.DEVNULL ) logger.debug('Finished loading VTs into Redis DB') except (subprocess.SubprocessError, OSError) as err: logger.error('OpenVAS Scanner failed to load VTs. 
%s', err) @staticmethod def start_scan( scan_id: str, sudo: bool = False, niceness: int = None, ) -> Optional[psutil.Popen]: """Calls openvas to start a scan process""" cmd = [] if niceness: cmd += ['nice', '-n', niceness] logger.debug("Starting scan with niceness %s", niceness) if sudo: cmd += ['sudo', '-n'] cmd += ['openvas', '--scan-start', scan_id] try: return psutil.Popen(cmd, shell=False) except (psutil.Error, OSError, FileNotFoundError) as e: # the command is not available logger.warning("Could not start scan process. Reason %s", e) return None @staticmethod def stop_scan(scan_id: str, sudo: bool = False) -> bool: """Calls openvas to stop a scan process""" cmd = [] if sudo: cmd += ['sudo', '-n'] cmd += ['openvas', '--scan-stop', scan_id] try: subprocess.check_call(cmd) return True except (subprocess.SubprocessError, OSError) as e: # the command is not available logger.warning( 'Not possible to stop scan: %s. Reason %s', scan_id, e, ) return False ospd-openvas-21.4.3/ospd_openvas/preferencehandler.py000066400000000000000000000631041413127500500227470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # pylint: disable=too-many-lines """ Prepare the preferences to be used by OpenVAS. 
Get the data from the scan collection and store the data in a redis KB in the right format to be used by OpenVAS. """ import logging import binascii from enum import IntEnum from typing import Optional, Dict, List, Tuple from base64 import b64decode from ospd.scan import ScanCollection from ospd.ospd import BASE_SCANNER_PARAMS from ospd_openvas.openvas import Openvas from ospd_openvas.db import KbDB from ospd_openvas.nvticache import NVTICache from ospd_openvas.vthelper import VtHelper logger = logging.getLogger(__name__) OID_SSH_AUTH = "1.3.6.1.4.1.25623.1.0.103591" OID_SMB_AUTH = "1.3.6.1.4.1.25623.1.0.90023" OID_ESXI_AUTH = "1.3.6.1.4.1.25623.1.0.105058" OID_SNMP_AUTH = "1.3.6.1.4.1.25623.1.0.105076" OID_PING_HOST = "1.3.6.1.4.1.25623.1.0.100315" BOREAS_ALIVE_TEST = "ALIVE_TEST" BOREAS_ALIVE_TEST_PORTS = "ALIVE_TEST_PORTS" BOREAS_SETTING_NAME = "test_alive_hosts_only" class AliveTest(IntEnum): """Alive Tests.""" ALIVE_TEST_SCAN_CONFIG_DEFAULT = 0 ALIVE_TEST_TCP_ACK_SERVICE = 1 ALIVE_TEST_ICMP = 2 ALIVE_TEST_ARP = 4 ALIVE_TEST_CONSIDER_ALIVE = 8 ALIVE_TEST_TCP_SYN_SERVICE = 16 def alive_test_methods_to_bit_field( icmp: bool, tcp_syn: bool, tcp_ack: bool, arp: bool, consider_alive: bool ) -> int: """Internally a bit field is used as alive test. This function creates such a bit field out of the supplied alive test methods. 
""" icmp_enum = AliveTest.ALIVE_TEST_ICMP if icmp else 0 tcp_syn_enum = AliveTest.ALIVE_TEST_TCP_SYN_SERVICE if tcp_syn else 0 tcp_ack_enum = AliveTest.ALIVE_TEST_TCP_ACK_SERVICE if tcp_ack else 0 arp_enum = AliveTest.ALIVE_TEST_ARP if arp else 0 consider_alive_enum = ( AliveTest.ALIVE_TEST_CONSIDER_ALIVE if consider_alive else 0 ) bit_field = ( icmp_enum | tcp_syn_enum | tcp_ack_enum | arp_enum | consider_alive_enum ) return bit_field def _from_bool_to_str(value: int) -> str: """The OpenVAS scanner use yes and no as boolean values, whereas ospd uses 1 and 0.""" return 'yes' if value == 1 else 'no' class PreferenceHandler: def __init__( self, scan_id: str, kbdb: KbDB, scan_collection: ScanCollection, nvticache: NVTICache, ): self.scan_id = scan_id self.kbdb = kbdb self.scan_collection = scan_collection self._target_options = None self._nvts_params = None self.nvti = nvticache def prepare_scan_id_for_openvas(self): """Create the openvas scan id and store it in the redis kb. Return the openvas scan_id. """ self.kbdb.add_scan_id(self.scan_id) @property def target_options(self) -> Dict: """Return target options from Scan collection""" if self._target_options is not None: return self._target_options self._target_options = self.scan_collection.get_target_options( self.scan_id ) return self._target_options def _get_vts_in_groups( self, filters: List[str], ) -> List[str]: """Return a list of vts which match with the given filter. Arguments: filters A list of filters. Each filter has key, operator and a value. They are separated by a space. Supported keys: family Returns a list of vt oids which match with the given filter. 
""" vts_list = list() families = dict() oids = self.nvti.get_oids() for _, oid in oids: family = self.nvti.get_nvt_family(oid) if family not in families: families[family] = list() families[family].append(oid) for elem in filters: key, value = elem.split('=') if key == 'family' and value in families: vts_list.extend(families[value]) return vts_list def _get_vt_param_type(self, vt: Dict, vt_param_id: str) -> Optional[str]: """Return the type of the vt parameter from the vts dictionary.""" vt_params_list = vt.get("vt_params") if vt_params_list.get(vt_param_id): return vt_params_list[vt_param_id]["type"] return None def _get_vt_param_name(self, vt: Dict, vt_param_id: str) -> Optional[str]: """Return the type of the vt parameter from the vts dictionary.""" vt_params_list = vt.get("vt_params") if vt_params_list.get(vt_param_id): return vt_params_list[vt_param_id]["name"] return None @staticmethod def check_param_type(vt_param_value: str, param_type: str) -> Optional[int]: """Check if the value of a vt parameter matches with the type founded. 
""" if ( param_type in [ 'entry', 'password', 'radio', 'sshlogin', ] and isinstance(vt_param_value, str) ): return None elif param_type == 'checkbox' and ( vt_param_value == '0' or vt_param_value == '1' ): return None elif param_type == 'file': try: b64decode(vt_param_value.encode()) except (binascii.Error, AttributeError, TypeError): return 1 return None elif param_type == 'integer': try: int(vt_param_value) except ValueError: return 1 return None return 1 def _process_vts( self, vts: Dict[str, Dict[str, str]], ) -> Tuple[List[str], Dict[str, str]]: """Add single VTs and their parameters.""" vts_list = [] vts_params = {} vtgroups = vts.pop('vt_groups') vthelper = VtHelper(self.nvti) if vtgroups: vts_list = self._get_vts_in_groups(vtgroups) for vtid, vt_params in vts.items(): vt = vthelper.get_single_vt(vtid) if not vt: logger.warning( 'The VT %s was not found and it will not be added to the ' 'plugin scheduler.', vtid, ) continue vts_list.append(vtid) for vt_param_id, vt_param_value in vt_params.items(): param_type = self._get_vt_param_type(vt, vt_param_id) param_name = self._get_vt_param_name(vt, vt_param_id) if vt_param_id > '0' and (not param_type or not param_name): logger.debug( 'Missing type or name for VT parameter %s of %s. ' 'This VT parameter will not be set.', vt_param_id, vtid, ) continue if vt_param_id == '0': type_aux = 'integer' else: type_aux = param_type if self.check_param_type(vt_param_value, type_aux): logger.debug( 'The VT parameter %s for %s could not be set. 
' 'Expected %s type for parameter value %s', vt_param_id, vtid, type_aux, str(vt_param_value), ) continue if type_aux == 'checkbox': vt_param_value = _from_bool_to_str(int(vt_param_value)) if vt_param_id == '0': vts_params["timeout.{0}".format(vtid)] = str(vt_param_value) else: vts_params[ "{0}:{1}:{2}:{3}".format( vtid, vt_param_id, param_type, param_name ) ] = str(vt_param_value) return vts_list, vts_params def prepare_plugins_for_openvas(self) -> bool: """Get the plugin list and it preferences from the Scan Collection. The plugin list is immediately stored in the kb. """ nvts = self.scan_collection.get_vts(self.scan_id) if nvts: nvts_list, self._nvts_params = self._process_vts(nvts) # Add nvts list separ = ';' plugin_list = 'plugin_set|||%s' % separ.join(nvts_list) self.kbdb.add_scan_preferences(self.scan_id, [plugin_list]) nvts_list = None plugin_list = None nvts = None return True return False def prepare_nvt_preferences(self): """Prepare the vts preferences. Store the data in the kb.""" items_list = [] for key, val in self._nvts_params.items(): items_list.append('%s|||%s' % (key, val)) if items_list: self.kbdb.add_scan_preferences(self.scan_id, items_list) @staticmethod def build_alive_test_opt_as_prefs( target_options: Dict[str, str] ) -> Dict[str, str]: """Parse the target options dictionary. Arguments: target_options: Dictionary with the target options. Return: A dict with the target options related to alive test method in string format to be added to the redis KB. """ target_opt_prefs_list = {} alive_test = None if target_options: # Alive test specified as bit field. alive_test = target_options.get('alive_test') # Alive test specified as individual methods. 
alive_test_methods = target_options.get('alive_test_methods') # alive_test takes precedence over alive_test_methods if alive_test is None and alive_test_methods: alive_test = alive_test_methods_to_bit_field( icmp=target_options.get('icmp') == '1', tcp_syn=target_options.get('tcp_syn') == '1', tcp_ack=target_options.get('tcp_ack') == '1', arp=target_options.get('arp') == '1', consider_alive=target_options.get('consider_alive') == '1', ) if target_options and alive_test: try: alive_test = int(alive_test) except ValueError: logger.debug( 'Alive test settings not applied. ' 'Invalid alive test value %s', target_options.get('alive_test'), ) return target_opt_prefs_list # No alive test or wrong value, uses the default # preferences sent by the client. if alive_test < 1 or alive_test > 31: return target_opt_prefs_list if ( alive_test & AliveTest.ALIVE_TEST_TCP_ACK_SERVICE or alive_test & AliveTest.ALIVE_TEST_TCP_SYN_SERVICE ): value = "yes" else: value = "no" target_opt_prefs_list[ OID_PING_HOST + ':1:checkbox:' + 'Do a TCP ping' ] = value if ( alive_test & AliveTest.ALIVE_TEST_TCP_SYN_SERVICE and alive_test & AliveTest.ALIVE_TEST_TCP_ACK_SERVICE ): value = "yes" else: value = "no" target_opt_prefs_list[ OID_PING_HOST + ':2:checkbox:' + 'TCP ping tries also TCP-SYN ping' ] = value if (alive_test & AliveTest.ALIVE_TEST_TCP_SYN_SERVICE) and not ( alive_test & AliveTest.ALIVE_TEST_TCP_ACK_SERVICE ): value = "yes" else: value = "no" target_opt_prefs_list[ OID_PING_HOST + ':7:checkbox:' + 'TCP ping tries only TCP-SYN ping' ] = value if alive_test & AliveTest.ALIVE_TEST_ICMP: value = "yes" else: value = "no" target_opt_prefs_list[ OID_PING_HOST + ':3:checkbox:' + 'Do an ICMP ping' ] = value if alive_test & AliveTest.ALIVE_TEST_ARP: value = "yes" else: value = "no" target_opt_prefs_list[ OID_PING_HOST + ':4:checkbox:' + 'Use ARP' ] = value if alive_test & AliveTest.ALIVE_TEST_CONSIDER_ALIVE: value = "no" else: value = "yes" target_opt_prefs_list[ OID_PING_HOST + ':5:checkbox:' + 
'Mark unrechable Hosts as dead (not scanning)' ] = value return target_opt_prefs_list def prepare_alive_test_option_for_openvas(self): """Set alive test option. Overwrite the scan config settings.""" settings = Openvas.get_settings() if settings and ( self.target_options.get('alive_test') or self.target_options.get('alive_test_methods') ): alive_test_opt = self.build_alive_test_opt_as_prefs( self.target_options ) self._nvts_params.update(alive_test_opt) def prepare_boreas_alive_test(self): """Set alive_test for Boreas if boreas scanner config (BOREAS_SETTING_NAME) was set""" settings = Openvas.get_settings() alive_test = None alive_test_ports = None target_options = self.target_options if settings: boreas = settings.get(BOREAS_SETTING_NAME) if not boreas: return else: return if target_options: alive_test_ports = target_options.get('alive_test_ports') # Alive test was specified as bit field. alive_test = target_options.get('alive_test') # Alive test was specified as individual methods. alive_test_methods = target_options.get('alive_test_methods') # takes precedence over if alive_test is None and alive_test_methods: alive_test = alive_test_methods_to_bit_field( icmp=target_options.get('icmp') == '1', tcp_syn=target_options.get('tcp_syn') == '1', tcp_ack=target_options.get('tcp_ack') == '1', arp=target_options.get('arp') == '1', consider_alive=target_options.get('consider_alive') == '1', ) if alive_test is not None: try: alive_test = int(alive_test) except ValueError: logger.debug( 'Alive test preference for Boreas not set. 
' 'Invalid alive test value %s.', alive_test, ) # Use default alive test as fall back alive_test = AliveTest.ALIVE_TEST_SCAN_CONFIG_DEFAULT # Use default alive test if no valid alive_test was provided else: alive_test = AliveTest.ALIVE_TEST_SCAN_CONFIG_DEFAULT # If a valid alive_test was set then the bit mask # has value between 31 (11111) and 1 (10000) if 1 <= alive_test <= 31: pref = "{pref_key}|||{pref_value}".format( pref_key=BOREAS_ALIVE_TEST, pref_value=alive_test ) self.kbdb.add_scan_preferences(self.scan_id, [pref]) if alive_test == AliveTest.ALIVE_TEST_SCAN_CONFIG_DEFAULT: alive_test = AliveTest.ALIVE_TEST_ICMP pref = "{pref_key}|||{pref_value}".format( pref_key=BOREAS_ALIVE_TEST, pref_value=alive_test ) self.kbdb.add_scan_preferences(self.scan_id, [pref]) # Add portlist if present. Validity is checked on Boreas side. if alive_test_ports is not None: pref = "{pref_key}|||{pref_value}".format( pref_key=BOREAS_ALIVE_TEST_PORTS, pref_value=alive_test_ports, ) self.kbdb.add_scan_preferences(self.scan_id, [pref]) def prepare_reverse_lookup_opt_for_openvas(self): """Set reverse lookup options in the kb""" if self.target_options: items = [] _rev_lookup_only = int( self.target_options.get('reverse_lookup_only', '0') ) rev_lookup_only = _from_bool_to_str(_rev_lookup_only) items.append('reverse_lookup_only|||%s' % (rev_lookup_only)) _rev_lookup_unify = int( self.target_options.get('reverse_lookup_unify', '0') ) rev_lookup_unify = _from_bool_to_str(_rev_lookup_unify) items.append('reverse_lookup_unify|||%s' % rev_lookup_unify) self.kbdb.add_scan_preferences(self.scan_id, items) def prepare_target_for_openvas(self): """Get the target from the scan collection and set the target in the kb""" target = self.scan_collection.get_host_list(self.scan_id) target_aux = 'TARGET|||%s' % target self.kbdb.add_scan_preferences(self.scan_id, [target_aux]) def prepare_ports_for_openvas(self) -> str: """Get the port list from the scan collection and store the list in the kb.""" ports = 
self.scan_collection.get_ports(self.scan_id) port_range = 'port_range|||%s' % ports self.kbdb.add_scan_preferences(self.scan_id, [port_range]) return ports def prepare_host_options_for_openvas(self): """Get the excluded and finished hosts from the scan collection and stores the list of hosts that must not be scanned in the kb.""" exclude_hosts = self.scan_collection.get_exclude_hosts(self.scan_id) if exclude_hosts: pref_val = "exclude_hosts|||" + exclude_hosts self.kbdb.add_scan_preferences(self.scan_id, [pref_val]) def prepare_scan_params_for_openvas(self, ospd_params: Dict[str, Dict]): """Get the scan parameters from the scan collection and store them in the kb. Arguments: ospd_params: Dictionary with the OSPD Params. """ # Options which were supplied via the XML element. options = self.scan_collection.get_options(self.scan_id) prefs_val = [] for key, value in options.items(): item_type = '' if key in ospd_params: item_type = ospd_params[key].get('type') else: if key not in BASE_SCANNER_PARAMS: logger.debug( "%s is a scanner only setting and should not be set " "by the client. Setting needs to be included in " "OpenVAS configuration file instead.", key, ) if item_type == 'boolean': val = _from_bool_to_str(value) else: val = str(value) prefs_val.append(key + "|||" + val) if prefs_val: self.kbdb.add_scan_preferences(self.scan_id, prefs_val) @staticmethod def build_credentials_as_prefs(credentials: Dict) -> List[str]: """Parse the credential dictionary. Arguments: credentials: Dictionary with the credentials. Return: A list with the credentials in string format to be added to the redis KB. 
""" cred_prefs_list = [] for credential in credentials.items(): service = credential[0] cred_params = credentials.get(service) cred_type = cred_params.get('type', '') username = cred_params.get('username', '') password = cred_params.get('password', '') if service == 'ssh': port = cred_params.get('port', '') priv_username = cred_params.get('priv_username', '') priv_password = cred_params.get('priv_password', '') cred_prefs_list.append('auth_port_ssh|||' + '{0}'.format(port)) cred_prefs_list.append( OID_SSH_AUTH + ':7:' + 'entry:SSH privilege login name:|||{0}'.format( priv_username ) ) cred_prefs_list.append( OID_SSH_AUTH + ':8:' + 'password:SSH privilege password:|||{0}'.format( priv_password ) ) cred_prefs_list.append( OID_SSH_AUTH + ':1:' + 'entry:SSH login ' + 'name:|||{0}'.format(username) ) if cred_type == 'up': cred_prefs_list.append( OID_SSH_AUTH + ':3:' + 'password:SSH password ' + '(unsafe!):|||{0}'.format(password) ) else: private = cred_params.get('private', '') cred_prefs_list.append( OID_SSH_AUTH + ':2:' + 'password:SSH key passphrase:|||' + '{0}'.format(password) ) cred_prefs_list.append( OID_SSH_AUTH + ':4:' + 'file:SSH private key:|||' + '{0}'.format(private) ) if service == 'smb': cred_prefs_list.append( OID_SMB_AUTH + ':1:entry' + ':SMB login:|||{0}'.format(username) ) cred_prefs_list.append( OID_SMB_AUTH + ':2:' + 'password:SMB password:|||' + '{0}'.format(password) ) if service == 'esxi': cred_prefs_list.append( OID_ESXI_AUTH + ':1:entry:' + 'ESXi login name:|||' + '{0}'.format(username) ) cred_prefs_list.append( OID_ESXI_AUTH + ':2:' + 'password:ESXi login password:|||' + '{0}'.format(password) ) if service == 'snmp': community = cred_params.get('community', '') auth_algorithm = cred_params.get('auth_algorithm', '') privacy_password = cred_params.get('privacy_password', '') privacy_algorithm = cred_params.get('privacy_algorithm', '') cred_prefs_list.append( OID_SNMP_AUTH + ':1:' + 'password:SNMP Community:|||' + '{0}'.format(community) ) 
cred_prefs_list.append( OID_SNMP_AUTH + ':2:' + 'entry:SNMPv3 Username:|||' + '{0}'.format(username) ) cred_prefs_list.append( OID_SNMP_AUTH + ':3:' 'password:SNMPv3 Password:|||' + '{0}'.format(password) ) cred_prefs_list.append( OID_SNMP_AUTH + ':4:' + 'radio:SNMPv3 Authentication Algorithm:|||' + '{0}'.format(auth_algorithm) ) cred_prefs_list.append( OID_SNMP_AUTH + ':5:' + 'password:SNMPv3 Privacy Password:|||' + '{0}'.format(privacy_password) ) cred_prefs_list.append( OID_SNMP_AUTH + ':6:' + 'radio:SNMPv3 Privacy Algorithm:|||' + '{0}'.format(privacy_algorithm) ) return cred_prefs_list def prepare_credentials_for_openvas(self) -> bool: """Get the credentials from the scan collection and store them in the kb.""" credentials = self.scan_collection.get_credentials(self.scan_id) if credentials: cred_prefs = self.build_credentials_as_prefs(credentials) if cred_prefs: self.kbdb.add_credentials_to_scan_preferences( self.scan_id, cred_prefs ) if credentials and not cred_prefs: return False return True def prepare_main_kbindex_for_openvas(self): """Store main_kbindex as global preference in the kb, used by OpenVAS""" ov_maindbid = 'ov_maindbid|||%d' % self.kbdb.index self.kbdb.add_scan_preferences(self.scan_id, [ov_maindbid]) ospd-openvas-21.4.3/ospd_openvas/vthelper.py000066400000000000000000000146631413127500500211320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. 
# # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . """ Provide functions to handle VT Info. """ from hashlib import sha256 from typing import Optional, Dict, List, Tuple, Iterator from ospd_openvas.nvticache import NVTICache class VtHelper: def __init__(self, nvticache: NVTICache): self.nvti = nvticache def get_single_vt(self, vt_id: str, oids=None) -> Optional[Dict[str, any]]: custom = self.nvti.get_nvt_metadata(vt_id) if not custom: return None vt_params = custom.pop('vt_params') vt_refs = custom.pop('refs') name = custom.pop('name') vt_creation_time = custom.pop('creation_date') vt_modification_time = custom.pop('last_modification') if oids: vt_dependencies = list() if 'dependencies' in custom: deps = custom.pop('dependencies') deps_list = deps.split(', ') for dep_name in deps_list: dep_oid = oids.get(dep_name) if dep_oid: vt_dependencies.append(dep_oid) else: vt_dependencies.append(dep_name) else: vt_dependencies = None summary = None impact = None affected = None insight = None solution = None solution_t = None solution_m = None vuldetect = None qod_t = None qod_v = None if 'summary' in custom: summary = custom.pop('summary') if 'impact' in custom: impact = custom.pop('impact') if 'affected' in custom: affected = custom.pop('affected') if 'insight' in custom: insight = custom.pop('insight') if 'solution' in custom: solution = custom.pop('solution') if 'solution_type' in custom: solution_t = custom.pop('solution_type') if 'solution_method' in custom: solution_m = custom.pop('solution_method') if 'vuldetect' in custom: vuldetect = custom.pop('vuldetect') if 'qod_type' in custom: qod_t = custom.pop('qod_type') elif 'qod' in custom: qod_v = custom.pop('qod') severity = dict() if 'severity_vector' in custom: severity_vector = custom.pop('severity_vector') else: severity_vector = custom.pop('cvss_base_vector') severity['severity_base_vector'] = severity_vector if "CVSS:3" in severity_vector: 
severity_type = 'cvss_base_v3' else: severity_type = 'cvss_base_v2' severity['severity_type'] = severity_type if 'severity_date' in custom: severity['severity_date'] = custom.pop('severity_date') else: severity['severity_date'] = vt_creation_time if 'severity_origin' in custom: severity['severity_origin'] = custom.pop('severity_origin') if name is None: name = '' vt = {'name': name} if custom is not None: vt["custom"] = custom if vt_params is not None: vt["vt_params"] = vt_params if vt_refs is not None: vt["vt_refs"] = vt_refs if vt_dependencies is not None: vt["vt_dependencies"] = vt_dependencies if vt_creation_time is not None: vt["creation_time"] = vt_creation_time if vt_modification_time is not None: vt["modification_time"] = vt_modification_time if summary is not None: vt["summary"] = summary if impact is not None: vt["impact"] = impact if affected is not None: vt["affected"] = affected if insight is not None: vt["insight"] = insight if solution is not None: vt["solution"] = solution if solution_t is not None: vt["solution_type"] = solution_t if solution_m is not None: vt["solution_method"] = solution_m if vuldetect is not None: vt["detection"] = vuldetect if qod_t is not None: vt["qod_type"] = qod_t elif qod_v is not None: vt["qod"] = qod_v if severity is not None: vt["severities"] = severity return vt def get_vt_iterator( self, vt_selection: List[str] = None, details: bool = True ) -> Iterator[Tuple[str, Dict]]: """ Yield the vts from the Redis NVTicache. """ oids = None if not vt_selection or details: vt_collection = dict(self.nvti.get_oids()) if not vt_selection: vt_selection = vt_collection.values() if details: oids = vt_collection for vt_id in vt_selection: vt = self.get_single_vt(vt_id, oids) yield (vt_id, vt) def calculate_vts_collection_hash(self) -> str: """ Calculate the vts collection sha256 hash. """ m = sha256() # pylint: disable=invalid-name # for a reproducible hash calculation # the vts must already be sorted in the dictionary. 
for vt_id, vt in self.get_vt_iterator(details=False): param_chain = "" vt_params = vt.get('vt_params') if vt_params: for _, param in sorted(vt_params.items()): param_chain += ( param.get('id') + param.get('name') + param.get('default') ) m.update( (vt_id + vt.get('modification_time')).encode('utf-8') + param_chain.encode('utf-8') ) return m.hexdigest() ospd-openvas-21.4.3/poetry.lock000066400000000000000000001577401413127500500164270ustar00rootroot00000000000000[[package]] name = "appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = "*" [[package]] name = "astroid" version = "2.6.6" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} wrapt = ">=1.11,<1.13" [[package]] name = "autohooks" version = "21.7.0" description = "Library for managing git hooks" category = "dev" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] colorful = ">=0.5.4,<0.6.0" packaging = ">=20.3,<21.0" tomlkit = ">=0.5.11" [[package]] name = "autohooks-plugin-black" version = "21.7.1" description = "An autohooks plugin for python code formatting via black" category = "dev" optional = false python-versions = ">=3.6.2,<4.0" [package.dependencies] autohooks = ">=2.2.0" black = {version = ">=20.8b1,<=21.7b0", markers = "python_version >= \"3.6\" and python_version < \"4.0\""} [[package]] name = "autohooks-plugin-pylint" version = "21.6.0" description = "An autohooks plugin for python code linting via pylint" category = "dev" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] autohooks = 
">=2.2.0" pylint = ">=2.8.3,<3.0.0" [[package]] name = "bcrypt" version = "3.2.0" description = "Modern password hashing for your software and your servers" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] cffi = ">=1.1" six = ">=1.4.1" [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] [[package]] name = "black" version = "20.8b1" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] appdirs = "*" click = ">=7.1.2" mypy-extensions = ">=0.4.3" pathspec = ">=0.6,<1" regex = ">=2020.1.8" toml = ">=0.10.1" typed-ast = ">=1.4.0" typing-extensions = ">=3.7.4" [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "certifi" version = "2021.5.30" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = "*" [[package]] name = "cffi" version = "1.14.6" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false python-versions = "*" [package.dependencies] pycparser = "*" [[package]] name = "charset-normalizer" version = "2.0.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "dev" optional = false python-versions = ">=3.5.0" [package.extras] unicode_backport = ["unicodedata2"] [[package]] name = "click" version = "8.0.1" description = "Composable command line interface toolkit" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "colorful" version = "0.5.4" description = "Terminal string styling done right, in Python." category = "dev" optional = false python-versions = "*" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cryptography" version = "3.4.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.6" [package.dependencies] cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "deprecated" version = "1.2.12" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] wrapt = ">=1.10,<2" [package.extras] dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] [[package]] name = "idna" version = "3.2" description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false python-versions = ">=3.5" [[package]] name = "importlib-metadata" version = "4.6.3" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "isort" version = "5.9.3" description = "A Python utility / library to sort Python imports." category = "dev" optional = false python-versions = ">=3.6.1,<4.0" [package.extras] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] [[package]] name = "lazy-object-proxy" version = "1.6.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "lxml" version = "4.6.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["beautifulsoup4"] source = ["Cython (>=0.29.7)"] [[package]] name = "mccabe" version = "0.6.1" description = "McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = "*" [[package]] name = "mypy-extensions" version = "0.4.3" description = "Experimental type system extensions for programs checked with the mypy typechecker." category = "dev" optional = false python-versions = "*" [[package]] name = "ospd" version = "21.4.3" description = "OSPD is a base for scanner wrappers which share the same communication protocol: OSP (Open Scanner Protocol)" category = "main" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] defusedxml = ">=0.6,<0.8" deprecated = ">=1.2.10,<2.0.0" lxml = ">=4.5.2,<5.0.0" paramiko = ">=2.7.1,<3.0.0" psutil = ">=5.7.2,<6.0.0" [[package]] name = "packaging" version = "20.9" description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pyparsing = ">=2.0.2" [[package]] name = "paramiko" version = "2.7.2" description = "SSH2 protocol library" category = "main" optional = false python-versions = "*" [package.dependencies] bcrypt = ">=3.1.3" cryptography = ">=2.5" pynacl = ">=1.0.1" [package.extras] all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=1.3)"] [[package]] name = "pathspec" version = "0.9.0" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pontos" version = "21.7.4" description = "Common utilities and tools maintained by Greenbone Networks" category = "dev" optional = false python-versions = ">=3.7,<4.0" [package.dependencies] colorful = ">=0.5.4,<0.6.0" packaging = ">=20.3,<21.0" requests = ">=2.24.0,<3.0.0" tomlkit = ">=0.5.11" [[package]] name = "psutil" version = "5.8.0" description = "Cross-platform lib for process and system monitoring in Python." category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] [[package]] name = "pycparser" version = "2.20" description = "C parser in Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pylint" version = "2.9.6" description = "python code static checker" category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] astroid = ">=2.6.5,<2.7" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" toml = ">=0.7.1" [[package]] name = "pynacl" version = "1.4.0" description = "Python binding to the Networking and Cryptography (NaCl) library" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] cffi = ">=1.4.1" six = "*" [package.extras] docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] [[package]] name = "pyparsing" version = "2.4.7" description = "Python parsing module" category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "redis" version = "3.5.3" description = "Python client for Redis key-value store" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, 
!=3.3.*, !=3.4.*" [package.extras] hiredis = ["hiredis (>=0.1.3)"] [[package]] name = "regex" version = "2021.8.3" description = "Alternative regular expression module, to replace re." category = "dev" optional = false python-versions = "*" [[package]] name = "requests" version = "2.26.0" description = "Python HTTP for Humans." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rope" version = "0.18.0" description = "a python refactoring library..." category = "dev" optional = false python-versions = "*" [package.extras] dev = ["pytest"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomlkit" version = "0.7.2" description = "Style preserving TOML library" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "typed-ast" version = "1.4.3" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false python-versions = "*" [[package]] name = "typing-extensions" version = "3.10.0.0" description = "Backported and Experimental Type Hints for Python 3.5+" category = "dev" optional = false python-versions = "*" [[package]] name = "urllib3" version = "1.26.6" description = "HTTP 
library with thread-safe connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "wrapt" version = "1.12.1" description = "Module for decorators, wrappers and monkey patching." category = "main" optional = false python-versions = "*" [[package]] name = "zipp" version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" python-versions = "^3.7" content-hash = "9faa31d9c808626ee689f55d48f28fa2bac7f448a5a70d688c029555d7fad056" [metadata.files] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] astroid = [ {file = "astroid-2.6.6-py3-none-any.whl", hash = "sha256:ab7f36e8a78b8e54a62028ba6beef7561db4cdb6f2a5009ecc44a6f42b5697ef"}, {file = "astroid-2.6.6.tar.gz", hash = "sha256:3975a0bd5373bdce166e60c851cfcbaf21ee96de80ec518c1f4cb3e94c3fb334"}, ] autohooks = [ {file = "autohooks-21.7.0-py3-none-any.whl", hash = "sha256:54bda89bbd5b19d83538a110bca0088b38378a4c0e1cf14581792f3e41a5a260"}, {file = "autohooks-21.7.0.tar.gz", hash = "sha256:8d09ee52f67cedd5873578a9b4d97b6b80fd9db6dca6d1844621d02a197a12e2"}, ] autohooks-plugin-black = [ {file = 
"autohooks-plugin-black-21.7.1.tar.gz", hash = "sha256:e4df68d9bac84e33688fc4881243a874a3eac49fad8bcede36bd5167899d4f01"}, {file = "autohooks_plugin_black-21.7.1-py3-none-any.whl", hash = "sha256:74f615a753e365c6c5f259a884eb3e277f339e6b71a24d31158ce19a41cb28aa"}, ] autohooks-plugin-pylint = [ {file = "autohooks-plugin-pylint-21.6.0.tar.gz", hash = "sha256:6f1a486f19c8012618fecd9eaa985f5da3ef5c954b32d823323439547b947302"}, {file = "autohooks_plugin_pylint-21.6.0-py3-none-any.whl", hash = "sha256:0d6ab34739d4c494012aa1647c02e6a4fb9474e4050432ee82f378fcd05c70e4"}, ] bcrypt = [ {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, ] black = [ {file = "black-20.8b1-py3-none-any.whl", hash = "sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b"}, {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] certifi = [ {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, {file = "certifi-2021.5.30.tar.gz", hash = 
"sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, ] cffi = [ {file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"}, {file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"}, {file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"}, {file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"}, {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, {file = "cffi-1.14.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534"}, {file = "cffi-1.14.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a"}, {file = "cffi-1.14.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5"}, {file = "cffi-1.14.6-cp35-cp35m-win32.whl", hash = "sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca"}, {file = "cffi-1.14.6-cp35-cp35m-win_amd64.whl", hash = "sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218"}, {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, {file = 
"cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"}, {file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"}, {file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"}, {file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"}, {file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"}, {file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"}, {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"}, {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"}, {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"}, {file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = "sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"}, {file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = 
"sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"}, {file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"}, {file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"}, {file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"}, {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"}, {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"}, {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"}, {file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"}, {file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"}, {file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"}, {file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"}, {file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"}, {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"}, {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"}, {file = 
"cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"}, {file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"}, {file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"}, {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, ] click = [ {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] colorful = [ {file = "colorful-0.5.4-py2.py3-none-any.whl", hash = "sha256:8d264b52a39aae4c0ba3e2a46afbaec81b0559a99be0d2cfe2aba4cf94531348"}, {file = "colorful-0.5.4.tar.gz", hash = "sha256:86848ad4e2eda60cd2519d8698945d22f6f6551e23e95f3f14dfbb60997807ea"}, ] cryptography = [ {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = 
"sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, ] defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] deprecated = [ {file = "Deprecated-1.2.12-py2.py3-none-any.whl", hash = "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771"}, {file = "Deprecated-1.2.12.tar.gz", hash = "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1"}, ] idna = [ {file = "idna-3.2-py3-none-any.whl", hash = 
"sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, ] importlib-metadata = [ {file = "importlib_metadata-4.6.3-py3-none-any.whl", hash = "sha256:51c6635429c77cf1ae634c997ff9e53ca3438b495f10a55ba28594dd69764a8b"}, {file = "importlib_metadata-4.6.3.tar.gz", hash = "sha256:0645585859e9a6689c523927a5032f2ba5919f1f7d0e84bd4533312320de1ff9"}, ] isort = [ {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, ] lazy-object-proxy = [ {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, ] lxml = [ 
{file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"}, {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"}, {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"}, {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"}, {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"}, {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"}, {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"}, {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"}, {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"}, {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"}, {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"}, {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"}, {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"}, {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"}, {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"}, {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"}, {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"}, {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"}, {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"}, {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"}, {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"}, {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"}, {file = 
"lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"}, {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"}, {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"}, {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"}, {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"}, {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"}, {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] ospd = [ {file = "ospd-21.4.3-py3-none-any.whl", hash = "sha256:8ff13ac0ad2caef529c407ce0480c4482c243e8ea14f5a5fc6dce52232a604f6"}, {file = "ospd-21.4.3.tar.gz", hash = "sha256:54f81cb618662b4c0bb8e2d1552b5e770206777c4bd1bfa05004f2cb05999789"}, ] packaging = [ {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] paramiko = [ {file = 
"paramiko-2.7.2-py2.py3-none-any.whl", hash = "sha256:4f3e316fef2ac628b05097a637af35685183111d4bc1b5979bd397c2ab7b5898"}, {file = "paramiko-2.7.2.tar.gz", hash = "sha256:7f36f4ba2c0d81d219f4595e35f70d56cc94f9ac40a6acdf51d6ca210ce65035"}, ] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pontos = [ {file = "pontos-21.7.4-py3-none-any.whl", hash = "sha256:780647abc41235c382cf1735319eb8d9e98607b46608b321787303019e6b4443"}, {file = "pontos-21.7.4.tar.gz", hash = "sha256:342a5939fbbed1ca4b9b34991987b6a8d1c52627047319c8ce73b69f432cc9a5"}, ] psutil = [ {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = 
"sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, {file = 
"psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, ] pycparser = [ {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, ] pylint = [ {file = "pylint-2.9.6-py3-none-any.whl", hash = "sha256:2e1a0eb2e8ab41d6b5dbada87f066492bb1557b12b76c47c2ee8aa8a11186594"}, {file = "pylint-2.9.6.tar.gz", hash = "sha256:8b838c8983ee1904b2de66cce9d0b96649a91901350e956d78f289c3bc87b48e"}, ] pynacl = [ {file = "PyNaCl-1.4.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff"}, {file = "PyNaCl-1.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514"}, {file = "PyNaCl-1.4.0-cp27-cp27m-win32.whl", hash = "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574"}, {file = "PyNaCl-1.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"}, {file = "PyNaCl-1.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7"}, {file = "PyNaCl-1.4.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122"}, 
{file = "PyNaCl-1.4.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d"}, {file = "PyNaCl-1.4.0-cp35-cp35m-win32.whl", hash = "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4"}, {file = "PyNaCl-1.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25"}, {file = "PyNaCl-1.4.0-cp36-cp36m-win32.whl", hash = "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4"}, {file = "PyNaCl-1.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6"}, {file = "PyNaCl-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f"}, {file = "PyNaCl-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f"}, {file = "PyNaCl-1.4.0-cp38-cp38-win32.whl", hash = "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96"}, {file = "PyNaCl-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420"}, {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, ] regex = [ {file = "regex-2021.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8764a78c5464ac6bde91a8c87dd718c27c1cabb7ed2b4beaf36d3e8e390567f9"}, {file = 
"regex-2021.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4551728b767f35f86b8e5ec19a363df87450c7376d7419c3cac5b9ceb4bce576"}, {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:577737ec3d4c195c4aef01b757905779a9e9aee608fa1cf0aec16b5576c893d3"}, {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c856ec9b42e5af4fe2d8e75970fcc3a2c15925cbcc6e7a9bcb44583b10b95e80"}, {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3835de96524a7b6869a6c710b26c90e94558c31006e96ca3cf6af6751b27dca1"}, {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cea56288eeda8b7511d507bbe7790d89ae7049daa5f51ae31a35ae3c05408531"}, {file = "regex-2021.8.3-cp36-cp36m-win32.whl", hash = "sha256:a4eddbe2a715b2dd3849afbdeacf1cc283160b24e09baf64fa5675f51940419d"}, {file = "regex-2021.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:57fece29f7cc55d882fe282d9de52f2f522bb85290555b49394102f3621751ee"}, {file = "regex-2021.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a5c6dbe09aff091adfa8c7cfc1a0e83fdb8021ddb2c183512775a14f1435fe16"}, {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff4a8ad9638b7ca52313d8732f37ecd5fd3c8e3aff10a8ccb93176fd5b3812f6"}, {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b63e3571b24a7959017573b6455e05b675050bbbea69408f35f3cb984ec54363"}, {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fbc20975eee093efa2071de80df7f972b7b35e560b213aafabcec7c0bd00bd8c"}, {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:14caacd1853e40103f59571f169704367e79fb78fac3d6d09ac84d9197cadd16"}, {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb350eb1060591d8e89d6bac4713d41006cd4d479f5e11db334a48ff8999512f"}, {file = "regex-2021.8.3-cp37-cp37m-win32.whl", hash = "sha256:18fdc51458abc0a974822333bd3a932d4e06ba2a3243e9a1da305668bd62ec6d"}, {file = "regex-2021.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:026beb631097a4a3def7299aa5825e05e057de3c6d72b139c37813bfa351274b"}, {file = "regex-2021.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16d9eaa8c7e91537516c20da37db975f09ac2e7772a0694b245076c6d68f85da"}, {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3905c86cc4ab6d71635d6419a6f8d972cab7c634539bba6053c47354fd04452c"}, {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937b20955806381e08e54bd9d71f83276d1f883264808521b70b33d98e4dec5d"}, {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28e8af338240b6f39713a34e337c3813047896ace09d51593d6907c66c0708ba"}, {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c09d88a07483231119f5017904db8f60ad67906efac3f1baa31b9b7f7cca281"}, {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85f568892422a0e96235eb8ea6c5a41c8ccbf55576a2260c0160800dbd7c4f20"}, {file = "regex-2021.8.3-cp38-cp38-win32.whl", hash = "sha256:bf6d987edd4a44dd2fa2723fca2790f9442ae4de2c8438e53fcb1befdf5d823a"}, {file = "regex-2021.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:8fe58d9f6e3d1abf690174fd75800fda9bdc23d2a287e77758dc0e8567e38ce6"}, {file = "regex-2021.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:7976d410e42be9ae7458c1816a416218364e06e162b82e42f7060737e711d9ce"}, {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9569da9e78f0947b249370cb8fadf1015a193c359e7e442ac9ecc585d937f08d"}, {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bbe342c5b2dec5c5223e7c363f291558bc27982ef39ffd6569e8c082bdc83"}, {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f421e3cdd3a273bace013751c345f4ebeef08f05e8c10757533ada360b51a39"}, {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea212df6e5d3f60341aef46401d32fcfded85593af1d82b8b4a7a68cd67fdd6b"}, {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a3b73390511edd2db2d34ff09aa0b2c08be974c71b4c0505b4a048d5dc128c2b"}, {file = "regex-2021.8.3-cp39-cp39-win32.whl", hash = "sha256:f35567470ee6dbfb946f069ed5f5615b40edcbb5f1e6e1d3d2b114468d505fc6"}, {file = "regex-2021.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:bfa6a679410b394600eafd16336b2ce8de43e9b13f7fb9247d84ef5ad2b45e91"}, {file = "regex-2021.8.3.tar.gz", hash = "sha256:8935937dad2c9b369c3d932b0edbc52a62647c2afb2fafc0c280f14a8bf56a6a"}, ] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] rope = [ {file = "rope-0.18.0.tar.gz", hash = "sha256:786b5c38c530d4846aa68a42604f61b4e69a493390e3ca11b88df0fbfdc3ed04"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomlkit = [ {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, ] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = 
"sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, ] urllib3 = [ {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] zipp = [ {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] 
ospd-openvas-21.4.3/poetry.toml000066400000000000000000000000401413127500500164260ustar00rootroot00000000000000[virtualenvs] in-project = true ospd-openvas-21.4.3/pyproject.toml000066400000000000000000000042651413127500500171400ustar00rootroot00000000000000[build-system] requires = ["setuptools", "wheel"] [tool.poetry] name = "ospd-openvas" version = "21.4.3" description = "ospd based scanner for openvas" authors = ["Greenbone Networks GmbH "] license = "AGPL-3.0-or-later" readme = "README.md" homepage = "https://github.com/greenbone/ospd-openvas" repository = "https://github.com/greenbone/ospd-openvas" classifiers=[ # Full list: https://pypi.org/pypi?%3Aaction=list_classifiers "Development Status :: 4 - Beta", "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", ] keywords = [ "openvas", "Greenbone Vulnerability Management", "Vulnerability Scanning", "OSP", "Open Scanner Protocol", ] packages = [ { include = "ospd_openvas"}, { include = "docs/ospd-openvas.8", format = "sdist"}, { include = "config/ospd-openvas.service", format = "sdist"}, { include = "config/ospd-openvas.conf", format = "sdist"}, { include = "tests", format = "sdist" }, { include = "CHANGELOG.md", format = "sdist"}, { include = "COPYING", format = "sdist"}, { include = "poetry.lock", format = "sdist"}, { include = "poetry.toml", format = "sdist"}, { include = "setup.py", format = "sdist"}, { include = "MANIFEST.in", format = "sdist"}, ] [tool.poetry.dependencies] python = "^3.7" redis = "^3.5.3" psutil = "^5.5.1" packaging = "^20.4" ospd = "^21.4" [tool.poetry.dev-dependencies] pylint = "^2.8.2" rope = "^0.18.0" autohooks-plugin-pylint = "^21.4.0" autohooks-plugin-black = {version = "^21.4.0", python = "^3.7"} black = {version = "20.8b1", python = "^3.7"} pontos = "^21.7.2" [tool.poetry.scripts] 
ospd-openvas = "ospd_openvas.daemon:main" [tool.black] line-length = 80 target-version = ['py37', 'py38'] skip-string-normalization = true exclude = ''' /( \.git | \.hg | \.venv | \.circleci | \.github | \.vscode | _build | build | dist | docs )/ ''' [tool.autohooks] mode = "poetry" pre-commit = ['autohooks.plugins.black'] [tool.pontos.version] version-module-file = "ospd_openvas/__version__.py" ospd-openvas-21.4.3/setup.py000066400000000000000000000042441413127500500157330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . from setuptools import setup, find_packages from ospd_openvas import __version__ setup( name='ospd-openvas', version=__version__, packages=find_packages(exclude=['tests*']), url='https://github.com/greenbone/ospd-openvas', author='Greenbone Networks GmbH', author_email='info@greenbone.net', license='AGPLv3+', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? 
Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 5 - Production/Stable', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], python_requires='>=3.7', install_requires=[ 'ospd>=21.4.0', 'redis>=3.0.1', 'psutil', 'packaging', ], entry_points={'console_scripts': ['ospd-openvas=ospd_openvas.daemon:main']}, test_suite="tests", ) ospd-openvas-21.4.3/tests/000077500000000000000000000000001413127500500153575ustar00rootroot00000000000000ospd-openvas-21.4.3/tests/__init__.py000066400000000000000000000014041413127500500174670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
ospd-openvas-21.4.3/tests/dummydaemon.py000066400000000000000000000136701413127500500202570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . from unittest.mock import patch, MagicMock from xml.etree import ElementTree as et from ospd_openvas.daemon import OSPDopenvas class FakeDataManager: def __init__(self): pass def dict(self): return dict() class DummyDaemon(OSPDopenvas): VTS = { '1.3.6.1.4.1.25623.1.0.100061': { 'creation_time': '1237458156', 'custom': { 'category': '3', 'excluded_keys': 'Settings/disable_cgi_scanning', 'family': 'Product detection', 'filename': 'mantis_detect.nasl', 'required_ports': 'Services/www, 80', 'timeout': '0', }, 'modification_time': ('1533906565'), 'name': 'Mantis Detection', 'qod_type': 'remote_banner', 'insight': 'some insight', 'severities': { 'severity_base_vector': 'AV:N/AC:L/Au:N/C:N/I:N/A:N', 'severity_type': 'cvss_base_v2', 'severity_date': '1237458156', 'severity_origin': 'Greenbone', }, 'solution': 'some solution', 'solution_type': 'WillNotFix', 'solution_method': 'DebianAPTUpgrade', 'impact': 'some impact', 'summary': 'some summary', 'affected': 'some affection', 'vt_dependencies': [], 'vt_params': { '1': { 'id': '1', 'default': '', 'description': 'Description', 'name': 'Data length :', 'type': 'entry', }, '2': 
{ 'id': '2', 'default': 'no', 'description': 'Description', 'name': 'Do not randomize the order in which ports are scanned', # pylint: disable=line-too-long 'type': 'checkbox', }, }, 'vt_refs': { 'bid': [''], 'cve': [''], 'xref': ['URL:http://www.mantisbt.org/'], }, } } @patch('ospd_openvas.daemon.NVTICache') @patch('ospd_openvas.daemon.MainDB') def __init__( self, _MainDBClass: MagicMock = None, NvtiClass: MagicMock = None ): assert _MainDBClass assert NvtiClass nvti = NvtiClass.return_value oids = [['mantis_detect.nasl', '1.3.6.1.4.1.25623.1.0.100061']] nvti.get_oids.return_value = oids nvti.get_nvt_params.return_value = { '1': { 'id': '1', 'default': '', 'description': 'Description', 'name': 'Data length :', 'type': 'entry', }, '2': { 'id': '2', 'default': 'no', 'description': 'Description', 'name': 'Do not randomize the order in which ports are scanned', # pylint: disable=line-too-long 'type': 'checkbox', }, } nvti.get_nvt_refs.return_value = { 'bid': [''], 'cve': [''], 'xref': ['URL:http://www.mantisbt.org/'], } nvti.get_nvt_metadata.return_value = { 'category': '3', 'creation_date': '1237458156', 'cvss_base_vector': 'AV:N/AC:L/Au:N/C:N/I:N/A:N', 'excluded_keys': 'Settings/disable_cgi_scanning', 'family': 'Product detection', 'filename': 'mantis_detect.nasl', 'last_modification': ('1533906565'), 'name': 'Mantis Detection', 'qod_type': 'remote_banner', 'required_ports': 'Services/www, 80', 'solution': 'some solution', 'solution_type': 'WillNotFix', 'solution_method': 'DebianAPTUpgrade', 'impact': 'some impact', 'insight': 'some insight', 'summary': ('some summary'), 'affected': 'some affection', 'timeout': '0', 'vt_params': { '1': { 'id': '1', 'default': '', 'description': 'Description', 'name': 'Data length :', 'type': 'entry', }, '2': { 'id': '2', 'default': 'no', 'description': 'Description', 'name': 'Do not randomize the order in which ports are scanned', # pylint: disable=line-too-long 'type': 'checkbox', }, }, 'refs': { 'bid': [''], 'cve': [''], 'xref': 
['URL:http://www.mantisbt.org/'], }, } nvti.get_feed_version.return_value = '123' super().__init__(niceness=10, lock_file_dir='/tmp') self.scan_collection.data_manager = FakeDataManager() def create_xml_target(self) -> et.Element: target = et.fromstring( "" "192.168.0.1" "80,443" "" ) return target ospd-openvas-21.4.3/tests/helper.py000066400000000000000000000264771413127500500172300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # pylint: disable=protected-access from unittest.mock import Mock def assert_called_once(mock: Mock): if hasattr(mock, 'assert_called_once'): return mock.assert_called_once() if not mock.call_count == 1: msg = "Expected '%s' to have been called once. Called %s times.%s" % ( mock._mock_name or 'mock', mock.call_count, mock._calls_repr(), ) raise AssertionError(msg) def assert_called(mock: Mock): """assert that the mock was called at least once""" if mock.call_count == 0: msg = f"Expected {mock._mock_name or 'mock'} to have been called." 
raise AssertionError(msg) OSPD_PARAMS_OUT = { 'auto_enable_dependencies': { 'type': 'boolean', 'name': 'auto_enable_dependencies', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': 'Automatically enable the plugins that are depended on', }, 'cgi_path': { 'type': 'string', 'name': 'cgi_path', 'default': '/cgi-bin:/scripts', 'mandatory': 1, 'visible_for_client': True, 'description': 'Look for default CGIs in /cgi-bin and /scripts', }, 'checks_read_timeout': { 'type': 'integer', 'name': 'checks_read_timeout', 'default': 5, 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Number of seconds that the security checks will ' + 'wait for when doing a recv()' ), }, 'non_simult_ports': { 'type': 'string', 'name': 'non_simult_ports', 'default': '139, 445, 3389, Services/irc', 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Prevent to make two connections on the same given ' + 'ports at the same time.' ), }, 'open_sock_max_attempts': { 'type': 'integer', 'name': 'open_sock_max_attempts', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Number of unsuccessful retries to open the socket ' + 'before to set the port as closed.' ), }, 'timeout_retry': { 'type': 'integer', 'name': 'timeout_retry', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Number of retries when a socket connection attempt ' + 'timesout.' ), }, 'optimize_test': { 'type': 'boolean', 'name': 'optimize_test', 'default': 1, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'By default, optimize_test is enabled which means openvas does ' + 'trust the remote host banners and is only launching plugins ' + 'against the services they have been designed to check. ' + 'For example it will check a web server claiming to be IIS only ' + 'for IIS related flaws but will skip plugins testing for Apache ' + 'flaws, and so on. 
This default behavior is used to optimize ' + 'the scanning performance and to avoid false positives. ' + 'If you are not sure that the banners of the remote host ' + 'have been tampered with, you can disable this option.' ), }, 'plugins_timeout': { 'type': 'integer', 'name': 'plugins_timeout', 'default': 5, 'mandatory': 0, 'visible_for_client': True, 'description': 'This is the maximum lifetime, in seconds of a plugin.', }, 'report_host_details': { 'type': 'boolean', 'name': 'report_host_details', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'safe_checks': { 'type': 'boolean', 'name': 'safe_checks', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': ( 'Disable the plugins with potential to crash ' + 'the remote services' ), }, 'scanner_plugins_timeout': { 'type': 'integer', 'name': 'scanner_plugins_timeout', 'default': 36000, 'mandatory': 1, 'visible_for_client': True, 'description': 'Like plugins_timeout, but for ACT_SCANNER plugins.', }, 'time_between_request': { 'type': 'integer', 'name': 'time_between_request', 'default': 0, 'mandatory': 0, 'visible_for_client': True, 'description': ( 'Allow to set a wait time between two actions ' + '(open, send, close).' 
), }, 'unscanned_closed': { 'type': 'boolean', 'name': 'unscanned_closed', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'unscanned_closed_udp': { 'type': 'boolean', 'name': 'unscanned_closed_udp', 'default': 1, 'mandatory': 1, 'visible_for_client': True, 'description': '', }, 'expand_vhosts': { 'type': 'boolean', 'name': 'expand_vhosts', 'default': 1, 'mandatory': 0, 'visible_for_client': True, 'description': 'Whether to expand the target hosts ' + 'list of vhosts with values gathered from sources ' + 'such as reverse-lookup queries and VT checks ' + 'for SSL/TLS certificates.', }, 'test_empty_vhost': { 'type': 'boolean', 'name': 'test_empty_vhost', 'default': 0, 'mandatory': 0, 'visible_for_client': True, 'description': 'If set to yes, the scanner will ' + 'also test the target by using empty vhost value ' + 'in addition to the targets associated vhost values.', }, 'max_hosts': { 'type': 'integer', 'name': 'max_hosts', 'default': 30, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'The maximum number of hosts to test at the same time which ' + 'should be given to the client (which can override it). ' + 'This value must be computed given your bandwidth, ' + 'the number of hosts you want to test, your amount of ' + 'memory and the performance of your processor(s).' ), }, 'max_checks': { 'type': 'integer', 'name': 'max_checks', 'default': 10, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'The number of plugins that will run against each host being ' + 'tested. Note that the total number of process will be max ' + 'checks x max_hosts so you need to find a balance between ' + 'these two options. Note that launching too many plugins at ' + 'the same time may disable the remote host, either temporarily ' + '(ie: inetd closes its ports) or definitely (the remote host ' + 'crash because it is asked to do too many things at the ' + 'same time), so be careful.' 
), }, 'port_range': { 'type': 'string', 'name': 'port_range', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'This is the default range of ports that the scanner plugins will ' + 'probe. The syntax of this option is flexible, it can be a ' + 'single range ("1-1500"), several ports ("21,23,80"), several ' + 'ranges of ports ("1-1500,32000-33000"). Note that you can ' + 'specify UDP and TCP ports by prefixing each range by T or U. ' + 'For instance, the following range will make openvas scan UDP ' + 'ports 1 to 1024 and TCP ports 1 to 65535 : ' + '"T:1-65535,U:1-1024".' ), }, 'test_alive_hosts_only': { 'type': 'boolean', 'name': 'test_alive_hosts_only', 'default': 0, 'mandatory': 0, 'visible_for_client': False, 'description': ( 'If this option is set, openvas will scan the target list for ' + 'alive hosts in a separate process while only testing those ' + 'hosts which are identified as alive. This boosts the scan ' + 'speed of target ranges with a high amount of dead hosts ' + 'significantly.' ), }, 'source_iface': { 'type': 'string', 'name': 'source_iface', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Name of the network interface that will be used as the source ' + 'of connections established by openvas. The scan won\'t be ' + 'launched if the value isn\'t authorized according to ' + '(sys_)ifaces_allow / (sys_)ifaces_deny if present.' ), }, 'ifaces_allow': { 'type': 'string', 'name': 'ifaces_allow', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of interfaces names that are authorized ' + 'as source_iface values.' ), }, 'ifaces_deny': { 'type': 'string', 'name': 'ifaces_deny', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of interfaces names that are not ' + 'authorized as source_iface values.' 
), }, 'hosts_allow': { 'type': 'string', 'name': 'hosts_allow', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of the only targets that are authorized ' + 'to be scanned. Supports the same syntax as the list targets. ' + 'Both target hostnames and the address to which they resolve ' + 'are checked. Hostnames in hosts_allow list are not resolved ' + 'however.' ), }, 'hosts_deny': { 'type': 'string', 'name': 'hosts_deny', 'default': '', 'mandatory': 0, 'visible_for_client': False, 'description': ( 'Comma-separated list of targets that are not authorized to ' + 'be scanned. Supports the same syntax as the list targets. ' + 'Both target hostnames and the address to which they resolve ' + 'are checked. Hostnames in hosts_deny list are not ' + 'resolved however.' ), }, } ospd-openvas-21.4.3/tests/test_daemon.py000066400000000000000000000555471413127500500202530ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
# pylint: disable=line-too-long """ Unit Test for ospd-openvas """ import io import logging from unittest import TestCase from unittest.mock import patch, Mock, MagicMock from ospd.protocol import OspRequest from tests.dummydaemon import DummyDaemon from tests.helper import assert_called_once from ospd_openvas.daemon import ( OSPD_PARAMS, OpenVasVtsFilter, ) from ospd_openvas.openvas import Openvas from .helper import OSPD_PARAMS_OUT class TestOspdOpenvas(TestCase): @patch('ospd_openvas.daemon.Openvas') def test_set_params_from_openvas_settings(self, mock_openvas: Openvas): mock_openvas.get_settings.return_value = { 'non_simult_ports': '139, 445, 3389, Services/irc', 'plugins_folder': '/foo/bar', } dummy = DummyDaemon() dummy.set_params_from_openvas_settings() self.assertEqual(mock_openvas.get_settings.call_count, 1) self.assertEqual(OSPD_PARAMS, OSPD_PARAMS_OUT) self.assertEqual( dummy.scan_only_params.get('plugins_folder'), '/foo/bar' ) @patch('ospd_openvas.daemon.Openvas') def test_sudo_available(self, mock_openvas): mock_openvas.check_sudo.return_value = True dummy = DummyDaemon() dummy._sudo_available = None # pylint: disable=protected-access dummy._is_running_as_root = False # pylint: disable=protected-access self.assertTrue(dummy.sudo_available) def test_get_custom_xml(self): out = ( '' 'Services/www, 80' '3' 'Settings/disable_cgi_scanning' 'Product detection' 'mantis_detect.nasl' '0' '' ) dummy = DummyDaemon() vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] res = dummy.get_custom_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', vt.get('custom') ) self.assertEqual(len(res), len(out)) def test_get_custom_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() custom = {'a': u"\u0006"} dummy.get_custom_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', custom=custom ) assert_called_once(logging.Logger.warning) def test_get_severities_xml(self): dummy = DummyDaemon() out = ( '' '' 'AV:N/AC:L/Au:N/C:N/I:N/A:N' 'Greenbone' '1237458156' '' '' ) vt = 
dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] severities = vt.get('severities') res = dummy.get_severities_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', severities ) self.assertEqual(res, out) def test_get_severities_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() sever = {'severity_base_vector': u"\u0006"} dummy.get_severities_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', severities=sever ) assert_called_once(logging.Logger.warning) def test_get_params_xml(self): dummy = DummyDaemon() out = ( '' '' 'Do not randomize the order in which ports are ' 'scanned' 'no' '' '' 'Data length :' '' '' ) vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] params = vt.get('vt_params') res = dummy.get_params_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', params ) self.assertEqual(len(res), len(out)) def test_get_params_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() params = { '1': { 'id': '1', 'type': 'entry', 'default': u'\u0006', 'name': 'dns-fuzz.timelimit', 'description': 'Description', } } dummy.get_params_vt_as_xml_str('1.3.6.1.4.1.25623.1.0.100061', params) assert_called_once(logging.Logger.warning) def test_get_refs_xml(self): dummy = DummyDaemon() out = '' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] refs = vt.get('vt_refs') res = dummy.get_refs_vt_as_xml_str('1.3.6.1.4.1.25623.1.0.100061', refs) self.assertEqual(res, out) def test_get_dependencies_xml(self): dummy = DummyDaemon() out = ( '' '' '' '' ) dep = ['1.3.6.1.4.1.25623.1.2.3.4', '1.3.6.1.4.1.25623.4.3.2.1'] res = dummy.get_dependencies_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', dep ) self.assertEqual(res, out) def test_get_dependencies_xml_missing_dep(self): dummy = DummyDaemon() out = ( '' '' '' ) dep = ['1.3.6.1.4.1.25623.1.2.3.4', 'file_name.nasl'] res = dummy.get_dependencies_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', dep ) self.assertEqual(res, out) def test_get_dependencies_xml_failed(self): dummy = DummyDaemon() logging.Logger.error = Mock() dep = 
[u"\u0006"] dummy.get_dependencies_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', vt_dependencies=dep ) assert_called_once(logging.Logger.error) def test_get_ctime_xml(self): dummy = DummyDaemon() out = '1237458156' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] ctime = vt.get('creation_time') res = dummy.get_creation_time_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', ctime ) self.assertEqual(res, out) def test_get_ctime_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() ctime = u'\u0006' dummy.get_creation_time_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', vt_creation_time=ctime ) assert_called_once(logging.Logger.warning) def test_get_mtime_xml(self): dummy = DummyDaemon() out = '1533906565' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] mtime = vt.get('modification_time') res = dummy.get_modification_time_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', mtime ) self.assertEqual(res, out) def test_get_mtime_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() mtime = u'\u0006' dummy.get_modification_time_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', mtime ) assert_called_once(logging.Logger.warning) def test_get_summary_xml(self): dummy = DummyDaemon() out = 'some summary' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] summary = vt.get('summary') res = dummy.get_summary_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', summary ) self.assertEqual(res, out) def test_get_summary_xml_failed(self): dummy = DummyDaemon() summary = u'\u0006' logging.Logger.warning = Mock() dummy.get_summary_vt_as_xml_str('1.3.6.1.4.1.25623.1.0.100061', summary) assert_called_once(logging.Logger.warning) def test_get_impact_xml(self): dummy = DummyDaemon() out = 'some impact' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] impact = vt.get('impact') res = dummy.get_impact_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', impact ) self.assertEqual(res, out) def test_get_impact_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() impact = 
u'\u0006' dummy.get_impact_vt_as_xml_str('1.3.6.1.4.1.25623.1.0.100061', impact) assert_called_once(logging.Logger.warning) def test_get_insight_xml(self): dummy = DummyDaemon() out = 'some insight' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] insight = vt.get('insight') res = dummy.get_insight_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', insight ) self.assertEqual(res, out) def test_get_insight_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() insight = u'\u0006' dummy.get_insight_vt_as_xml_str('1.3.6.1.4.1.25623.1.0.100061', insight) assert_called_once(logging.Logger.warning) def test_get_solution_xml(self): dummy = DummyDaemon() out = ( '' 'some solution' '' ) vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] solution = vt.get('solution') solution_type = vt.get('solution_type') solution_method = vt.get('solution_method') res = dummy.get_solution_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', solution, solution_type, solution_method, ) self.assertEqual(res, out) def test_get_solution_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() solution = u'\u0006' dummy.get_solution_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', solution ) assert_called_once(logging.Logger.warning) def test_get_detection_xml(self): dummy = DummyDaemon() out = '' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] detection_type = vt.get('qod_type') res = dummy.get_detection_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', qod_type=detection_type ) self.assertEqual(res, out) def test_get_detection_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() detection = u'\u0006' dummy.get_detection_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', detection ) assert_called_once(logging.Logger.warning) def test_get_affected_xml(self): dummy = DummyDaemon() out = 'some affection' vt = dummy.VTS['1.3.6.1.4.1.25623.1.0.100061'] affected = vt.get('affected') res = dummy.get_affected_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', affected=affected ) 
self.assertEqual(res, out) def test_get_affected_xml_failed(self): dummy = DummyDaemon() logging.Logger.warning = Mock() affected = u"\u0006" + "affected" dummy.get_affected_vt_as_xml_str( '1.3.6.1.4.1.25623.1.0.100061', affected=affected ) assert_called_once(logging.Logger.warning) @patch('ospd_openvas.daemon.Path.exists') @patch('ospd_openvas.daemon.OSPDopenvas.set_params_from_openvas_settings') def test_feed_is_outdated_none( self, mock_set_params: MagicMock, mock_path_exists: MagicMock ): dummy = DummyDaemon() dummy.scan_only_params['plugins_folder'] = '/foo/bar' # Return None mock_path_exists.return_value = False ret = dummy.feed_is_outdated('1234') self.assertIsNone(ret) self.assertEqual(mock_set_params.call_count, 1) self.assertEqual(mock_path_exists.call_count, 1) @patch('ospd_openvas.daemon.Path.exists') @patch('ospd_openvas.daemon.Path.open') def test_feed_is_outdated_true( self, mock_path_open: MagicMock, mock_path_exists: MagicMock, ): read_data = 'PLUGIN_SET = "1235";' mock_path_exists.return_value = True mock_read = MagicMock(name='Path open context manager') mock_read.__enter__ = MagicMock(return_value=io.StringIO(read_data)) mock_path_open.return_value = mock_read dummy = DummyDaemon() # Return True dummy.scan_only_params['plugins_folder'] = '/foo/bar' ret = dummy.feed_is_outdated('1234') self.assertTrue(ret) self.assertEqual(mock_path_exists.call_count, 1) self.assertEqual(mock_path_open.call_count, 1) @patch('ospd_openvas.daemon.Path.exists') @patch('ospd_openvas.daemon.Path.open') def test_feed_is_outdated_false( self, mock_path_open: MagicMock, mock_path_exists: MagicMock, ): mock_path_exists.return_value = True read_data = 'PLUGIN_SET = "1234"' mock_path_exists.return_value = True mock_read = MagicMock(name='Path open context manager') mock_read.__enter__ = MagicMock(return_value=io.StringIO(read_data)) mock_path_open.return_value = mock_read dummy = DummyDaemon() dummy.scan_only_params['plugins_folder'] = '/foo/bar' ret = 
dummy.feed_is_outdated('1234') self.assertFalse(ret) self.assertEqual(mock_path_exists.call_count, 1) self.assertEqual(mock_path_open.call_count, 1) def test_check_feed_cache_unavailable(self): dummy = DummyDaemon() dummy.vts.is_cache_available = False dummy.feed_is_outdated = Mock() dummy.feed_is_outdated.assert_not_called() @patch('ospd_openvas.daemon.BaseDB') @patch('ospd_openvas.daemon.ResultList.add_scan_log_to_list') def test_get_openvas_result(self, mock_add_scan_log_to_list, db_class_mock): dummy = DummyDaemon() target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) results = [ "LOG|||192.168.0.1|||localhost|||general/Host_Details||||||Host" " dead", ] db_class_mock.get_result.return_value = results mock_add_scan_log_to_list.return_value = None dummy.report_openvas_results(db_class_mock, '123-456') mock_add_scan_log_to_list.assert_called_with( host='192.168.0.1', hostname='localhost', name='', port='general/Host_Details', qod='', test_id='', uri='', value='Host dead', ) @patch('ospd_openvas.daemon.BaseDB') @patch('ospd_openvas.daemon.ResultList.add_scan_error_to_list') def test_get_openvas_result_host_deny( self, mock_add_scan_error_to_list, db_class_mock ): dummy = DummyDaemon() target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) results = [ "ERRMSG|||127.0.0.1|||localhost|||||||||Host access denied.", ] db_class_mock.get_result.return_value = results mock_add_scan_error_to_list.return_value = None dummy.report_openvas_results(db_class_mock, '123-456') mock_add_scan_error_to_list.assert_called_with( host='127.0.0.1', hostname='localhost', name='', port='', test_id='', uri='', value='Host access denied.', ) @patch('ospd_openvas.daemon.BaseDB') def test_get_openvas_result_dead_hosts(self, db_class_mock): dummy = DummyDaemon() target_element = 
dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) results = [ "DEADHOST||| ||| ||| ||| |||4", ] db_class_mock.get_result.return_value = results dummy.scan_collection.set_amount_dead_hosts = MagicMock() dummy.report_openvas_results(db_class_mock, '123-456') dummy.scan_collection.set_amount_dead_hosts.assert_called_with( '123-456', total_dead=4, ) @patch('ospd_openvas.daemon.BaseDB') @patch('ospd_openvas.daemon.ResultList.add_scan_log_to_list') def test_get_openvas_result_host_start( self, mock_add_scan_log_to_list, db_class_mock ): dummy = DummyDaemon() target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) results = [ "HOST_START|||192.168.10.124||| ||| ||||||today 1", ] db_class_mock.get_result.return_value = results mock_add_scan_log_to_list.return_value = None dummy.report_openvas_results(db_class_mock, '123-456') mock_add_scan_log_to_list.assert_called_with( host='192.168.10.124', name='HOST_START', value='today 1', ) @patch('ospd_openvas.daemon.BaseDB') def test_get_openvas_result_hosts_count(self, db_class_mock): dummy = DummyDaemon() target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) results = [ "HOSTS_COUNT||| ||| ||| ||| |||4", ] db_class_mock.get_result.return_value = results dummy.set_scan_total_hosts = MagicMock() dummy.report_openvas_results(db_class_mock, '123-456') dummy.set_scan_total_hosts.assert_called_with( '123-456', 4, ) @patch('ospd_openvas.daemon.BaseDB') @patch('ospd_openvas.daemon.ResultList.add_scan_alarm_to_list') def test_result_without_vt_oid( self, mock_add_scan_alarm_to_list, db_class_mock ): dummy = DummyDaemon() logging.Logger.warning = Mock() target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) 
dummy.create_scan('123-456', targets, None, []) dummy.scan_collection.scans_table['123-456']['results'] = list() results = ["ALARM||| ||| ||| ||| |||some alarm|||path", None] db_class_mock.get_result.return_value = results mock_add_scan_alarm_to_list.return_value = None dummy.report_openvas_results(db_class_mock, '123-456') assert_called_once(logging.Logger.warning) @patch('psutil.Popen') def test_openvas_is_alive_already_stopped(self, mock_process): dummy = DummyDaemon() mock_process.is_running.return_value = True ret = dummy.is_openvas_process_alive(mock_process) self.assertTrue(ret) @patch('psutil.Popen') def test_openvas_is_alive_still(self, mock_process): dummy = DummyDaemon() mock_process.is_running.return_value = False ret = dummy.is_openvas_process_alive(mock_process) self.assertFalse(ret) @patch('ospd_openvas.daemon.OSPDaemon.set_scan_progress_batch') @patch('ospd_openvas.daemon.OSPDaemon.sort_host_finished') @patch('ospd_openvas.db.KbDB') def test_report_openvas_scan_status( self, mock_db, mock_sort_host_finished, mock_set_scan_progress_batch ): dummy = DummyDaemon() mock_set_scan_progress_batch.return_value = None mock_sort_host_finished.return_value = None mock_db.get_scan_status.return_value = [ '192.168.0.1/15/1000', '192.168.0.2/15/0', '192.168.0.3/15/-1', '192.168.0.4/1500/1500', ] target_element = dummy.create_xml_target() targets = OspRequest.process_target_element(target_element) dummy.create_scan('123-456', targets, None, []) dummy.report_openvas_scan_status(mock_db, '123-456') mock_set_scan_progress_batch.assert_called_with( '123-456', host_progress={ '192.168.0.1': 1, '192.168.0.3': -1, '192.168.0.4': 100, }, ) mock_sort_host_finished.assert_called_with( '123-456', ['192.168.0.3', '192.168.0.4'] ) class TestFilters(TestCase): def test_format_vt_modification_time(self): ovformat = OpenVasVtsFilter(None) td = '1517443741' formatted = ovformat.format_vt_modification_time(td) self.assertEqual(formatted, "20180201000901") def 
test_get_filtered_vts_false(self): dummy = DummyDaemon() vts_collection = ['1234', '1.3.6.1.4.1.25623.1.0.100061'] ovfilter = OpenVasVtsFilter(dummy.nvti) res = ovfilter.get_filtered_vts_list( vts_collection, "modification_time<10" ) self.assertNotIn('1.3.6.1.4.1.25623.1.0.100061', res) def test_get_filtered_vts_true(self): dummy = DummyDaemon() vts_collection = ['1234', '1.3.6.1.4.1.25623.1.0.100061'] ovfilter = OpenVasVtsFilter(dummy.nvti) res = ovfilter.get_filtered_vts_list( vts_collection, "modification_time>10" ) self.assertIn('1.3.6.1.4.1.25623.1.0.100061', res) def test_get_severity_score_v2(self): dummy = DummyDaemon() vtaux = { 'severities': { 'severity_type': 'cvss_base_v2', 'severity_base_vector': 'AV:N/AC:L/Au:N/C:P/I:N/A:N', } } dummy.get_severity_score(vtaux) self.assertEqual(dummy.get_severity_score(vtaux), 5.0) def test_get_severity_score_v3(self): dummy = DummyDaemon() vtaux = { 'severities': { 'severity_type': 'cvss_base_v3', 'severity_base_vector': ( 'CVSS:3.0/AV:L/AC:H/PR:H/UI:R/S:U/C:N/I:L/A:L' ), } } self.assertEqual(dummy.get_severity_score(vtaux), 2.9) ospd-openvas-21.4.3/tests/test_db.py000066400000000000000000000550671413127500500173720ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
# pylint: disable=unused-argument """ Unit Test for ospd-openvas """ import logging from unittest import TestCase from unittest.mock import patch, MagicMock from redis.exceptions import ConnectionError as RCE from ospd.errors import RequiredArgument from ospd_openvas.db import OpenvasDB, MainDB, ScanDB, KbDB, DBINDEX_NAME, time from ospd_openvas.errors import OspdOpenvasError from tests.helper import assert_called @patch('ospd_openvas.db.redis.Redis') class TestOpenvasDB(TestCase): @patch('ospd_openvas.db.Openvas') def test_get_db_connection( self, mock_openvas: MagicMock, mock_redis: MagicMock ): OpenvasDB._db_address = None # pylint: disable=protected-access mock_settings = mock_openvas.get_settings.return_value mock_settings.get.return_value = None self.assertIsNone(OpenvasDB.get_database_address()) # set the first time mock_openvas.get_settings.return_value = {'db_address': '/foo/bar'} self.assertEqual(OpenvasDB.get_database_address(), "/foo/bar") self.assertEqual(mock_openvas.get_settings.call_count, 2) # should cache address self.assertEqual(OpenvasDB.get_database_address(), "/foo/bar") self.assertEqual(mock_openvas.get_settings.call_count, 2) def test_create_context_fail(self, mock_redis): mock_redis.side_effect = RCE logging.Logger.error = MagicMock() with patch.object(time, 'sleep', return_value=None): with self.assertRaises(SystemExit): OpenvasDB.create_context() logging.Logger.error.assert_called_with( # pylint: disable=no-member 'Redis Error: Not possible to connect to the kb.' 
) def test_create_context_success(self, mock_redis): ctx = mock_redis.return_value ret = OpenvasDB.create_context() self.assertIs(ret, ctx) def test_select_database_error(self, mock_redis): with self.assertRaises(RequiredArgument): OpenvasDB.select_database(None, 1) with self.assertRaises(RequiredArgument): OpenvasDB.select_database(mock_redis, None) def test_select_database(self, mock_redis): mock_redis.execute_command.return_value = mock_redis OpenvasDB.select_database(mock_redis, 1) mock_redis.execute_command.assert_called_with('SELECT 1') def test_get_list_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.get_list_item(None, 'foo') with self.assertRaises(RequiredArgument): OpenvasDB.get_list_item(ctx, None) def test_get_list_item(self, mock_redis): ctx = mock_redis.return_value ctx.lrange.return_value = ['1234'] ret = OpenvasDB.get_list_item(ctx, 'name') self.assertEqual(ret, ['1234']) assert_called(ctx.lrange) def test_get_last_list_item(self, mock_redis): ctx = mock_redis.return_value ctx.rpop.return_value = 'foo' ret = OpenvasDB.get_last_list_item(ctx, 'name') self.assertEqual(ret, 'foo') ctx.rpop.assert_called_with('name') def test_get_last_list_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.get_last_list_item(ctx, None) with self.assertRaises(RequiredArgument): OpenvasDB.get_last_list_item(None, 'name') def test_remove_list_item(self, mock_redis): ctx = mock_redis.return_value ctx.lrem.return_value = 1 OpenvasDB.remove_list_item(ctx, 'name', '1234') ctx.lrem.assert_called_once_with('name', count=0, value='1234') def test_remove_list_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.remove_list_item(None, '1', 'bar') with self.assertRaises(RequiredArgument): OpenvasDB.remove_list_item(ctx, None, 'bar') with self.assertRaises(RequiredArgument): 
OpenvasDB.remove_list_item(ctx, '1', None) def test_get_single_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.get_single_item(None, 'foo') with self.assertRaises(RequiredArgument): OpenvasDB.get_single_item(ctx, None) def test_get_single_item(self, mock_redis): ctx = mock_redis.return_value ctx.lindex.return_value = 'a' value = OpenvasDB.get_single_item(ctx, 'a') self.assertEqual(value, 'a') ctx.lindex.assert_called_once_with('a', 0) def test_add_single_list(self, mock_redis): ctx = mock_redis.return_value pipeline = ctx.pipeline.return_value pipeline.delete.return_value = None pipeline.execute.return_value = (None, 0) OpenvasDB.add_single_list(ctx, 'a', ['12', '11', '12']) pipeline.delete.assert_called_once_with('a') pipeline.rpush.assert_called_once_with('a', '12', '11', '12') assert_called(pipeline.execute) def test_add_single_item(self, mock_redis): ctx = mock_redis.return_value ctx.rpush.return_value = 1 OpenvasDB.add_single_item(ctx, 'a', ['12', '12']) ctx.rpush.assert_called_once_with('a', '12') def test_add_single_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.add_single_item(None, '1', ['12']) with self.assertRaises(RequiredArgument): OpenvasDB.add_single_item(ctx, None, ['12']) with self.assertRaises(RequiredArgument): OpenvasDB.add_single_item(ctx, '1', None) def test_set_single_item_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.set_single_item(None, '1', ['12']) with self.assertRaises(RequiredArgument): OpenvasDB.set_single_item(ctx, None, ['12']) with self.assertRaises(RequiredArgument): OpenvasDB.set_single_item(ctx, '1', None) def test_pop_list_items_no_results(self, mock_redis): ctx = mock_redis.return_value pipeline = ctx.pipeline.return_value pipeline.lrange.return_value = None pipeline.delete.return_value = None pipeline.execute.return_value = (None, 0) ret = 
OpenvasDB.pop_list_items(ctx, 'foo') self.assertEqual(ret, []) pipeline.lrange.assert_called_once_with('foo', 0, -1) pipeline.delete.assert_called_once_with('foo') assert_called(pipeline.execute) def test_pop_list_items_with_results(self, mock_redis): ctx = mock_redis.return_value pipeline = ctx.pipeline.return_value pipeline.lrange.return_value = None pipeline.delete.return_value = None pipeline.execute.return_value = [['c', 'b', 'a'], 2] ret = OpenvasDB.pop_list_items(ctx, 'results') # reversed list self.assertEqual(ret, ['a', 'b', 'c']) pipeline.lrange.assert_called_once_with('results', 0, -1) pipeline.delete.assert_called_once_with('results') assert_called(pipeline.execute) def test_set_single_item(self, mock_redis): ctx = mock_redis.return_value pipeline = ctx.pipeline.return_value pipeline.delete.return_value = None pipeline.rpush.return_value = None pipeline.execute.return_value = None OpenvasDB.set_single_item(ctx, 'foo', ['bar']) pipeline.delete.assert_called_once_with('foo') pipeline.rpush.assert_called_once_with('foo', 'bar') assert_called(pipeline.execute) def test_get_pattern(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = ['a', 'b'] ctx.lrange.return_value = [1, 2, 3] ret = OpenvasDB.get_pattern(ctx, 'a') self.assertEqual(ret, [['a', [1, 2, 3]], ['b', [1, 2, 3]]]) def test_get_pattern_error(self, mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.get_pattern(None, 'a') with self.assertRaises(RequiredArgument): OpenvasDB.get_pattern(ctx, None) def test_get_filenames_and_oids_error(self, mock_redis): with self.assertRaises(RequiredArgument): OpenvasDB.get_filenames_and_oids(None) def test_get_filenames_and_oids(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = ['nvt:1', 'nvt:2'] ctx.lindex.side_effect = ['aa', 'ab'] ret = OpenvasDB.get_filenames_and_oids(ctx) self.assertEqual(list(ret), [('aa', '1'), ('ab', '2')]) def test_get_keys_by_pattern_error(self, 
mock_redis): ctx = mock_redis.return_value with self.assertRaises(RequiredArgument): OpenvasDB.get_keys_by_pattern(None, 'a') with self.assertRaises(RequiredArgument): OpenvasDB.get_keys_by_pattern(ctx, None) def test_get_keys_by_pattern(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = ['nvt:2', 'nvt:1'] ret = OpenvasDB.get_keys_by_pattern(ctx, 'nvt:*') # Return sorted list self.assertEqual(ret, ['nvt:1', 'nvt:2']) def test_get_key_count(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = ['aa', 'ab'] ret = OpenvasDB.get_key_count(ctx, "foo") self.assertEqual(ret, 2) ctx.keys.assert_called_with('foo') def test_get_key_count_with_default_pattern(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = ['aa', 'ab'] ret = OpenvasDB.get_key_count(ctx) self.assertEqual(ret, 2) ctx.keys.assert_called_with('*') def test_get_key_count_error(self, mock_redis): with self.assertRaises(RequiredArgument): OpenvasDB.get_key_count(None) def test_find_database_by_pattern_none(self, mock_redis): ctx = mock_redis.return_value ctx.keys.return_value = None new_ctx, index = OpenvasDB.find_database_by_pattern('foo*', 123) self.assertIsNone(new_ctx) self.assertIsNone(index) def test_find_database_by_pattern(self, mock_redis): ctx = mock_redis.return_value # keys is called twice per iteration ctx.keys.side_effect = [None, None, None, None, True, True] new_ctx, index = OpenvasDB.find_database_by_pattern('foo*', 123) self.assertEqual(new_ctx, ctx) self.assertEqual(index, 2) @patch('ospd_openvas.db.OpenvasDB') class ScanDBTestCase(TestCase): @patch('ospd_openvas.db.redis.Redis') def setUp(self, mock_redis): # pylint: disable=arguments-differ self.ctx = mock_redis.return_value self.db = ScanDB(10, self.ctx) def test_get_result(self, mock_openvas_db): mock_openvas_db.pop_list_items.return_value = [ 'some result', ] ret = self.db.get_result() self.assertEqual( ret, [ 'some result', ], ) 
mock_openvas_db.pop_list_items.assert_called_with( self.ctx, 'internal/results' ) def test_get_status(self, mock_openvas_db): mock_openvas_db.get_single_item.return_value = 'some status' ret = self.db.get_status('foo') self.assertEqual(ret, 'some status') mock_openvas_db.get_single_item.assert_called_with( self.ctx, 'internal/foo' ) def test_select(self, mock_openvas_db): ret = self.db.select(11) self.assertIs(ret, self.db) self.assertEqual(self.db.index, 11) mock_openvas_db.select_database.assert_called_with(self.ctx, 11) def test_flush(self, mock_openvas_db): self.db.flush() self.ctx.flushdb.assert_called_with() @patch('ospd_openvas.db.OpenvasDB') class KbDBTestCase(TestCase): @patch('ospd_openvas.db.redis.Redis') def setUp(self, mock_redis): # pylint: disable=arguments-differ self.ctx = mock_redis.return_value self.db = KbDB(10, self.ctx) def test_get_result(self, mock_openvas_db): mock_openvas_db.pop_list_items.return_value = [ 'some results', ] ret = self.db.get_result() self.assertEqual( ret, [ 'some results', ], ) mock_openvas_db.pop_list_items.assert_called_with( self.ctx, 'internal/results' ) def test_get_status(self, mock_openvas_db): mock_openvas_db.get_single_item.return_value = 'some status' ret = self.db.get_status('foo') self.assertEqual(ret, 'some status') mock_openvas_db.get_single_item.assert_called_with( self.ctx, 'internal/foo' ) def test_get_scan_status(self, mock_openvas_db): status = [ '192.168.0.1/10/120', '192.168.0.2/35/120', ] mock_openvas_db.pop_list_items.return_value = status ret = self.db.get_scan_status() self.assertEqual(ret, status) mock_openvas_db.pop_list_items.assert_called_with( self.ctx, 'internal/status' ) def test_flush(self, mock_openvas_db): self.db.flush() self.ctx.flushdb.assert_called_with() def test_add_scan_id(self, mock_openvas_db): self.db.add_scan_id('bar') calls = mock_openvas_db.add_single_item.call_args_list call = calls[0] kwargs = call[0] self.assertEqual(kwargs[1], 'internal/bar') self.assertEqual(kwargs[2], 
['new']) call = calls[1] kwargs = call[0] self.assertEqual(kwargs[1], 'internal/scanid') self.assertEqual(kwargs[2], ['bar']) def test_add_scan_preferences(self, mock_openvas_db): prefs = ['foo', 'bar'] self.db.add_scan_preferences('foo', prefs) mock_openvas_db.add_single_item.assert_called_with( self.ctx, 'internal/foo/scanprefs', prefs ) @patch('ospd_openvas.db.OpenvasDB') def test_add_credentials_to_scan_preferences( self, mock_redis, mock_openvas_db ): prefs = ['foo', 'bar'] ctx = mock_redis.return_value mock_openvas_db.create_context.return_value = ctx self.db.add_credentials_to_scan_preferences('scan_id', prefs) mock_openvas_db.create_context.assert_called_with( self.db.index, encoding='utf-8' ) mock_openvas_db.add_single_item.assert_called_with( ctx, 'internal/scan_id/scanprefs', prefs ) def test_add_scan_process_id(self, mock_openvas_db): self.db.add_scan_process_id(123) mock_openvas_db.add_single_item.assert_called_with( self.ctx, 'internal/ovas_pid', [123] ) def test_get_scan_process_id(self, mock_openvas_db): mock_openvas_db.get_single_item.return_value = '123' ret = self.db.get_scan_process_id() self.assertEqual(ret, '123') mock_openvas_db.get_single_item.assert_called_with( self.ctx, 'internal/ovas_pid' ) def test_remove_scan_database(self, mock_openvas_db): scan_db = MagicMock(spec=ScanDB) scan_db.index = 123 self.db.remove_scan_database(scan_db) mock_openvas_db.remove_list_item.assert_called_with( self.ctx, 'internal/dbindex', 123 ) def test_target_is_finished_false(self, mock_openvas_db): mock_openvas_db.get_single_item.side_effect = ['new'] ret = self.db.target_is_finished('bar') self.assertFalse(ret) calls = mock_openvas_db.get_single_item.call_args_list call = calls[0] args = call[0] self.assertEqual(args[1], 'internal/bar') def test_target_is_finished_true(self, mock_openvas_db): mock_openvas_db.get_single_item.side_effect = ['finished'] ret = self.db.target_is_finished('bar') self.assertTrue(ret) calls = 
mock_openvas_db.get_single_item.call_args_list call = calls[0] args = call[0] self.assertEqual(args[1], 'internal/bar') def test_stop_scan(self, mock_openvas_db): self.db.stop_scan('foo') mock_openvas_db.set_single_item.assert_called_with( self.ctx, 'internal/foo', ['stop_all'] ) def test_scan_is_stopped_false(self, mock_openvas_db): mock_openvas_db.get_single_item.return_value = 'new' ret = self.db.scan_is_stopped('foo') self.assertFalse(ret) mock_openvas_db.get_single_item.assert_called_with( self.ctx, 'internal/foo' ) def test_scan_is_stopped_true(self, mock_openvas_db): mock_openvas_db.get_single_item.return_value = 'stop_all' ret = self.db.scan_is_stopped('foo') self.assertTrue(ret) mock_openvas_db.get_single_item.assert_called_with( self.ctx, 'internal/foo' ) def test_get_scan_databases(self, mock_openvas_db): mock_openvas_db.get_list_item.return_value = [ '4', self.db.index, '7', '11', ] scan_dbs = self.db.get_scan_databases() scan_db = next(scan_dbs) self.assertEqual(scan_db.index, '4') scan_db = next(scan_dbs) self.assertEqual(scan_db.index, '7') scan_db = next(scan_dbs) self.assertEqual(scan_db.index, '11') with self.assertRaises(StopIteration): next(scan_dbs) @patch('ospd_openvas.db.redis.Redis') class MainDBTestCase(TestCase): def test_max_database_index_fail(self, mock_redis): ctx = mock_redis.return_value ctx.config_get.return_value = {} maindb = MainDB(ctx) with self.assertRaises(OspdOpenvasError): max_db = ( # pylint: disable=unused-variable maindb.max_database_index ) ctx.config_get.assert_called_with('databases') def test_max_database_index(self, mock_redis): ctx = mock_redis.return_value ctx.config_get.return_value = {'databases': '123'} maindb = MainDB(ctx) max_db = maindb.max_database_index self.assertEqual(max_db, 123) ctx.config_get.assert_called_with('databases') def test_try_database_success(self, mock_redis): ctx = mock_redis.return_value ctx.hsetnx.return_value = 1 maindb = MainDB(ctx) ret = maindb.try_database(1) self.assertEqual(ret, 
True) ctx.hsetnx.assert_called_with(DBINDEX_NAME, 1, 1) def test_try_database_false(self, mock_redis): ctx = mock_redis.return_value ctx.hsetnx.return_value = 0 maindb = MainDB(ctx) ret = maindb.try_database(1) self.assertEqual(ret, False) ctx.hsetnx.assert_called_with(DBINDEX_NAME, 1, 1) def test_try_db_index_error(self, mock_redis): ctx = mock_redis.return_value ctx.hsetnx.side_effect = Exception maindb = MainDB(ctx) with self.assertRaises(OspdOpenvasError): maindb.try_database(1) def test_release_database_by_index(self, mock_redis): ctx = mock_redis.return_value ctx.hdel.return_value = 1 maindb = MainDB(ctx) maindb.release_database_by_index(3) ctx.hdel.assert_called_once_with(DBINDEX_NAME, 3) def test_release_database(self, mock_redis): ctx = mock_redis.return_value ctx.hdel.return_value = 1 db = MagicMock() db.index = 3 maindb = MainDB(ctx) maindb.release_database(db) ctx.hdel.assert_called_once_with(DBINDEX_NAME, 3) db.flush.assert_called_with() def test_release(self, mock_redis): ctx = mock_redis.return_value maindb = MainDB(ctx) maindb.release() ctx.hdel.assert_called_with(DBINDEX_NAME, maindb.index) ctx.flushdb.assert_called_with() def test_get_new_kb_database(self, mock_redis): ctx = mock_redis.return_value maindb = MainDB(ctx) maindb._max_dbindex = 123 # pylint: disable=protected-access ctx.hsetnx.side_effect = [0, 0, 1] kbdb = maindb.get_new_kb_database() self.assertEqual(kbdb.index, 3) ctx.flushdb.assert_called_once_with() def test_get_new_kb_database_none(self, mock_redis): ctx = mock_redis.return_value maindb = MainDB(ctx) maindb._max_dbindex = 3 # pylint: disable=protected-access ctx.hsetnx.side_effect = [0, 0, 0] kbdb = maindb.get_new_kb_database() self.assertIsNone(kbdb) ctx.flushdb.assert_not_called() @patch('ospd_openvas.db.OpenvasDB') def test_find_kb_database_by_scan_id_none( self, mock_openvas_db, mock_redis ): ctx = mock_redis.return_value new_ctx = 'bar' # just some object to compare mock_openvas_db.create_context.return_value = new_ctx 
mock_openvas_db.get_key_count.return_value = None maindb = MainDB(ctx) maindb._max_dbindex = 2 # pylint: disable=protected-access kbdb = maindb.find_kb_database_by_scan_id('foo') mock_openvas_db.get_key_count.assert_called_once_with( new_ctx, 'internal/foo' ) self.assertIsNone(kbdb) @patch('ospd_openvas.db.OpenvasDB') def test_find_kb_database_by_scan_id(self, mock_openvas_db, mock_redis): ctx = mock_redis.return_value new_ctx = 'foo' # just some object to compare mock_openvas_db.create_context.return_value = new_ctx mock_openvas_db.get_key_count.side_effect = [0, 1] maindb = MainDB(ctx) maindb._max_dbindex = 3 # pylint: disable=protected-access kbdb = maindb.find_kb_database_by_scan_id('foo') mock_openvas_db.get_key_count.assert_called_with( new_ctx, 'internal/foo' ) self.assertEqual(kbdb.index, 2) self.assertIs(kbdb.ctx, new_ctx) ospd-openvas-21.4.3/tests/test_lock.py000066400000000000000000000070731413127500500177270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
import unittest import shutil import tempfile from pathlib import Path, PosixPath from unittest.mock import patch, MagicMock from ospd_openvas.lock import LockFile from .helper import assert_called_once, assert_called class LockFileTestCase(unittest.TestCase): def setUp(self): self.temp_dir = Path(tempfile.mkdtemp()) def tearDown(self): shutil.rmtree(str(self.temp_dir)) def test_acquire_lock(self): lock_file_path = self.temp_dir / "test.lock" lock_file = LockFile(lock_file_path) lock_file._acquire_lock() # pylint: disable = protected-access self.assertTrue(lock_file.has_lock()) self.assertTrue(lock_file_path.exists()) lock_file._release_lock() # pylint: disable = protected-access @patch("ospd_openvas.lock.logger") def test_already_locked(self, mock_logger): lock_file_path = self.temp_dir / "test.lock" lock_file_aux = LockFile(lock_file_path) lock_file_aux._acquire_lock() # pylint: disable = protected-access self.assertTrue(lock_file_aux.has_lock()) lock_file = LockFile(lock_file_path) lock_file._acquire_lock() # pylint: disable = protected-access self.assertFalse(lock_file.has_lock()) assert_called(mock_logger.debug) lock_file_aux._release_lock() # pylint: disable = protected-access def test_create_parent_dirs(self): lock_file_path = self.temp_dir / "foo" / "bar" / "test.lock" lock_file = LockFile(lock_file_path) lock_file._acquire_lock() # pylint: disable = protected-access self.assertTrue(lock_file.has_lock()) self.assertTrue(lock_file_path.exists()) self.assertTrue(lock_file_path.parent.is_dir()) self.assertTrue(lock_file_path.parent.parent.is_dir()) lock_file._release_lock() # pylint: disable = protected-access @patch("ospd_openvas.lock.logger") def test_create_paren_dirs_fail(self, mock_logger): lock_file_path = MagicMock(spec=Path).return_value parent = MagicMock(spec=PosixPath) lock_file_path.parent = parent parent.mkdir.side_effect = PermissionError lock_file = LockFile(lock_file_path) lock_file._acquire_lock() # pylint: disable = protected-access 
self.assertFalse(lock_file.has_lock()) assert_called_once(mock_logger.error) def test_context_manager(self): lock_file_path = self.temp_dir / "test.lock" lock_file = LockFile(lock_file_path) with lock_file: self.assertTrue(lock_file.has_lock()) self.assertTrue(lock_file_path.is_file()) lock_file._release_lock() # pylint: disable = protected-access # The file is not removed self.assertFalse(lock_file.has_lock()) self.assertTrue(lock_file_path.is_file()) ospd-openvas-21.4.3/tests/test_nvti_cache.py000066400000000000000000000265541413127500500211070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
# pylint: disable=unused-argument, protected-access, invalid-name """ Unit Test for ospd-openvas """ import logging from unittest import TestCase from unittest.mock import patch, Mock, PropertyMock from pathlib import Path from ospd_openvas.nvticache import NVTICache, NVTI_CACHE_NAME from tests.helper import assert_called @patch('ospd_openvas.nvticache.OpenvasDB') class TestNVTICache(TestCase): @patch('ospd_openvas.db.MainDB') def setUp(self, MockMainDB): # pylint: disable=arguments-differ self.db = MockMainDB() self.nvti = NVTICache(self.db) self.nvti._ctx = 'foo' def test_set_index(self, MockOpenvasDB): self.nvti._ctx = None MockOpenvasDB.find_database_by_pattern.return_value = ('foo', 22) ctx = self.nvti.ctx self.assertIsNotNone(ctx) self.assertEqual(ctx, 'foo') self.assertEqual(self.nvti.index, 22) def test_get_feed_version(self, MockOpenvasDB): MockOpenvasDB.get_single_item.return_value = '1234' resp = self.nvti.get_feed_version() self.assertEqual(resp, '1234') MockOpenvasDB.get_single_item.assert_called_with('foo', NVTI_CACHE_NAME) def test_get_feed_version_not_available(self, MockOpenvasDB): pmock = PropertyMock(return_value=123) type(self.db).max_database_index = pmock self.nvti._ctx = None MockOpenvasDB.find_database_by_pattern.return_value = (None, None) resp = self.nvti.get_feed_version() self.assertIsNone(resp) MockOpenvasDB.find_database_by_pattern.assert_called_with( NVTI_CACHE_NAME, 123 ) def test_get_oids(self, MockOpenvasDB): MockOpenvasDB.get_filenames_and_oids.return_value = ['oids'] resp = self.nvti.get_oids() self.assertEqual(resp, ['oids']) def test_parse_metadata_tag_missing_value(self, MockOpenvasDB): logging.Logger.error = Mock() tags = 'tag1' ret = ( NVTICache._parse_metadata_tags( # pylint: disable=protected-access tags, '1.2.3' ) ) self.assertEqual(ret, {}) assert_called(logging.Logger.error) def test_parse_metadata_tag(self, MockOpenvasDB): tags = 'tag1=value1' ret = ( NVTICache._parse_metadata_tags( # pylint: disable=protected-access 
tags, '1.2.3' ) ) self.assertEqual(ret, {'tag1': 'value1'}) def test_parse_metadata_tags(self, MockOpenvasDB): tags = 'tag1=value1|foo=bar' ret = ( NVTICache._parse_metadata_tags( # pylint: disable=protected-access tags, '1.2.3' ) ) self.assertEqual(ret, {'tag1': 'value1', 'foo': 'bar'}) def test_get_nvt_params(self, MockOpenvasDB): prefs1 = ['1|||dns-fuzz.timelimit|||entry|||default'] prefs2 = ['1|||dns-fuzz.timelimit|||entry|||'] prefs3 = ['1|||dns-fuzz.timelimit|||entry'] out_dict1 = { '1': { 'id': '1', 'type': 'entry', 'default': 'default', 'name': 'dns-fuzz.timelimit', 'description': 'Description', }, } out_dict2 = { '1': { 'id': '1', 'type': 'entry', 'default': '', 'name': 'dns-fuzz.timelimit', 'description': 'Description', }, } MockOpenvasDB.get_list_item.return_value = prefs1 resp = self.nvti.get_nvt_params('1.2.3.4') self.assertEqual(resp, out_dict1) MockOpenvasDB.get_list_item.return_value = prefs2 resp = self.nvti.get_nvt_params('1.2.3.4') self.assertEqual(resp, out_dict2) MockOpenvasDB.get_list_item.return_value = prefs3 resp = self.nvti.get_nvt_params('1.2.3.4') self.assertEqual(resp, out_dict2) def test_get_nvt_metadata(self, MockOpenvasDB): metadata = [ 'mantis_detect.nasl', '', '', 'Settings/disable_cgi_scanning', '', 'Services/www, 80', 'find_service.nasl, http_version.nasl', 'cvss_base_vector=AV:N/AC:L/Au:N/C:N/I:N' '/A:N|last_modification=1533906565' '|creation_date=1237458156' '|summary=Detects the ins' 'talled version of\n Mantis a free popular web-based ' 'bugtracking system.\n\n This script sends HTTP GET r' 'equest and try to get the version from the\n respons' 'e, and sets the result in KB.|qod_type=remote_banner', '', '', 'URL:http://www.mantisbt.org/', '3', '10', 'Product detection', 'Mantis Detection', ] custom = { 'category': '3', 'creation_date': '1237458156', 'cvss_base_vector': 'AV:N/AC:L/Au:N/C:N/I:N/A:N', 'dependencies': 'find_service.nasl, http_version.nasl', 'excluded_keys': 'Settings/disable_cgi_scanning', 'family': 'Product 
detection', 'filename': 'mantis_detect.nasl', 'last_modification': ('1533906565'), 'name': 'Mantis Detection', 'qod_type': 'remote_banner', 'refs': {'xref': ['URL:http://www.mantisbt.org/']}, 'required_ports': 'Services/www, 80', 'summary': ( 'Detects the installed version of\n Mantis a ' 'free popular web-based bugtracking system.\n' '\n This script sends HTTP GET request and t' 'ry to get the version from the\n response, ' 'and sets the result in KB.' ), 'vt_params': { '0': { 'id': '0', 'type': 'entry', 'name': 'timeout', 'description': 'Script Timeout', 'default': '10', }, '1': { 'id': '1', 'type': 'entry', 'name': 'dns-fuzz.timelimit', 'description': 'Description', 'default': 'default', }, }, } prefs1 = ['1|||dns-fuzz.timelimit|||entry|||default'] MockOpenvasDB.get_list_item.side_effect = [metadata, prefs1] resp = self.nvti.get_nvt_metadata('1.2.3.4') self.maxDiff = None self.assertEqual(resp, custom) def test_get_nvt_metadata_fail(self, MockOpenvasDB): MockOpenvasDB.get_list_item.return_value = [] resp = self.nvti.get_nvt_metadata('1.2.3.4') self.assertIsNone(resp) def test_get_nvt_refs(self, MockOpenvasDB): refs = ['', '', 'URL:http://www.mantisbt.org/'] out_dict = { 'cve': [''], 'bid': [''], 'xref': ['URL:http://www.mantisbt.org/'], } MockOpenvasDB.get_list_item.return_value = refs resp = self.nvti.get_nvt_refs('1.2.3.4') self.assertEqual(resp, out_dict) def test_get_nvt_refs_fail(self, MockOpenvasDB): MockOpenvasDB.get_list_item.return_value = [] resp = self.nvti.get_nvt_refs('1.2.3.4') self.assertIsNone(resp) def test_get_nvt_prefs(self, MockOpenvasDB): prefs = ['dns-fuzz.timelimit|||entry|||default'] MockOpenvasDB.get_list_item.return_value = prefs resp = self.nvti.get_nvt_prefs('1.2.3.4') self.assertEqual(resp, prefs) def test_get_nvt_timeout(self, MockOpenvasDB): MockOpenvasDB.get_single_item.return_value = '300' resp = self.nvti.get_nvt_timeout('1.2.3.4') self.assertEqual(resp, '300') def test_get_nvt_tags(self, MockOpenvasDB): tag = ( 
'last_modification=1533906565' '|creation_date=1517443741|cvss_bas' 'e_vector=AV:N/AC:L/Au:N/C:P/I:P/A:P|solution_type=V' 'endorFix|qod_type=package|affected=rubygems on Debi' 'an Linux|solution_method=DebianAPTUpgrade' ) out_dict = { 'last_modification': '1533906565', 'creation_date': '1517443741', 'cvss_base_vector': 'AV:N/AC:L/Au:N/C:P/I:P/A:P', 'solution_type': 'VendorFix', 'qod_type': 'package', 'affected': 'rubygems on Debian Linux', 'solution_method': 'DebianAPTUpgrade', } MockOpenvasDB.get_single_item.return_value = tag resp = self.nvti.get_nvt_tags('1.2.3.4') self.assertEqual(out_dict, resp) def test_get_nvt_files_count(self, MockOpenvasDB): MockOpenvasDB.get_key_count.return_value = 20 self.assertEqual(self.nvti.get_nvt_files_count(), 20) MockOpenvasDB.get_key_count.assert_called_with('foo', 'filename:*') def test_get_nvt_count(self, MockOpenvasDB): MockOpenvasDB.get_key_count.return_value = 20 self.assertEqual(self.nvti.get_nvt_count(), 20) MockOpenvasDB.get_key_count.assert_called_with('foo', 'nvt:*') def test_force_reload(self, _MockOpenvasDB): self.nvti.force_reload() self.db.release_database.assert_called_with(self.nvti) def test_flush(self, _MockOpenvasDB): self.nvti._ctx = Mock() self.nvti.flush() self.nvti._ctx.flushdb.assert_called_with() def test_add_vt(self, MockOpenvasDB): MockOpenvasDB.add_single_list = Mock() self.nvti.add_vt_to_cache( '1234', [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', ], ) MockOpenvasDB.add_single_list.assert_called_with( 'foo', '1234', [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', ], ) def test_get_file_checksum(self, MockOpenvasDB): MockOpenvasDB.get_single_item.return_value = '123456' path = Path("/tmp/foo.csv") resp = self.nvti.get_file_checksum(path) self.assertEqual(resp, '123456') MockOpenvasDB.get_single_item.assert_called_with( 'foo', "sha256sums:/tmp/foo.csv" ) 
ospd-openvas-21.4.3/tests/test_openvas.py000066400000000000000000000252531413127500500204520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . import subprocess from unittest import TestCase from unittest.mock import patch, MagicMock import psutil from ospd_openvas.openvas import Openvas class OpenvasCommandTestCase(TestCase): @patch('ospd_openvas.openvas.subprocess.check_output') def test_get_version(self, mock_check_output: MagicMock): mock_check_output.return_value = b"OpenVAS 20.08" self.assertEqual(Openvas.get_version(), 'OpenVAS 20.08') mock_check_output.assert_called_with( ['openvas', '-V'], stderr=subprocess.STDOUT ) @patch('ospd_openvas.openvas.subprocess.check_output') def test_get_version_not_found(self, mock_check_output: MagicMock): mock_check_output.return_value = b"Foo 20.08" self.assertIsNone(Openvas.get_version()) mock_check_output.assert_called_with( ['openvas', '-V'], stderr=subprocess.STDOUT ) @patch('ospd_openvas.openvas.subprocess.check_output') def test_get_version_with_error(self, mock_check_output: MagicMock): mock_check_output.side_effect = subprocess.SubprocessError('foo') self.assertIsNone(Openvas.get_version()) mock_check_output.assert_called_with( ['openvas', '-V'], stderr=subprocess.STDOUT ) mock_check_output.reset_mock() 
mock_check_output.side_effect = OSError('foo') self.assertIsNone(Openvas.get_version()) mock_check_output.assert_called_with( ['openvas', '-V'], stderr=subprocess.STDOUT ) @patch('ospd_openvas.openvas.subprocess.check_call') def test_check(self, mock_check_call: MagicMock): self.assertTrue(Openvas.check()) mock_check_call.assert_called_with( ['openvas', '-V'], stdout=subprocess.DEVNULL ) @patch('ospd_openvas.openvas.subprocess.check_call') def test_check_with_error(self, mock_check_call: MagicMock): mock_check_call.side_effect = subprocess.SubprocessError('foo') self.assertFalse(Openvas.check()) mock_check_call.assert_called_with( ['openvas', '-V'], stdout=subprocess.DEVNULL ) mock_check_call.reset_mock() mock_check_call.side_effect = OSError('foo') self.assertFalse(Openvas.check()) mock_check_call.assert_called_with( ['openvas', '-V'], stdout=subprocess.DEVNULL ) @patch('ospd_openvas.openvas.subprocess.check_call') def test_check_sudo(self, mock_check_call: MagicMock): self.assertTrue(Openvas.check_sudo()) mock_check_call.assert_called_with( ['sudo', '-n', 'openvas', '-s'], stdout=subprocess.DEVNULL ) @patch('ospd_openvas.openvas.subprocess.check_call') def test_check_sudo_with_error(self, mock_check_call: MagicMock): mock_check_call.side_effect = subprocess.SubprocessError('foo') self.assertFalse(Openvas.check_sudo()) mock_check_call.assert_called_with( ['sudo', '-n', 'openvas', '-s'], stdout=subprocess.DEVNULL ) mock_check_call.reset_mock() mock_check_call.side_effect = OSError('foo') self.assertFalse(Openvas.check_sudo()) mock_check_call.assert_called_with( ['sudo', '-n', 'openvas', '-s'], stdout=subprocess.DEVNULL ) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_call') def test_load_vts_into_redis(self, mock_check_call, mock_logger): Openvas.load_vts_into_redis() mock_check_call.assert_called_with( ['openvas', '--update-vt-info'], stdout=subprocess.DEVNULL ) mock_logger.error.assert_not_called() 
@patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_call') def test_load_vts_into_redis_with_error( self, mock_check_call: MagicMock, mock_logger: MagicMock ): mock_check_call.side_effect = subprocess.SubprocessError('foo') Openvas.load_vts_into_redis() mock_check_call.assert_called_with( ['openvas', '--update-vt-info'], stdout=subprocess.DEVNULL ) self.assertEqual(mock_logger.error.call_count, 1) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_output') def test_get_settings( self, mock_check_output: MagicMock, _mock_logger: MagicMock ): mock_check_output.return_value = ( b'non_simult_ports = 22 \n plugins_folder = /foo/bar\nfoo = yes\n' b'bar=no\nipsum= \nlorem\n' ) settings = Openvas.get_settings() mock_check_output.assert_called_with(['openvas', '-s']) self.assertEqual(settings['non_simult_ports'], '22') self.assertEqual(settings['plugins_folder'], '/foo/bar') self.assertEqual(settings['foo'], 1) self.assertEqual(settings['bar'], 0) self.assertFalse('ipsum' in settings) self.assertFalse('lorem' in settings) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_output') def test_get_settings_with_error( self, mock_check_output: MagicMock, _mock_logger: MagicMock ): mock_check_output.side_effect = subprocess.SubprocessError('foo') settings = Openvas.get_settings() mock_check_output.assert_called_with(['openvas', '-s']) self.assertFalse(settings) # settings dict is empty mock_check_output.reset_mock() mock_check_output.side_effect = OSError('foo') settings = Openvas.get_settings() mock_check_output.assert_called_with(['openvas', '-s']) self.assertFalse(settings) # settings dict is empty mock_check_output.reset_mock() # https://gehrcke.de/2015/12/how-to-raise-unicodedecodeerror-in-python-3/ mock_check_output.side_effect = UnicodeDecodeError( 'funnycodec', b'\x00\x00', 1, 2, 'This is just a fake reason!' 
) settings = Openvas.get_settings() mock_check_output.assert_called_with(['openvas', '-s']) self.assertFalse(settings) # settings dict is empty @patch('ospd_openvas.openvas.psutil.Popen') def test_start_scan(self, mock_popen: MagicMock): proc = Openvas.start_scan('scan_1') mock_popen.assert_called_with( ['openvas', '--scan-start', 'scan_1'], shell=False ) self.assertIsNotNone(proc) @patch('ospd_openvas.openvas.psutil.Popen') def test_start_scan_with_sudo(self, mock_popen: MagicMock): proc = Openvas.start_scan('scan_1', sudo=True) mock_popen.assert_called_with( ['sudo', '-n', 'openvas', '--scan-start', 'scan_1'], shell=False ) self.assertIsNotNone(proc) @patch('ospd_openvas.openvas.psutil.Popen') def test_start_scan_with_niceness(self, mock_popen: MagicMock): proc = Openvas.start_scan('scan_1', niceness=4) mock_popen.assert_called_with( ['nice', '-n', 4, 'openvas', '--scan-start', 'scan_1'], shell=False ) self.assertIsNotNone(proc) @patch('ospd_openvas.openvas.psutil.Popen') def test_start_scan_with_niceness_and_sudo(self, mock_popen: MagicMock): proc = Openvas.start_scan('scan_1', niceness=4, sudo=True) mock_popen.assert_called_with( [ 'nice', '-n', 4, 'sudo', '-n', 'openvas', '--scan-start', 'scan_1', ], shell=False, ) self.assertIsNotNone(proc) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.psutil.Popen') def test_start_scan_error( self, mock_popen: MagicMock, mock_logger: MagicMock ): mock_popen.side_effect = psutil.Error('foo') proc = Openvas.start_scan('scan_1') mock_popen.assert_called_with( ['openvas', '--scan-start', 'scan_1'], shell=False ) self.assertIsNone(proc) self.assertEqual(mock_logger.warning.call_count, 1) mock_popen.reset_mock() mock_logger.reset_mock() mock_popen.side_effect = OSError('foo') proc = Openvas.start_scan('scan_1') mock_popen.assert_called_with( ['openvas', '--scan-start', 'scan_1'], shell=False ) self.assertIsNone(proc) self.assertEqual(mock_logger.warning.call_count, 1) @patch('ospd_openvas.openvas.logger') 
@patch('ospd_openvas.openvas.subprocess.check_call') def test_stop_scan( self, mock_check_call: MagicMock, _mock_logger: MagicMock ): success = Openvas.stop_scan('scan_1') mock_check_call.assert_called_with(['openvas', '--scan-stop', 'scan_1']) self.assertTrue(success) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_call') def test_stop_scan_with_sudo( self, mock_check_call: MagicMock, _mock_logger: MagicMock ): success = Openvas.stop_scan('scan_1', sudo=True) mock_check_call.assert_called_with( ['sudo', '-n', 'openvas', '--scan-stop', 'scan_1'] ) self.assertTrue(success) @patch('ospd_openvas.openvas.logger') @patch('ospd_openvas.openvas.subprocess.check_call') def test_stop_scan_with_error( self, mock_check_call: MagicMock, mock_logger: MagicMock ): mock_check_call.side_effect = subprocess.SubprocessError('foo') success = Openvas.stop_scan('scan_1') mock_check_call.assert_called_with(['openvas', '--scan-stop', 'scan_1']) self.assertFalse(success) self.assertEqual(mock_logger.warning.call_count, 1) mock_check_call.reset_mock() mock_logger.reset_mock() mock_check_call.side_effect = OSError('foo') success = Openvas.stop_scan('scan_1') mock_check_call.assert_called_with(['openvas', '--scan-stop', 'scan_1']) self.assertFalse(success) self.assertEqual(mock_logger.warning.call_count, 1) ospd-openvas-21.4.3/tests/test_preferencehandler.py000066400000000000000000001176061413127500500224570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. 
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # pylint: disable = too-many-lines import logging from unittest import TestCase from unittest.mock import call, patch, Mock, MagicMock from tests.dummydaemon import DummyDaemon from tests.helper import assert_called_once from ospd_openvas.openvas import Openvas from ospd_openvas.preferencehandler import ( AliveTest, BOREAS_SETTING_NAME, BOREAS_ALIVE_TEST, BOREAS_ALIVE_TEST_PORTS, PreferenceHandler, alive_test_methods_to_bit_field, ) class PreferenceHandlerTestCase(TestCase): @patch('ospd_openvas.db.KbDB') def test_process_vts_not_found(self, mock_kb): dummy = DummyDaemon() logging.Logger.warning = Mock() vts = { '1.3.6.1.4.1.25623.1.0.100065': {'3': 'new value'}, 'vt_groups': ['family=debian', 'family=general'], } p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, dummy.nvti ) dummy.nvti.get_nvt_metadata.return_value = None p_handler._process_vts(vts) # pylint: disable = protected-access assert_called_once(logging.Logger.warning) def test_process_vts_bad_param_id(self): dummy = DummyDaemon() vts = { '1.3.6.1.4.1.25623.1.0.100061': {'3': 'new value'}, 'vt_groups': ['family=debian', 'family=general'], } p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, dummy.nvti ) ret = p_handler._process_vts(vts) # pylint: disable = protected-access self.assertFalse(ret[1]) def test_process_vts(self): dummy = DummyDaemon() vts = { '1.3.6.1.4.1.25623.1.0.100061': {'1': 'new value'}, 'vt_groups': ['family=debian', 'family=general'], } vt_out = ( ['1.3.6.1.4.1.25623.1.0.100061'], {'1.3.6.1.4.1.25623.1.0.100061:1:entry:Data length :': 'new value'}, ) p_handler = 
PreferenceHandler( '1234-1234', None, dummy.scan_collection, dummy.nvti ) ret = p_handler._process_vts(vts) # pylint: disable = protected-access self.assertEqual(ret, vt_out) @patch('ospd_openvas.db.KbDB') def test_set_plugins_false(self, mock_kb): dummy = DummyDaemon() dummy.scan_collection.get_vts = Mock() dummy.scan_collection.get_vts.return_value = {} p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, dummy.nvti ) p_handler.kbdb.add_scan_preferences = Mock() ret = p_handler.prepare_plugins_for_openvas() self.assertFalse(ret) @patch('ospd_openvas.db.KbDB') def test_set_plugins_true(self, mock_kb): dummy = DummyDaemon() vts = { '1.3.6.1.4.1.25623.1.0.100061': {'3': 'new value'}, 'vt_groups': ['family=debian', 'family=general'], } dummy.scan_collection.get_vts = Mock() dummy.scan_collection.get_vts.return_value = vts p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, dummy.nvti ) p_handler.kbdb.add_scan_preferences = Mock() ret = p_handler.prepare_plugins_for_openvas() self.assertTrue(ret) def test_build_credentials_ssh_up(self): dummy = DummyDaemon() cred_out = [ 'auth_port_ssh|||22', '1.3.6.1.4.1.25623.1.0.103591:7:' 'entry:SSH privilege login name:|||test', '1.3.6.1.4.1.25623.1.0.103591:8:' 'password:SSH privilege password:|||1234', '1.3.6.1.4.1.25623.1.0.103591:1:entry:SSH login name:|||username', '1.3.6.1.4.1.25623.1.0.103591:3:' 'password:SSH password (unsafe!):|||pass', ] cred_dict = { 'ssh': { 'type': 'up', 'port': '22', 'username': 'username', 'password': 'pass', 'priv_username': 'test', 'priv_password': '1234', } } p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = p_handler.build_credentials_as_prefs(cred_dict) self.assertEqual(ret, cred_out) def test_build_credentials(self): dummy = DummyDaemon() cred_out = [ '1.3.6.1.4.1.25623.1.0.105058:1:entry:ESXi login name:|||username', '1.3.6.1.4.1.25623.1.0.105058:2:password:' 'ESXi login password:|||pass', 
'auth_port_ssh|||22', '1.3.6.1.4.1.25623.1.0.103591:1:entry:SSH login name:|||username', '1.3.6.1.4.1.25623.1.0.103591:2:' 'password:SSH key passphrase:|||pass', '1.3.6.1.4.1.25623.1.0.103591:4:file:SSH private key:|||', '1.3.6.1.4.1.25623.1.0.90023:1:entry:SMB login:|||username', '1.3.6.1.4.1.25623.1.0.90023:2:password]:SMB password :|||pass', '1.3.6.1.4.1.25623.1.0.105076:1:' 'password:SNMP Community:some comunity', '1.3.6.1.4.1.25623.1.0.105076:2:entry:SNMPv3 Username:username', '1.3.6.1.4.1.25623.1.0.105076:3:password:SNMPv3 Password:pass', '1.3.6.1.4.1.25623.1.0.105076:4:' 'radio:SNMPv3 Authentication Algorithm:some auth algo', '1.3.6.1.4.1.25623.1.0.105076:5:' 'password:SNMPv3 Privacy Password:privacy pass', '1.3.6.1.4.1.25623.1.0.105076:6:' 'radio:SNMPv3 Privacy Algorithm:privacy algo', '1.3.6.1.4.1.25623.1.0.105076:7:' 'entry:SSH privilege login name:|||test', '1.3.6.1.4.1.25623.1.0.105076:8:' 'password:SSH privilege password:|||1234', ] cred_dict = { 'ssh': { 'type': 'ssh', 'port': '22', 'username': 'username', 'password': 'pass', 'priv_username': 'test', 'priv_password': '1234', }, 'smb': {'type': 'smb', 'username': 'username', 'password': 'pass'}, 'esxi': { 'type': 'esxi', 'username': 'username', 'password': 'pass', }, 'snmp': { 'type': 'snmp', 'username': 'username', 'password': 'pass', 'community': 'some comunity', 'auth_algorithm': 'some auth algo', 'privacy_password': 'privacy pass', 'privacy_algorithm': 'privacy algo', }, } p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = p_handler.build_credentials_as_prefs(cred_dict) self.assertEqual(len(ret), len(cred_out)) self.assertIn('auth_port_ssh|||22', cred_out) self.assertIn( '1.3.6.1.4.1.25623.1.0.90023:1:entry:SMB login:|||username', cred_out, ) def test_build_alive_test_opt_empty(self): dummy = DummyDaemon() target_options_dict = {'alive_test': '0'} p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = 
p_handler.build_alive_test_opt_as_prefs(target_options_dict) self.assertEqual(ret, {}) # alive test was supplied via separate xml element dummy = DummyDaemon() target_options_dict = {'alive_test_methods': '1', 'icmp': '0'} p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = p_handler.build_alive_test_opt_as_prefs(target_options_dict) self.assertEqual(ret, {}) def test_build_alive_test_opt(self): dummy = DummyDaemon() alive_test_out = { "1.3.6.1.4.1.25623.1.0.100315:1:checkbox:Do a TCP ping": "no", "1.3.6.1.4.1.25623.1.0.100315:2:checkbox:" "TCP ping tries also TCP-SYN ping": "no", "1.3.6.1.4.1.25623.1.0.100315:7:checkbox:" "TCP ping tries only TCP-SYN ping": "no", "1.3.6.1.4.1.25623.1.0.100315:3:checkbox:Do an ICMP ping": "yes", "1.3.6.1.4.1.25623.1.0.100315:4:checkbox:Use ARP": "no", ( "1.3.6.1.4.1.25623.1.0.100315:5:checkbox:" "Mark unrechable Hosts as dead (not scanning)" ): "yes", } target_options_dict = {'alive_test': '2'} p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = p_handler.build_alive_test_opt_as_prefs(target_options_dict) self.assertEqual(ret, alive_test_out) # alive test was supplied via sepertae xml element dummy = DummyDaemon() target_options_dict = {'alive_test_methods': '1', 'icmp': '1'} p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) ret = p_handler.build_alive_test_opt_as_prefs(target_options_dict) self.assertEqual(ret, alive_test_out) def test_build_alive_test_opt_fail_1(self): dummy = DummyDaemon() logging.Logger.debug = Mock() target_options_dict = {'alive_test': 'a'} p_handler = PreferenceHandler( '1234-1234', None, dummy.scan_collection, None ) target_options = p_handler.build_alive_test_opt_as_prefs( target_options_dict ) assert_called_once(logging.Logger.debug) self.assertEqual(len(target_options), 0) @patch('ospd_openvas.db.KbDB') def test_set_target(self, mock_kb): dummy = DummyDaemon() dummy.scan_collection.get_host_list = MagicMock( 
return_value='192.168.0.1' ) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_target_for_openvas() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, ['TARGET|||192.168.0.1'], ) @patch('ospd_openvas.db.KbDB') def test_set_ports(self, mock_kb): dummy = DummyDaemon() dummy.scan_collection.get_ports = MagicMock(return_value='80,443') p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_ports_for_openvas() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, ['port_range|||80,443'], ) @patch('ospd_openvas.db.KbDB') def test_set_main_kbindex(self, mock_kb): dummy = DummyDaemon() p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.kbdb.add_scan_preferences = Mock() p_handler.kbdb.index = 2 p_handler.prepare_main_kbindex_for_openvas() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, ['ov_maindbid|||2'], ) @patch('ospd_openvas.db.KbDB') def test_set_credentials(self, mock_kb): dummy = DummyDaemon() creds = { 'ssh': { 'type': 'ssh', 'port': '22', 'username': 'username', 'password': 'pass', }, 'smb': {'type': 'smb', 'username': 'username', 'password': 'pass'}, 'esxi': { 'type': 'esxi', 'username': 'username', 'password': 'pass', }, 'snmp': { 'type': 'snmp', 'username': 'username', 'password': 'pass', 'community': 'some comunity', 'auth_algorithm': 'some auth algo', 'privacy_password': 'privacy pass', 'privacy_algorithm': 'privacy algo', }, } dummy.scan_collection.get_credentials = MagicMock(return_value=creds) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_credentials_to_scan_preferences = MagicMock() ret = 
p_handler.prepare_credentials_for_openvas() self.assertTrue(ret) @patch('ospd_openvas.db.KbDB') def test_set_bad_credentials(self, mock_kb): dummy = DummyDaemon() # bad cred type shh instead of ssh creds = { 'shh': { 'type': 'ssh', 'port': '22', 'username': 'username', 'password': 'pass', }, } dummy.scan_collection.get_credentials = MagicMock(return_value=creds) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() ret = p_handler.prepare_credentials_for_openvas() self.assertFalse(ret) @patch('ospd_openvas.db.KbDB') def test_set_credentials_empty(self, mock_kb): dummy = DummyDaemon() creds = {} dummy.scan_collection.get_credentials = MagicMock(return_value=creds) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() ret = p_handler.prepare_credentials_for_openvas() self.assertTrue(ret) @patch('ospd_openvas.db.KbDB') def test_set_host_options(self, mock_kb): dummy = DummyDaemon() exc = '192.168.0.1' dummy.scan_collection.get_exclude_hosts = MagicMock(return_value=exc) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_host_options_for_openvas() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, ['exclude_hosts|||192.168.0.1'], ) @patch('ospd_openvas.db.KbDB') def test_set_host_options_none(self, mock_kb): dummy = DummyDaemon() exc = '' dummy.scan_collection.get_exclude_hosts = MagicMock(return_value=exc) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_host_options_for_openvas() p_handler.kbdb.add_scan_preferences.assert_not_called() @patch('ospd_openvas.db.KbDB') def 
test_set_scan_params(self, mock_kb): dummy = DummyDaemon() ospd_param_dict = { 'drop_privileges': { 'type': 'boolean', 'name': 'drop_privileges', 'default': 0, 'mandatory': 1, 'description': '', }, } opt = {'drop_privileges': 1} dummy.scan_collection.get_options = MagicMock(return_value=opt) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_scan_params_for_openvas(ospd_param_dict) p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, ['drop_privileges|||yes'] ) @patch('ospd_openvas.db.KbDB') def test_set_reverse_lookup_opt(self, mock_kb): dummy = DummyDaemon() t_opt = {'reverse_lookup_only': 1} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_reverse_lookup_opt_for_openvas() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, [ 'reverse_lookup_only|||yes', 'reverse_lookup_unify|||no', ], ) @patch('ospd_openvas.db.KbDB') def test_set_boreas_alive_test_with_settings(self, mock_kb): # No Boreas config setting (BOREAS_SETTING_NAME) set dummy = DummyDaemon() ov_setting = {'not_the_correct_setting': 1} t_opt = {} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() p_handler.kbdb.add_scan_preferences.assert_not_called() # Boreas config setting set but invalid alive_test. 
dummy = DummyDaemon() t_opt = {'alive_test': "error"} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # ALIVE_TEST_TCP_SYN_SERVICE as alive test. dummy = DummyDaemon() t_opt = {'alive_test': AliveTest.ALIVE_TEST_TCP_SYN_SERVICE} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||16'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # ICMP was chosen as alive test. dummy = DummyDaemon() t_opt = {'alive_test': AliveTest.ALIVE_TEST_ICMP} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # "Scan Config Default" as alive_test. 
dummy = DummyDaemon() t_opt = {'alive_test': AliveTest.ALIVE_TEST_SCAN_CONFIG_DEFAULT} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # TCP-SYN alive test and dedicated port list for alive scan provided. dummy = DummyDaemon() t_opt = { 'alive_test_ports': "80,137", 'alive_test': AliveTest.ALIVE_TEST_TCP_SYN_SERVICE, } dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [ call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||16']), call( p_handler.scan_id, [BOREAS_ALIVE_TEST_PORTS + '|||80,137'] ), ] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) @patch('ospd_openvas.db.KbDB') def test_set_boreas_alive_test_not_as_enum(self, mock_kb): # No Boreas config setting (BOREAS_SETTING_NAME) set dummy = DummyDaemon() ov_setting = {'not_the_correct_setting': 1} t_opt = {} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() p_handler.kbdb.add_scan_preferences.assert_not_called() # Boreas config setting 
set but invalid alive_test. dummy = DummyDaemon() t_opt = {'alive_test_methods': "1", 'arp': '-1'} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # ICMP was chosen as alive test. dummy = DummyDaemon() t_opt = {'alive_test_methods': "1", 'icmp': '1'} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # tcp_syn as alive test. dummy = DummyDaemon() t_opt = {'alive_test_methods': "1", 'tcp_syn': '1'} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||16'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # tcp_ack as alive test. 
dummy = DummyDaemon() t_opt = {'alive_test_methods': "1", 'tcp_ack': '1'} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||1'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # arp as alive test. dummy = DummyDaemon() t_opt = {'alive_test_methods': "1", 'arp': '1'} dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt) ov_setting = {BOREAS_SETTING_NAME: 1} with patch.object(Openvas, 'get_settings', return_value=ov_setting): p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler.scan_id = '456-789' p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_boreas_alive_test() calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||4'])] p_handler.kbdb.add_scan_preferences.assert_has_calls(calls) # arp as alive test. 
        # (Continuation of the Boreas alive-test preference test.)
        # Only 'consider_alive' enabled: expects bit-field value 8.
        dummy = DummyDaemon()
        t_opt = {'alive_test_methods': "1", 'consider_alive': '1'}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)
        ov_setting = {BOREAS_SETTING_NAME: 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_boreas_alive_test()

            calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||8'])]
            p_handler.kbdb.add_scan_preferences.assert_has_calls(calls)

        # all alive test methods
        # All five methods enabled at once: expects bit-field value 31.
        dummy = DummyDaemon()
        t_opt = {
            'alive_test_methods': "1",
            'icmp': '1',
            'tcp_ack': '1',
            'tcp_syn': '1',
            'arp': '1',
            'consider_alive': '1',
        }
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)
        ov_setting = {BOREAS_SETTING_NAME: 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_boreas_alive_test()

            calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||31'])]
            p_handler.kbdb.add_scan_preferences.assert_has_calls(calls)

        # TCP-SYN alive test and dedicated port list for alive scan provided.
        # Expects bit-field 16 plus a separate ports preference.
        dummy = DummyDaemon()
        t_opt = {
            'alive_test_ports': "80,137",
            'alive_test_methods': "1",
            'tcp_syn': '1',
        }
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)
        ov_setting = {BOREAS_SETTING_NAME: 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_boreas_alive_test()

            calls = [
                call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||16']),
                call(
                    p_handler.scan_id, [BOREAS_ALIVE_TEST_PORTS + '|||80,137']
                ),
            ]
            p_handler.kbdb.add_scan_preferences.assert_has_calls(calls)

    @patch('ospd_openvas.db.KbDB')
    def test_set_boreas_alive_test_enum_has_precedence(self, mock_kb):
        # When both the per-method flags and the AliveTest enum are given,
        # the enum value wins: only ICMP (bit 2) is forwarded.
        dummy = DummyDaemon()
        t_opt = {
            'alive_test_methods': "1",
            'consider_alive': '1',
            'alive_test': AliveTest.ALIVE_TEST_ICMP,
        }
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)
        ov_setting = {BOREAS_SETTING_NAME: 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_boreas_alive_test()

            # has icmp and not consider_alive
            calls = [call(p_handler.scan_id, [BOREAS_ALIVE_TEST + '|||2'])]
            p_handler.kbdb.add_scan_preferences.assert_has_calls(calls)

    @patch('ospd_openvas.db.KbDB')
    def test_set_boreas_alive_test_without_settings(self, mock_kb):
        # Without the Boreas setting in the scanner config no alive-test
        # preference must be written to the KB.
        dummy = DummyDaemon()
        t_opt = {'alive_test': 16}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)
        ov_setting = {}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_boreas_alive_test()

            p_handler.kbdb.add_scan_preferences.assert_not_called()

    @patch('ospd_openvas.db.KbDB')
    def test_set_alive_no_setting(self, mock_kb):
        # No target options and no scanner settings: nothing is forwarded.
        dummy = DummyDaemon()

        t_opt = {}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            p_handler.kbdb.add_scan_preferences.assert_not_called()

    @patch('ospd_openvas.db.KbDB')
    def test_set_alive_no_invalid_alive_test(self, mock_kb):
        # An invalid AliveTest enum value (-1) must be ignored.
        dummy = DummyDaemon()

        t_opt = {'alive_test': -1}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {'some_setting': 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler._nvts_params = {}  # pylint: disable = protected-access
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            p_handler.kbdb.add_scan_preferences.assert_not_called()

    @patch('ospd_openvas.db.KbDB')
    def test_set_alive_no_invalid_alive_test_no_enum(self, mock_kb):
        # An invalid per-method flag value ('-1') must be ignored as well.
        dummy = DummyDaemon()

        t_opt = {'alive_test_methods': '1', 'icmp': '-1'}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {'some_setting': 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler._nvts_params = {}  # pylint: disable = protected-access
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            p_handler.kbdb.add_scan_preferences.assert_not_called()

    @patch('ospd_openvas.db.KbDB')
    def test_set_alive_pinghost(self, mock_kb):
        # AliveTest enum value 2 (ICMP) is mapped onto the checkbox
        # preferences of the ping-host VT (OID ...100315).
        dummy = DummyDaemon()

        alive_test_out = [
            "1.3.6.1.4.1.25623.1.0.100315:1:checkbox:Do a TCP ping|||no",
            "1.3.6.1.4.1.25623.1.0.100315:2:checkbox:"
            "TCP ping tries also TCP-SYN ping|||no",
            "1.3.6.1.4.1.25623.1.0.100315:7:checkbox:"
            "TCP ping tries only TCP-SYN ping|||no",
            "1.3.6.1.4.1.25623.1.0.100315:3:checkbox:Do an ICMP ping|||yes",
            "1.3.6.1.4.1.25623.1.0.100315:4:checkbox:Use ARP|||no",
            "1.3.6.1.4.1.25623.1.0.100315:5:checkbox:"
            "Mark unrechable Hosts as dead (not scanning)|||yes",
        ]
        t_opt = {'alive_test': 2}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {'some_setting': 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler._nvts_params = {}  # pylint: disable = protected-access
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            for (
                key,
                value,
            ) in (
                p_handler._nvts_params.items()  # pylint: disable = protected-access
            ):
                self.assertTrue(
                    "{0}|||{1}".format(key, value) in alive_test_out
                )

    @patch('ospd_openvas.db.KbDB')
    def test_prepare_alive_test_not_supplied_as_enum(self, mock_kb):
        # The per-method flags must produce the same ping-host VT
        # preferences as the equivalent enum value.
        dummy = DummyDaemon()

        alive_test_out = {
            "1.3.6.1.4.1.25623.1.0.100315:1:checkbox:Do a TCP ping": "no",
            "1.3.6.1.4.1.25623.1.0.100315:2:checkbox:"
            "TCP ping tries also TCP-SYN ping": "no",
            "1.3.6.1.4.1.25623.1.0.100315:7:checkbox:"
            "TCP ping tries only TCP-SYN ping": "no",
            "1.3.6.1.4.1.25623.1.0.100315:3:checkbox:Do an ICMP ping": "yes",
            "1.3.6.1.4.1.25623.1.0.100315:4:checkbox:Use ARP": "no",
            "1.3.6.1.4.1.25623.1.0.100315:5:checkbox:"
            "Mark unrechable Hosts as dead (not scanning)": "yes",
        }
        t_opt = {'alive_test_methods': '1', 'icmp': '1'}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {'some_setting': 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234',
                mock_kb,
                dummy.scan_collection,
                None
            )
            p_handler._nvts_params = {}  # pylint: disable = protected-access
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            self.assertEqual(
                p_handler._nvts_params,  # pylint: disable = protected-access
                alive_test_out,
            )

    @patch('ospd_openvas.db.KbDB')
    def test_prepare_alive_test_no_enum_no_alive_test(self, mock_kb):
        # icmp == '0' means the method is disabled: no preference is set.
        dummy = DummyDaemon()

        t_opt = {'alive_test_methods': '1', 'icmp': '0'}
        dummy.scan_collection.get_target_options = MagicMock(return_value=t_opt)

        ov_setting = {'some_setting': 1}
        with patch.object(Openvas, 'get_settings', return_value=ov_setting):
            p_handler = PreferenceHandler(
                '1234-1234', mock_kb, dummy.scan_collection, None
            )
            p_handler._nvts_params = {}  # pylint: disable = protected-access
            p_handler.scan_id = '456-789'
            p_handler.kbdb.add_scan_preferences = MagicMock()
            p_handler.prepare_alive_test_option_for_openvas()

            p_handler.kbdb.add_scan_preferences.assert_not_called()

    def test_alive_test_methods_to_bit_field(self):
        # Each individual method flag maps to exactly one AliveTest bit.
        self.assertEqual(
            AliveTest.ALIVE_TEST_TCP_ACK_SERVICE,
            alive_test_methods_to_bit_field(
                icmp=False,
                tcp_ack=True,
                tcp_syn=False,
                arp=False,
                consider_alive=False,
            ),
        )

        self.assertEqual(
            AliveTest.ALIVE_TEST_ICMP,
            alive_test_methods_to_bit_field(
                icmp=True,
                tcp_ack=False,
                tcp_syn=False,
                arp=False,
                consider_alive=False,
            ),
        )

        self.assertEqual(
            AliveTest.ALIVE_TEST_ARP,
            alive_test_methods_to_bit_field(
                icmp=False,
                tcp_ack=False,
                tcp_syn=False,
                arp=True,
                consider_alive=False,
            ),
        )

        self.assertEqual(
            AliveTest.ALIVE_TEST_CONSIDER_ALIVE,
            alive_test_methods_to_bit_field(
                icmp=False,
                tcp_ack=False,
                tcp_syn=False,
                arp=False,
                consider_alive=True,
            ),
        )

        self.assertEqual(
            AliveTest.ALIVE_TEST_TCP_SYN_SERVICE,
            alive_test_methods_to_bit_field(
                icmp=False,
                tcp_ack=False,
                tcp_syn=True,
                arp=False,
                consider_alive=False,
            ),
        )

        # All flags together must OR every bit into the field.
        all_alive_test_methods = (
            AliveTest.ALIVE_TEST_SCAN_CONFIG_DEFAULT |
            AliveTest.ALIVE_TEST_TCP_ACK_SERVICE |
            AliveTest.ALIVE_TEST_ICMP |
AliveTest.ALIVE_TEST_ARP | AliveTest.ALIVE_TEST_CONSIDER_ALIVE | AliveTest.ALIVE_TEST_TCP_SYN_SERVICE ) self.assertEqual( all_alive_test_methods, alive_test_methods_to_bit_field( icmp=True, tcp_ack=True, tcp_syn=True, arp=True, consider_alive=True, ), ) @patch('ospd_openvas.db.KbDB') def test_prepare_nvt_prefs(self, mock_kb): dummy = DummyDaemon() alive_test_out = [ "1.3.6.1.4.1.25623.1.0.100315:1:checkbox:Do a TCP ping|||no" ] p_handler = PreferenceHandler( '1234-1234', mock_kb, dummy.scan_collection, None ) p_handler._nvts_params = { # pylint: disable = protected-access "1.3.6.1.4.1.25623.1.0.100315:1:checkbox:Do a TCP ping": "no" } p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_nvt_preferences() p_handler.kbdb.add_scan_preferences.assert_called_with( p_handler.scan_id, alive_test_out, ) @patch('ospd_openvas.db.KbDB') def test_prepare_nvt_prefs_no_prefs(self, mock_kb): dummy = DummyDaemon() p_handler = PreferenceHandler( '456-789', mock_kb, dummy.scan_collection, None ) p_handler._nvts_params = {} # pylint: disable = protected-access p_handler.kbdb.add_scan_preferences = MagicMock() p_handler.prepare_nvt_preferences() p_handler.kbdb.add_scan_preferences.assert_not_called() ospd-openvas-21.4.3/tests/test_vthelper.py000066400000000000000000000165171413127500500206330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2014-2021 Greenbone Networks GmbH # # SPDX-License-Identifier: AGPL-3.0-or-later # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. 
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from hashlib import sha256
from unittest import TestCase

from tests.dummydaemon import DummyDaemon
from tests.helper import assert_called_once

from ospd_openvas.vthelper import VtHelper


class VtHelperTestCase(TestCase):
    """Tests for VtHelper on top of the mocked NVTI cache of DummyDaemon."""

    @staticmethod
    def _mantis_metadata(**severity_fields):
        """Build the NVT metadata dict for the 'Mantis Detection' VT.

        The CVSSv2 fields are always present. CVSSv3-specific fields
        (severity_vector, severity_date, severity_origin) can be merged
        in via keyword arguments, so the two severity tests do not have
        to duplicate the whole fixture dictionary.
        """
        metadata = {
            'category': '3',
            'creation_date': '1237458156',
            'cvss_base_vector': 'AV:N/AC:L/Au:N/C:N/I:N/A:N',
            'excluded_keys': 'Settings/disable_cgi_scanning',
            'family': 'Product detection',
            'filename': 'mantis_detect.nasl',
            'last_modification': '1533906565',
            'name': 'Mantis Detection',
            'qod_type': 'remote_banner',
            'required_ports': 'Services/www, 80',
            'solution': 'some solution',
            'solution_type': 'WillNotFix',
            'solution_method': 'DebianAPTUpgrade',
            'impact': 'some impact',
            'insight': 'some insight',
            'summary': 'some summary',
            'affected': 'some affection',
            'timeout': '0',
            'vt_params': {
                '1': {
                    'id': '1',
                    'default': '',
                    'description': 'Description',
                    'name': 'Data length :',
                    'type': 'entry',
                },
                '2': {
                    'id': '2',
                    'default': 'no',
                    'description': 'Description',
                    'name': 'Do not randomize the order in which ports are scanned',  # pylint: disable=line-too-long
                    'type': 'checkbox',
                },
            },
            'refs': {
                'bid': [''],
                'cve': [''],
                'xref': ['URL:http://www.mantisbt.org/'],
            },
        }
        # Key insertion order differs slightly from the former inline
        # fixtures; the assertions below look fields up by name only.
        metadata.update(severity_fields)
        return metadata

    def test_get_single_vt(self):
        """A single VT can be fetched by OID and carries its name."""
        dummy = DummyDaemon()
        vthelper = VtHelper(dummy.nvti)
        res = vthelper.get_single_vt("1.3.6.1.4.1.25623.1.0.100061")

        assert_called_once(dummy.nvti.get_nvt_metadata)
        self.assertEqual("Mantis Detection", res.get('name'))

    def test_calculate_vts_collection_hash_no_params(self):
        """The collection hash is the SHA-256 over the serialized VT data."""
        dummy = DummyDaemon()
        vthelper = VtHelper(dummy.nvti)
        hash_out = vthelper.calculate_vts_collection_hash()

        # Expected serialization (from the fixture VT): OID, then the
        # last_modification timestamp, then each parameter's
        # id + name + default value, concatenated.
        vt_hash_str = (
            '1.3.6.1.4.1.25623.1.0.10006115339065651Data '
            + 'length :2Do not randomize the order in which '
            + 'ports are scannedno'
        )

        vt_hash = sha256()
        vt_hash.update(vt_hash_str.encode('utf-8'))
        hash_test = vt_hash.hexdigest()

        self.assertEqual(hash_test, hash_out)

    def test_get_vt_iterator(self):
        """The iterator yields only the OIDs known to the cache."""
        dummy = DummyDaemon()
        vthelper = VtHelper(dummy.nvti)
        vt = ["1.3.6.1.4.1.25623.1.0.100061"]
        for key, _ in vthelper.get_vt_iterator():
            self.assertIn(key, vt)

    def test_get_vt_iterator_with_filter(self):
        """A vt_selection restricts the iterator and keeps all VT fields."""
        dummy = DummyDaemon()
        vthelper = VtHelper(dummy.nvti)

        vt = ["1.3.6.1.4.1.25623.1.0.100061"]
        vtout = dummy.VTS["1.3.6.1.4.1.25623.1.0.100061"]

        for key, vt_dict in vthelper.get_vt_iterator(vt_selection=vt):
            self.assertIn(key, vt)
            for key2 in vtout:
                self.assertIn(key2, vt_dict)

    def test_get_vt_iterator_with_filter_no_vt(self):
        """Selecting an OID with no metadata yields None values."""
        dummy = DummyDaemon()
        vthelper = VtHelper(dummy.nvti)
        dummy.nvti.get_nvt_metadata.return_value = None

        vt = ["1.3.6.1.4.1.25623.1.0.100065"]

        for _, values in vthelper.get_vt_iterator(vt_selection=vt):
            self.assertIs(values, None)

    def test_get_single_vt_severity_cvssv3(self):
        """A severity_vector in the metadata is reported as CVSSv3."""
        dummy = DummyDaemon()
        dummy.nvti.get_nvt_metadata.return_value = self._mantis_metadata(
            severity_vector='CVSS:3.0/AV:L/AC:H/PR:H/UI:R/S:U/C:N/I:L/A:L',
            severity_date='1237458156',
            severity_origin='Greenbone',
        )

        vthelper = VtHelper(dummy.nvti)
        res = vthelper.get_single_vt("1.3.6.1.4.1.25623.1.0.100061")
        assert_called_once(dummy.nvti.get_nvt_metadata)

        severities = res.get('severities')
        self.assertEqual(
            "CVSS:3.0/AV:L/AC:H/PR:H/UI:R/S:U/C:N/I:L/A:L",
            severities.get('severity_base_vector'),
        )
        self.assertEqual("cvss_base_v3", severities.get('severity_type'))
        self.assertEqual("Greenbone", severities.get('severity_origin'))
        self.assertEqual("1237458156", severities.get('severity_date'))

    def test_get_single_vt_severity_cvssv2(self):
        """Without a severity_vector the CVSSv2 base vector is used and
        the severity date falls back to the creation date."""
        dummy = DummyDaemon()
        dummy.nvti.get_nvt_metadata.return_value = self._mantis_metadata()

        vthelper = VtHelper(dummy.nvti)
        res = vthelper.get_single_vt("1.3.6.1.4.1.25623.1.0.100061")
        assert_called_once(dummy.nvti.get_nvt_metadata)

        severities = res.get('severities')
        self.assertEqual(
            "AV:N/AC:L/Au:N/C:N/I:N/A:N",
            severities.get('severity_base_vector'),
        )
        self.assertEqual("cvss_base_v2", severities.get('severity_type'))
        self.assertEqual(None, severities.get('severity_origin'))
        self.assertEqual("1237458156", severities.get('severity_date'))